Coverage for pydantic/type_adapter.py: 89.16%
159 statements
« prev ^ index » next coverage.py v7.10.0, created at 2025-07-26 11:49 +0000
« prev ^ index » next coverage.py v7.10.0, created at 2025-07-26 11:49 +0000
1"""Type adapter specification."""
3from __future__ import annotations as _annotations 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
5import sys 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
6from collections.abc import Callable, Iterable 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
7from dataclasses import is_dataclass 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
8from types import FrameType 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
9from typing import ( 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
10 Any,
11 Generic,
12 Literal,
13 TypeVar,
14 cast,
15 final,
16 overload,
17)
19from pydantic_core import CoreSchema, SchemaSerializer, SchemaValidator, Some 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
20from typing_extensions import ParamSpec, is_typeddict 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
22from pydantic.errors import PydanticUserError 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
23from pydantic.main import BaseModel, IncEx 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
25from ._internal import _config, _generate_schema, _mock_val_ser, _namespace_utils, _repr, _typing_extra, _utils 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
26from .config import ConfigDict 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
27from .errors import PydanticUndefinedAnnotation 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
28from .json_schema import ( 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
29 DEFAULT_REF_TEMPLATE,
30 GenerateJsonSchema,
31 JsonSchemaKeyT,
32 JsonSchemaMode,
33 JsonSchemaValue,
34)
35from .plugin._schema_validator import PluggableSchemaValidator, create_schema_validator 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
37T = TypeVar('T') 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
38R = TypeVar('R') 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
39P = ParamSpec('P') 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
40TypeAdapterT = TypeVar('TypeAdapterT', bound='TypeAdapter') 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
43def _getattr_no_parents(obj: Any, attribute: str) -> Any: 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
44 """Returns the attribute value without attempting to look up attributes from parent types."""
45 if hasattr(obj, '__dict__'): 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
46 try: 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
47 return obj.__dict__[attribute] 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
48 except KeyError: 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
49 pass 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
51 slots = getattr(obj, '__slots__', None) 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
52 if slots is not None and attribute in slots: 52 ↛ 53line 52 didn't jump to line 53 because the condition on line 52 was never true1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
53 return getattr(obj, attribute)
54 else:
55 raise AttributeError(attribute) 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
def _type_has_config(type_: Any) -> bool:
    """Returns whether the type has config."""
    # Unwrap `Annotated[...]` to inspect the underlying type:
    unwrapped = _typing_extra.annotated_type(type_) or type_
    try:
        is_configurable = issubclass(unwrapped, BaseModel) or is_dataclass(unwrapped) or is_typeddict(unwrapped)
    except TypeError:
        # `issubclass` rejects non-classes (e.g. special forms), which cannot carry config:
        return False
    return is_configurable
68@final 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
69class TypeAdapter(Generic[T]): 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
70 """!!! abstract "Usage Documentation"
71 [`TypeAdapter`](../concepts/type_adapter.md)
73 Type adapters provide a flexible way to perform validation and serialization based on a Python type.
75 A `TypeAdapter` instance exposes some of the functionality from `BaseModel` instance methods
76 for types that do not have such methods (such as dataclasses, primitive types, and more).
78 **Note:** `TypeAdapter` instances are not types, and cannot be used as type annotations for fields.
80 Args:
81 type: The type associated with the `TypeAdapter`.
82 config: Configuration for the `TypeAdapter`, should be a dictionary conforming to
83 [`ConfigDict`][pydantic.config.ConfigDict].
85 !!! note
86 You cannot provide a configuration when instantiating a `TypeAdapter` if the type you're using
87 has its own config that cannot be overridden (ex: `BaseModel`, `TypedDict`, and `dataclass`). A
88 [`type-adapter-config-unused`](../errors/usage_errors.md#type-adapter-config-unused) error will
89 be raised in this case.
90 _parent_depth: Depth at which to search for the [parent frame][frame-objects]. This frame is used when
91 resolving forward annotations during schema building, by looking for the globals and locals of this
92 frame. Defaults to 2, which will result in the frame where the `TypeAdapter` was instantiated.
94 !!! note
95 This parameter is named with an underscore to suggest its private nature and discourage use.
96 It may be deprecated in a minor version, so we only recommend using it if you're comfortable
 with potential change in behavior/support. Its default value is 2 because internally,
98 the `TypeAdapter` class makes another call to fetch the frame.
99 module: The module that passes to plugin if provided.
101 Attributes:
102 core_schema: The core schema for the type.
103 validator: The schema validator for the type.
104 serializer: The schema serializer for the type.
105 pydantic_complete: Whether the core schema for the type is successfully built.
107 ??? tip "Compatibility with `mypy`"
108 Depending on the type used, `mypy` might raise an error when instantiating a `TypeAdapter`. As a workaround, you can explicitly
109 annotate your variable:
111 ```py
112 from typing import Union
114 from pydantic import TypeAdapter
116 ta: TypeAdapter[Union[str, int]] = TypeAdapter(Union[str, int]) # type: ignore[arg-type]
117 ```
119 ??? info "Namespace management nuances and implementation details"
121 Here, we collect some notes on namespace management, and subtle differences from `BaseModel`:
123 `BaseModel` uses its own `__module__` to find out where it was defined
124 and then looks for symbols to resolve forward references in those globals.
125 On the other hand, `TypeAdapter` can be initialized with arbitrary objects,
126 which may not be types and thus do not have a `__module__` available.
127 So instead we look at the globals in our parent stack frame.
129 It is expected that the `ns_resolver` passed to this function will have the correct
130 namespace for the type we're adapting. See the source code for `TypeAdapter.__init__`
131 and `TypeAdapter.rebuild` for various ways to construct this namespace.
133 This works for the case where this function is called in a module that
134 has the target of forward references in its scope, but
135 does not always work for more complex cases.
137 For example, take the following:
139 ```python {title="a.py"}
140 IntList = list[int]
141 OuterDict = dict[str, 'IntList']
142 ```
144 ```python {test="skip" title="b.py"}
145 from a import OuterDict
147 from pydantic import TypeAdapter
149 IntList = int # replaces the symbol the forward reference is looking for
150 v = TypeAdapter(OuterDict)
151 v({'x': 1}) # should fail but doesn't
152 ```
154 If `OuterDict` were a `BaseModel`, this would work because it would resolve
155 the forward reference within the `a.py` namespace.
156 But `TypeAdapter(OuterDict)` can't determine what module `OuterDict` came from.
158 In other words, the assumption that _all_ forward references exist in the
159 module we are being called from is not technically always true.
160 Although most of the time it is and it works fine for recursive models and such,
161 `BaseModel`'s behavior isn't perfect either and _can_ break in similar ways,
162 so there is no right or wrong between the two.
164 But at the very least this behavior is _subtly_ different from `BaseModel`'s.
165 """
    # Public attributes assigned by `_init_core_attrs` (set to mocks while the build is deferred/incomplete):
    core_schema: CoreSchema  # the pydantic-core schema built for the adapted type
    validator: SchemaValidator | PluggableSchemaValidator  # validator built from `core_schema`
    serializer: SchemaSerializer  # serializer built from `core_schema`
    pydantic_complete: bool  # whether the core schema build succeeded (False while mocked)
    @overload
    def __init__(
        self,
        type: type[T],
        *,
        config: ConfigDict | None = ...,
        _parent_depth: int = ...,
        module: str | None = ...,
    ) -> None: ...

    # This second overload is for unsupported special forms (such as Annotated, Union, etc.)
    # Currently there is no way to type this correctly
    # See https://github.com/python/typing/pull/1618
    @overload
    def __init__(
        self,
        type: Any,
        *,
        config: ConfigDict | None = ...,
        _parent_depth: int = ...,
        module: str | None = ...,
    ) -> None: ...

    def __init__(
        self,
        type: Any,
        *,
        config: ConfigDict | None = None,
        _parent_depth: int = 2,
        module: str | None = None,
    ) -> None:
        """Initialize the adapter: capture the caller's namespaces and build the core attributes.

        Args:
            type: The type to adapt.
            config: Optional configuration; rejected when `type` already carries its own config
                (`BaseModel`, dataclass, `TypedDict`), since it would silently be ignored.
            _parent_depth: Stack depth used to locate the caller's frame for forward-reference resolution.
            module: Module name reported to plugins; defaults to the caller frame's `__name__`.

        Raises:
            PydanticUserError: If `config` is provided for a type that has its own config.
        """
        if _type_has_config(type) and config is not None:
            raise PydanticUserError(
                'Cannot use `config` when the type is a BaseModel, dataclass or TypedDict.'
                ' These types can have their own config and setting the config via the `config`'
                ' parameter to TypeAdapter will not override it, thus the `config` you passed to'
                ' TypeAdapter becomes meaningless, which is probably not what you want.',
                code='type-adapter-config-unused',
            )

        self._type = type
        self._config = config
        self._parent_depth = _parent_depth
        self.pydantic_complete = False

        # Grab the caller's globals/locals so forward references in `type` can be resolved:
        parent_frame = self._fetch_parent_frame()
        if parent_frame is not None:
            globalns = parent_frame.f_globals
            # Do not provide a local ns if the type adapter happens to be instantiated at the module level:
            localns = parent_frame.f_locals if parent_frame.f_locals is not globalns else {}
        else:
            globalns = {}
            localns = {}

        self._module_name = module or cast(str, globalns.get('__name__', ''))
        # `force=False` lets `defer_build` configuration install mocks instead of building eagerly:
        self._init_core_attrs(
            ns_resolver=_namespace_utils.NsResolver(
                namespaces_tuple=_namespace_utils.NamespacesTuple(locals=localns, globals=globalns),
                parent_namespace=localns,
            ),
            force=False,
        )
235 def _fetch_parent_frame(self) -> FrameType | None: 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
236 frame = sys._getframe(self._parent_depth) 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
237 if frame.f_globals.get('__name__') == 'typing': 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
238 # Because `TypeAdapter` is generic, explicitly parametrizing the class results
239 # in a `typing._GenericAlias` instance, which proxies instantiation calls to the
240 # "real" `TypeAdapter` class and thus adding an extra frame to the call. To avoid
241 # pulling anything from the `typing` module, use the correct frame (the one before):
242 return frame.f_back 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
244 return frame 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
    def _init_core_attrs(
        self, ns_resolver: _namespace_utils.NsResolver, force: bool, raise_errors: bool = False
    ) -> bool:
        """Initialize the core schema, validator, and serializer for the type.

        Args:
            ns_resolver: The namespace resolver to use when building the core schema for the adapted type.
            force: Whether to force the construction of the core schema, validator, and serializer.
                If `force` is set to `False` and `_defer_build` is `True`, the core schema, validator, and serializer will be set to mocks.
            raise_errors: Whether to raise errors if initializing any of the core attrs fails.

        Returns:
            `True` if the core schema, validator, and serializer were successfully initialized, otherwise `False`.

        Raises:
            PydanticUndefinedAnnotation: If `PydanticUndefinedAnnotation` occurs in `__get_pydantic_core_schema__`
                and `raise_errors=True`.
        """
        # Deferred build: install mocks that rebuild lazily on first use instead of building now.
        if not force and self._defer_build:
            _mock_val_ser.set_type_adapter_mocks(self)
            self.pydantic_complete = False
            return False

        try:
            # Fast path: the adapted type already carries prebuilt core attributes (e.g. a BaseModel).
            # `_getattr_no_parents` avoids picking up a parent class's schema for a subclass.
            self.core_schema = _getattr_no_parents(self._type, '__pydantic_core_schema__')
            self.validator = _getattr_no_parents(self._type, '__pydantic_validator__')
            self.serializer = _getattr_no_parents(self._type, '__pydantic_serializer__')

            # TODO: we don't go through the rebuild logic here directly because we don't want
            # to repeat all of the namespace fetching logic that we've already done
            # so we simply skip to the block below that does the actual schema generation
            if (
                isinstance(self.core_schema, _mock_val_ser.MockCoreSchema)
                or isinstance(self.validator, _mock_val_ser.MockValSer)
                or isinstance(self.serializer, _mock_val_ser.MockValSer)
            ):
                # Treat mocked attrs the same as missing ones so the except branch rebuilds them:
                raise AttributeError()
        except AttributeError:
            # Slow path: generate the core schema from scratch for this type.
            config_wrapper = _config.ConfigWrapper(self._config)

            schema_generator = _generate_schema.GenerateSchema(config_wrapper, ns_resolver=ns_resolver)

            try:
                core_schema = schema_generator.generate_schema(self._type)
            except PydanticUndefinedAnnotation:
                if raise_errors:
                    raise
                # Unresolvable forward reference: leave mocks in place so a later `rebuild()` can retry.
                _mock_val_ser.set_type_adapter_mocks(self)
                return False

            try:
                self.core_schema = schema_generator.clean_schema(core_schema)
            except _generate_schema.InvalidSchemaError:
                _mock_val_ser.set_type_adapter_mocks(self)
                return False

            core_config = config_wrapper.core_config(None)

            self.validator = create_schema_validator(
                schema=self.core_schema,
                schema_type=self._type,
                schema_type_module=self._module_name,
                schema_type_name=str(self._type),
                schema_kind='TypeAdapter',
                config=core_config,
                plugin_settings=config_wrapper.plugin_settings,
            )
            self.serializer = SchemaSerializer(self.core_schema, core_config)

        self.pydantic_complete = True
        return True
318 @property 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
319 def _defer_build(self) -> bool: 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
320 config = self._config if self._config is not None else self._model_config 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
321 if config: 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
322 return config.get('defer_build') is True 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
323 return False 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
325 @property 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
326 def _model_config(self) -> ConfigDict | None: 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
327 type_: Any = _typing_extra.annotated_type(self._type) or self._type # Eg FastAPI heavily uses Annotated 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
328 if _utils.lenient_issubclass(type_, BaseModel): 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
329 return type_.model_config 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
330 return getattr(type_, '__pydantic_config__', None) 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
332 def __repr__(self) -> str: 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
333 return f'TypeAdapter({_repr.display_as_type(self._type)})' 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
    def rebuild(
        self,
        *,
        force: bool = False,
        raise_errors: bool = True,
        _parent_namespace_depth: int = 2,
        _types_namespace: _namespace_utils.MappingNamespace | None = None,
    ) -> bool | None:
        """Try to rebuild the pydantic-core schema for the adapter's type.

        This may be necessary when one of the annotations is a ForwardRef which could not be resolved during
        the initial attempt to build the schema, and automatic rebuilding fails.

        Args:
            force: Whether to force the rebuilding of the type adapter's schema, defaults to `False`.
            raise_errors: Whether to raise errors, defaults to `True`.
            _parent_namespace_depth: Depth at which to search for the [parent frame][frame-objects]. This
                frame is used when resolving forward annotations during schema rebuilding, by looking for
                the locals of this frame. Defaults to 2, which will result in the frame where the method
                was called.
            _types_namespace: An explicit types namespace to use, instead of using the local namespace
                from the parent frame. Defaults to `None`.

        Returns:
            Returns `None` if the schema is already "complete" and rebuilding was not required.
            If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`.
        """
        if not force and self.pydantic_complete:
            return None

        # Decide which locals to resolve forward references against:
        if _types_namespace is not None:
            rebuild_ns = _types_namespace
        elif _parent_namespace_depth > 0:
            # `force=True` fetches the frame locals even when normally skipped:
            rebuild_ns = _typing_extra.parent_frame_namespace(parent_depth=_parent_namespace_depth, force=True) or {}
        else:
            rebuild_ns = {}

        # we have to manually fetch globals here because there's no type on the stack of the NsResolver
        # and so we skip the globalns = get_module_ns_of(typ) call that would normally happen
        # (`max(..., 1)` keeps the depth valid relative to this method's own frame):
        globalns = sys._getframe(max(_parent_namespace_depth - 1, 1)).f_globals
        ns_resolver = _namespace_utils.NsResolver(
            namespaces_tuple=_namespace_utils.NamespacesTuple(locals=rebuild_ns, globals=globalns),
            parent_namespace=rebuild_ns,
        )
        return self._init_core_attrs(ns_resolver=ns_resolver, force=True, raise_errors=raise_errors)
381 def validate_python( 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
382 self,
383 object: Any,
384 /,
385 *,
386 strict: bool | None = None,
387 from_attributes: bool | None = None,
388 context: dict[str, Any] | None = None,
389 experimental_allow_partial: bool | Literal['off', 'on', 'trailing-strings'] = False,
390 by_alias: bool | None = None,
391 by_name: bool | None = None,
392 ) -> T:
393 """Validate a Python object against the model.
395 Args:
396 object: The Python object to validate against the model.
397 strict: Whether to strictly check types.
398 from_attributes: Whether to extract data from object attributes.
399 context: Additional context to pass to the validator.
400 experimental_allow_partial: **Experimental** whether to enable
401 [partial validation](../concepts/experimental.md#partial-validation), e.g. to process streams.
402 * False / 'off': Default behavior, no partial validation.
403 * True / 'on': Enable partial validation.
404 * 'trailing-strings': Enable partial validation and allow trailing strings in the input.
405 by_alias: Whether to use the field's alias when validating against the provided input data.
406 by_name: Whether to use the field's name when validating against the provided input data.
408 !!! note
409 When using `TypeAdapter` with a Pydantic `dataclass`, the use of the `from_attributes`
410 argument is not supported.
412 Returns:
413 The validated object.
414 """
415 if by_alias is False and by_name is not True: 415 ↛ 416line 415 didn't jump to line 416 because the condition on line 415 was never true1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
416 raise PydanticUserError(
417 'At least one of `by_alias` or `by_name` must be set to True.',
418 code='validate-by-alias-and-name-false',
419 )
421 return self.validator.validate_python( 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
422 object,
423 strict=strict,
424 from_attributes=from_attributes,
425 context=context,
426 allow_partial=experimental_allow_partial,
427 by_alias=by_alias,
428 by_name=by_name,
429 )
431 def validate_json( 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
432 self,
433 data: str | bytes | bytearray,
434 /,
435 *,
436 strict: bool | None = None,
437 context: dict[str, Any] | None = None,
438 experimental_allow_partial: bool | Literal['off', 'on', 'trailing-strings'] = False,
439 by_alias: bool | None = None,
440 by_name: bool | None = None,
441 ) -> T:
442 """!!! abstract "Usage Documentation"
443 [JSON Parsing](../concepts/json.md#json-parsing)
445 Validate a JSON string or bytes against the model.
447 Args:
448 data: The JSON data to validate against the model.
449 strict: Whether to strictly check types.
450 context: Additional context to use during validation.
451 experimental_allow_partial: **Experimental** whether to enable
452 [partial validation](../concepts/experimental.md#partial-validation), e.g. to process streams.
453 * False / 'off': Default behavior, no partial validation.
454 * True / 'on': Enable partial validation.
455 * 'trailing-strings': Enable partial validation and allow trailing strings in the input.
456 by_alias: Whether to use the field's alias when validating against the provided input data.
457 by_name: Whether to use the field's name when validating against the provided input data.
459 Returns:
460 The validated object.
461 """
462 if by_alias is False and by_name is not True: 462 ↛ 463line 462 didn't jump to line 463 because the condition on line 462 was never true1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
463 raise PydanticUserError(
464 'At least one of `by_alias` or `by_name` must be set to True.',
465 code='validate-by-alias-and-name-false',
466 )
468 return self.validator.validate_json( 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
469 data,
470 strict=strict,
471 context=context,
472 allow_partial=experimental_allow_partial,
473 by_alias=by_alias,
474 by_name=by_name,
475 )
477 def validate_strings( 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
478 self,
479 obj: Any,
480 /,
481 *,
482 strict: bool | None = None,
483 context: dict[str, Any] | None = None,
484 experimental_allow_partial: bool | Literal['off', 'on', 'trailing-strings'] = False,
485 by_alias: bool | None = None,
486 by_name: bool | None = None,
487 ) -> T:
488 """Validate object contains string data against the model.
490 Args:
491 obj: The object contains string data to validate.
492 strict: Whether to strictly check types.
493 context: Additional context to use during validation.
494 experimental_allow_partial: **Experimental** whether to enable
495 [partial validation](../concepts/experimental.md#partial-validation), e.g. to process streams.
496 * False / 'off': Default behavior, no partial validation.
497 * True / 'on': Enable partial validation.
498 * 'trailing-strings': Enable partial validation and allow trailing strings in the input.
499 by_alias: Whether to use the field's alias when validating against the provided input data.
500 by_name: Whether to use the field's name when validating against the provided input data.
502 Returns:
503 The validated object.
504 """
505 if by_alias is False and by_name is not True: 505 ↛ 506line 505 didn't jump to line 506 because the condition on line 505 was never true1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
506 raise PydanticUserError(
507 'At least one of `by_alias` or `by_name` must be set to True.',
508 code='validate-by-alias-and-name-false',
509 )
511 return self.validator.validate_strings( 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
512 obj,
513 strict=strict,
514 context=context,
515 allow_partial=experimental_allow_partial,
516 by_alias=by_alias,
517 by_name=by_name,
518 )
520 def get_default_value(self, *, strict: bool | None = None, context: dict[str, Any] | None = None) -> Some[T] | None: 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
521 """Get the default value for the wrapped type.
523 Args:
524 strict: Whether to strictly check types.
525 context: Additional context to pass to the validator.
527 Returns:
528 The default value wrapped in a `Some` if there is one or None if not.
529 """
530 return self.validator.get_default_value(strict=strict, context=context) 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
532 def dump_python( 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
533 self,
534 instance: T,
535 /,
536 *,
537 mode: Literal['json', 'python'] = 'python',
538 include: IncEx | None = None,
539 exclude: IncEx | None = None,
540 by_alias: bool | None = None,
541 exclude_unset: bool = False,
542 exclude_defaults: bool = False,
543 exclude_none: bool = False,
544 round_trip: bool = False,
545 warnings: bool | Literal['none', 'warn', 'error'] = True,
546 fallback: Callable[[Any], Any] | None = None,
547 serialize_as_any: bool = False,
548 context: dict[str, Any] | None = None,
549 ) -> Any:
550 """Dump an instance of the adapted type to a Python object.
552 Args:
553 instance: The Python object to serialize.
554 mode: The output format.
555 include: Fields to include in the output.
556 exclude: Fields to exclude from the output.
557 by_alias: Whether to use alias names for field names.
558 exclude_unset: Whether to exclude unset fields.
559 exclude_defaults: Whether to exclude fields with default values.
560 exclude_none: Whether to exclude fields with None values.
561 round_trip: Whether to output the serialized data in a way that is compatible with deserialization.
562 warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
563 "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
564 fallback: A function to call when an unknown value is encountered. If not provided,
565 a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
566 serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.
567 context: Additional context to pass to the serializer.
569 Returns:
570 The serialized object.
571 """
572 return self.serializer.to_python( 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
573 instance,
574 mode=mode,
575 by_alias=by_alias,
576 include=include,
577 exclude=exclude,
578 exclude_unset=exclude_unset,
579 exclude_defaults=exclude_defaults,
580 exclude_none=exclude_none,
581 round_trip=round_trip,
582 warnings=warnings,
583 fallback=fallback,
584 serialize_as_any=serialize_as_any,
585 context=context,
586 )
588 def dump_json( 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
589 self,
590 instance: T,
591 /,
592 *,
593 indent: int | None = None,
594 ensure_ascii: bool = False,
595 include: IncEx | None = None,
596 exclude: IncEx | None = None,
597 by_alias: bool | None = None,
598 exclude_unset: bool = False,
599 exclude_defaults: bool = False,
600 exclude_none: bool = False,
601 round_trip: bool = False,
602 warnings: bool | Literal['none', 'warn', 'error'] = True,
603 fallback: Callable[[Any], Any] | None = None,
604 serialize_as_any: bool = False,
605 context: dict[str, Any] | None = None,
606 ) -> bytes:
607 """!!! abstract "Usage Documentation"
608 [JSON Serialization](../concepts/json.md#json-serialization)
610 Serialize an instance of the adapted type to JSON.
612 Args:
613 instance: The instance to be serialized.
614 indent: Number of spaces for JSON indentation.
615 ensure_ascii: If `True`, the output is guaranteed to have all incoming non-ASCII characters escaped.
616 If `False` (the default), these characters will be output as-is.
617 include: Fields to include.
618 exclude: Fields to exclude.
619 by_alias: Whether to use alias names for field names.
620 exclude_unset: Whether to exclude unset fields.
621 exclude_defaults: Whether to exclude fields with default values.
622 exclude_none: Whether to exclude fields with a value of `None`.
623 round_trip: Whether to serialize and deserialize the instance to ensure round-tripping.
624 warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
625 "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
626 fallback: A function to call when an unknown value is encountered. If not provided,
627 a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
628 serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.
629 context: Additional context to pass to the serializer.
631 Returns:
632 The JSON representation of the given instance as bytes.
633 """
634 return self.serializer.to_json( 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
635 instance,
636 indent=indent,
637 include=include,
638 exclude=exclude,
639 by_alias=by_alias,
640 exclude_unset=exclude_unset,
641 exclude_defaults=exclude_defaults,
642 exclude_none=exclude_none,
643 round_trip=round_trip,
644 warnings=warnings,
645 fallback=fallback,
646 serialize_as_any=serialize_as_any,
647 context=context,
648 )
650 def json_schema( 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
651 self,
652 *,
653 by_alias: bool = True,
654 ref_template: str = DEFAULT_REF_TEMPLATE,
655 schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
656 mode: JsonSchemaMode = 'validation',
657 ) -> dict[str, Any]:
658 """Generate a JSON schema for the adapted type.
660 Args:
661 by_alias: Whether to use alias names for field names.
662 ref_template: The format string used for generating $ref strings.
663 schema_generator: The generator class used for creating the schema.
664 mode: The mode to use for schema generation.
666 Returns:
667 The JSON schema for the model as a dictionary.
668 """
669 schema_generator_instance = schema_generator(by_alias=by_alias, ref_template=ref_template) 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
670 if isinstance(self.core_schema, _mock_val_ser.MockCoreSchema): 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
671 self.core_schema.rebuild() 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
672 assert not isinstance(self.core_schema, _mock_val_ser.MockCoreSchema), 'this is a bug! please report it' 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
673 return schema_generator_instance.generate(self.core_schema, mode=mode) 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
675 @staticmethod 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
676 def json_schemas( 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
677 inputs: Iterable[tuple[JsonSchemaKeyT, JsonSchemaMode, TypeAdapter[Any]]],
678 /,
679 *,
680 by_alias: bool = True,
681 title: str | None = None,
682 description: str | None = None,
683 ref_template: str = DEFAULT_REF_TEMPLATE,
684 schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
685 ) -> tuple[dict[tuple[JsonSchemaKeyT, JsonSchemaMode], JsonSchemaValue], JsonSchemaValue]:
686 """Generate a JSON schema including definitions from multiple type adapters.
688 Args:
689 inputs: Inputs to schema generation. The first two items will form the keys of the (first)
690 output mapping; the type adapters will provide the core schemas that get converted into
691 definitions in the output JSON schema.
692 by_alias: Whether to use alias names.
693 title: The title for the schema.
694 description: The description for the schema.
695 ref_template: The format string used for generating $ref strings.
696 schema_generator: The generator class used for creating the schema.
698 Returns:
699 A tuple where:
701 - The first element is a dictionary whose keys are tuples of JSON schema key type and JSON mode, and
702 whose values are the JSON schema corresponding to that pair of inputs. (These schemas may have
703 JsonRef references to definitions that are defined in the second returned element.)
704 - The second element is a JSON schema containing all definitions referenced in the first returned
705 element, along with the optional title and description keys.
707 """
708 schema_generator_instance = schema_generator(by_alias=by_alias, ref_template=ref_template) 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
710 inputs_ = [] 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
711 for key, mode, adapter in inputs: 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
712 # This is the same pattern we follow for model json schemas - we attempt a core schema rebuild if we detect a mock
713 if isinstance(adapter.core_schema, _mock_val_ser.MockCoreSchema): 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
714 adapter.core_schema.rebuild() 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
715 assert not isinstance(adapter.core_schema, _mock_val_ser.MockCoreSchema), ( 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
716 'this is a bug! please report it'
717 )
718 inputs_.append((key, mode, adapter.core_schema)) 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
720 json_schemas_map, definitions = schema_generator_instance.generate_definitions(inputs_) 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
722 json_schema: dict[str, Any] = {} 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
723 if definitions: 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
724 json_schema['$defs'] = definitions 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
725 if title: 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
726 json_schema['title'] = title 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
727 if description: 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
728 json_schema['description'] = description 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO
730 return json_schemas_map, json_schema 1opqrstabcdGHIefuvwxyzghijJKLABCDEFklmnMNO