Coverage for pydantic/type_adapter.py: 91.62%
153 statements
coverage.py v7.6.12, created at 2025-02-13 19:35 +0000
1"""Type adapter specification."""
3from __future__ import annotations as _annotations 1opqrstabcdefuvwxyzghijABCDEFklmn
5import sys 1opqrstabcdefuvwxyzghijABCDEFklmn
6from collections.abc import Callable, Iterable 1opqrstabcdefuvwxyzghijABCDEFklmn
7from dataclasses import is_dataclass 1opqrstabcdefuvwxyzghijABCDEFklmn
8from types import FrameType 1opqrstabcdefuvwxyzghijABCDEFklmn
9from typing import ( 1opqrstabcdefuvwxyzghijABCDEFklmn
10 Any,
11 Generic,
12 Literal,
13 TypeVar,
14 cast,
15 final,
16 overload,
17)
19from pydantic_core import CoreSchema, SchemaSerializer, SchemaValidator, Some 1opqrstabcdefuvwxyzghijABCDEFklmn
20from typing_extensions import ParamSpec, is_typeddict 1opqrstabcdefuvwxyzghijABCDEFklmn
22from pydantic.errors import PydanticUserError 1opqrstabcdefuvwxyzghijABCDEFklmn
23from pydantic.main import BaseModel, IncEx 1opqrstabcdefuvwxyzghijABCDEFklmn
25from ._internal import _config, _generate_schema, _mock_val_ser, _namespace_utils, _repr, _typing_extra, _utils 1opqrstabcdefuvwxyzghijABCDEFklmn
26from .config import ConfigDict 1opqrstabcdefuvwxyzghijABCDEFklmn
27from .errors import PydanticUndefinedAnnotation 1opqrstabcdefuvwxyzghijABCDEFklmn
28from .json_schema import ( 1opqrstabcdefuvwxyzghijABCDEFklmn
29 DEFAULT_REF_TEMPLATE,
30 GenerateJsonSchema,
31 JsonSchemaKeyT,
32 JsonSchemaMode,
33 JsonSchemaValue,
34)
35from .plugin._schema_validator import PluggableSchemaValidator, create_schema_validator 1opqrstabcdefuvwxyzghijABCDEFklmn
37T = TypeVar('T') 1opqrstabcdefuvwxyzghijABCDEFklmn
38R = TypeVar('R') 1opqrstabcdefuvwxyzghijABCDEFklmn
39P = ParamSpec('P') 1opqrstabcdefuvwxyzghijABCDEFklmn
40TypeAdapterT = TypeVar('TypeAdapterT', bound='TypeAdapter') 1opqrstabcdefuvwxyzghijABCDEFklmn
43def _getattr_no_parents(obj: Any, attribute: str) -> Any: 1opqrstabcdefuvwxyzghijABCDEFklmn
44 """Returns the attribute value without attempting to look up attributes from parent types."""
45 if hasattr(obj, '__dict__'): 1opqrstabcdefuvwxyzghijABCDEFklmn
46 try: 1opqrstabcdefuvwxyzghijABCDEFklmn
47 return obj.__dict__[attribute] 1opqrstabcdefuvwxyzghijABCDEFklmn
48 except KeyError: 1opqrstabcdefuvwxyzghijABCDEFklmn
49 pass 1opqrstabcdefuvwxyzghijABCDEFklmn
51 slots = getattr(obj, '__slots__', None) 1opqrstabcdefuvwxyzghijABCDEFklmn
52 if slots is not None and attribute in slots: 52 ↛ 53line 52 didn't jump to line 53 because the condition on line 52 was never true1opqrstabcdefuvwxyzghijABCDEFklmn
53 return getattr(obj, attribute)
54 else:
55 raise AttributeError(attribute) 1opqrstabcdefuvwxyzghijABCDEFklmn
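
# For illustration, with hypothetical classes `A` and `B`: given
#
#     class A:
#         x = 1
#
#     class B(A):
#         pass
#
# `_getattr_no_parents(B, 'x')` raises `AttributeError` even though `getattr(B, 'x')` returns 1,
# because the lookup is limited to `B.__dict__` (and `B.__slots__`, if defined).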


def _type_has_config(type_: Any) -> bool:
    """Returns whether the type has config."""
    type_ = _typing_extra.annotated_type(type_) or type_
    try:
        return issubclass(type_, BaseModel) or is_dataclass(type_) or is_typeddict(type_)
    except TypeError:
        # type is not a class
        return False


@final
class TypeAdapter(Generic[T]):
    """!!! abstract "Usage Documentation"
        [`TypeAdapter`](../concepts/type_adapter.md)

    Type adapters provide a flexible way to perform validation and serialization based on a Python type.

    A `TypeAdapter` instance exposes some of the functionality from `BaseModel` instance methods
    for types that do not have such methods (such as dataclasses, primitive types, and more).

    **Note:** `TypeAdapter` instances are not types, and cannot be used as type annotations for fields.
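
    Example (an illustrative sketch; `list[int]` is just an arbitrary adapted type):

    ```python
    from pydantic import TypeAdapter

    adapter = TypeAdapter(list[int])
    assert adapter.validate_python(['1', 2]) == [1, 2]
    assert adapter.dump_json([1, 2]) == b'[1,2]'
    ```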

    Args:
        type: The type associated with the `TypeAdapter`.
        config: Configuration for the `TypeAdapter`, should be a dictionary conforming to
            [`ConfigDict`][pydantic.config.ConfigDict].

            !!! note
                You cannot provide a configuration when instantiating a `TypeAdapter` if the type you're using
                has its own config that cannot be overridden (e.g. `BaseModel`, `TypedDict`, and `dataclass`). A
                [`type-adapter-config-unused`](../errors/usage_errors.md#type-adapter-config-unused) error will
                be raised in this case.
        _parent_depth: Depth at which to search for the [parent frame][frame-objects]. This frame is used when
            resolving forward annotations during schema building, by looking for the globals and locals of this
            frame. Defaults to 2, which will result in the frame where the `TypeAdapter` was instantiated.

            !!! note
                This parameter is named with an underscore to suggest its private nature and discourage use.
                It may be deprecated in a minor version, so we only recommend using it if you're comfortable
                with potential change in behavior/support. Its default value is 2 because internally,
                the `TypeAdapter` class makes another call to fetch the frame.
        module: The module passed to the plugin, if provided.

    Attributes:
        core_schema: The core schema for the type.
        validator: The schema validator for the type.
        serializer: The schema serializer for the type.
        pydantic_complete: Whether the core schema for the type was successfully built.

    ??? tip "Compatibility with `mypy`"
        Depending on the type used, `mypy` might raise an error when instantiating a `TypeAdapter`. As a workaround, you can explicitly
        annotate your variable:

        ```py
        from typing import Union

        from pydantic import TypeAdapter

        ta: TypeAdapter[Union[str, int]] = TypeAdapter(Union[str, int])  # type: ignore[arg-type]
        ```

    ??? info "Namespace management nuances and implementation details"

        Here, we collect some notes on namespace management, and subtle differences from `BaseModel`:

        `BaseModel` uses its own `__module__` to find out where it was defined
        and then looks for symbols to resolve forward references in those globals.
        On the other hand, `TypeAdapter` can be initialized with arbitrary objects,
        which may not be types and thus do not have a `__module__` available.
        So instead we look at the globals in our parent stack frame.

        It is expected that the `ns_resolver` used during schema building will have the correct
        namespace for the type we're adapting. See the source code for `TypeAdapter.__init__`
        and `TypeAdapter.rebuild` for various ways to construct this namespace.

        This works when the `TypeAdapter` is created in a module that
        has the target of forward references in its scope, but
        does not always work for more complex cases.

        For example, take the following:

        ```python {title="a.py"}
        IntList = list[int]
        OuterDict = dict[str, 'IntList']
        ```

        ```python {test="skip" title="b.py"}
        from a import OuterDict

        from pydantic import TypeAdapter

        IntList = int  # replaces the symbol the forward reference is looking for
        v = TypeAdapter(OuterDict)
        v.validate_python({'x': 1})  # should fail but doesn't
        ```

        If `OuterDict` were a `BaseModel`, this would work because it would resolve
        the forward reference within the `a.py` namespace.
        But `TypeAdapter(OuterDict)` can't determine what module `OuterDict` came from.

        In other words, the assumption that _all_ forward references exist in the
        module we are being called from is not technically always true.
        Although most of the time it is and it works fine for recursive models and such,
        `BaseModel`'s behavior isn't perfect either and _can_ break in similar ways,
        so there is no right or wrong between the two.

        But at the very least this behavior is _subtly_ different from `BaseModel`'s.
    """

    core_schema: CoreSchema
    validator: SchemaValidator | PluggableSchemaValidator
    serializer: SchemaSerializer
    pydantic_complete: bool

    @overload
    def __init__(
        self,
        type: type[T],
        *,
        config: ConfigDict | None = ...,
        _parent_depth: int = ...,
        module: str | None = ...,
    ) -> None: ...

    # This second overload is for unsupported special forms (such as Annotated, Union, etc.)
    # Currently there is no way to type this correctly
    # See https://github.com/python/typing/pull/1618
    @overload
    def __init__(
        self,
        type: Any,
        *,
        config: ConfigDict | None = ...,
        _parent_depth: int = ...,
        module: str | None = ...,
    ) -> None: ...

    def __init__(
        self,
        type: Any,
        *,
        config: ConfigDict | None = None,
        _parent_depth: int = 2,
        module: str | None = None,
    ) -> None:
        if _type_has_config(type) and config is not None:
            raise PydanticUserError(
                'Cannot use `config` when the type is a BaseModel, dataclass or TypedDict.'
                ' These types can have their own config and setting the config via the `config`'
                ' parameter to TypeAdapter will not override it, thus the `config` you passed to'
                ' TypeAdapter becomes meaningless, which is probably not what you want.',
                code='type-adapter-config-unused',
            )

        self._type = type
        self._config = config
        self._parent_depth = _parent_depth
        self.pydantic_complete = False

        parent_frame = self._fetch_parent_frame()
        if parent_frame is not None:
            globalns = parent_frame.f_globals
            # Do not provide a local ns if the type adapter happens to be instantiated at the module level:
            localns = parent_frame.f_locals if parent_frame.f_locals is not globalns else {}
        else:
            globalns = {}
            localns = {}

        self._module_name = module or cast(str, globalns.get('__name__', ''))
        self._init_core_attrs(
            ns_resolver=_namespace_utils.NsResolver(
                namespaces_tuple=_namespace_utils.NamespacesTuple(locals=localns, globals=globalns),
                parent_namespace=localns,
            ),
            force=False,
        )

    def _fetch_parent_frame(self) -> FrameType | None:
        frame = sys._getframe(self._parent_depth)
        if frame.f_globals.get('__name__') == 'typing':
            # Because `TypeAdapter` is generic, explicitly parametrizing the class results
            # in a `typing._GenericAlias` instance, which proxies instantiation calls to the
            # "real" `TypeAdapter` class and thus adds an extra frame to the call. To avoid
            # pulling anything from the `typing` module, use the correct frame (the one before):
            return frame.f_back

        return frame

    def _init_core_attrs(
        self, ns_resolver: _namespace_utils.NsResolver, force: bool, raise_errors: bool = False
    ) -> bool:
        """Initialize the core schema, validator, and serializer for the type.

        Args:
            ns_resolver: The namespace resolver to use when building the core schema for the adapted type.
            force: Whether to force the construction of the core schema, validator, and serializer.
                If `force` is set to `False` and `_defer_build` is `True`, the core schema, validator, and serializer will be set to mocks.
            raise_errors: Whether to raise errors if initializing any of the core attrs fails.

        Returns:
            `True` if the core schema, validator, and serializer were successfully initialized, otherwise `False`.

        Raises:
            PydanticUndefinedAnnotation: If `PydanticUndefinedAnnotation` occurs in `__get_pydantic_core_schema__`
                and `raise_errors=True`.
        """
        if not force and self._defer_build:
            _mock_val_ser.set_type_adapter_mocks(self)
            self.pydantic_complete = False
            return False

        try:
            self.core_schema = _getattr_no_parents(self._type, '__pydantic_core_schema__')
            self.validator = _getattr_no_parents(self._type, '__pydantic_validator__')
            self.serializer = _getattr_no_parents(self._type, '__pydantic_serializer__')

            # TODO: we don't go through the rebuild logic here directly because we don't want
            # to repeat all of the namespace fetching logic that we've already done
            # so we simply skip to the block below that does the actual schema generation
            if (
                isinstance(self.core_schema, _mock_val_ser.MockCoreSchema)
                or isinstance(self.validator, _mock_val_ser.MockValSer)
                or isinstance(self.serializer, _mock_val_ser.MockValSer)
            ):
                raise AttributeError()
        except AttributeError:
            config_wrapper = _config.ConfigWrapper(self._config)

            schema_generator = _generate_schema.GenerateSchema(config_wrapper, ns_resolver=ns_resolver)

            try:
                core_schema = schema_generator.generate_schema(self._type)
            except PydanticUndefinedAnnotation:
                if raise_errors:
                    raise
                _mock_val_ser.set_type_adapter_mocks(self)
                return False

            try:
                self.core_schema = schema_generator.clean_schema(core_schema)
            except _generate_schema.InvalidSchemaError:
                _mock_val_ser.set_type_adapter_mocks(self)
                return False

            core_config = config_wrapper.core_config(None)

            self.validator = create_schema_validator(
                schema=self.core_schema,
                schema_type=self._type,
                schema_type_module=self._module_name,
                schema_type_name=str(self._type),
                schema_kind='TypeAdapter',
                config=core_config,
                plugin_settings=config_wrapper.plugin_settings,
            )
            self.serializer = SchemaSerializer(self.core_schema, core_config)

        self.pydantic_complete = True
        return True

    @property
    def _defer_build(self) -> bool:
        config = self._config if self._config is not None else self._model_config
        if config:
            return config.get('defer_build') is True
        return False

    @property
    def _model_config(self) -> ConfigDict | None:
        type_: Any = _typing_extra.annotated_type(self._type) or self._type  # E.g. FastAPI heavily uses Annotated
        if _utils.lenient_issubclass(type_, BaseModel):
            return type_.model_config
        return getattr(type_, '__pydantic_config__', None)

    def __repr__(self) -> str:
        return f'TypeAdapter({_repr.display_as_type(self._type)})'

    def rebuild(
        self,
        *,
        force: bool = False,
        raise_errors: bool = True,
        _parent_namespace_depth: int = 2,
        _types_namespace: _namespace_utils.MappingNamespace | None = None,
    ) -> bool | None:
        """Try to rebuild the pydantic-core schema for the adapter's type.

        This may be necessary when one of the annotations is a ForwardRef which could not be resolved during
        the initial attempt to build the schema, and automatic rebuilding fails.

        Args:
            force: Whether to force the rebuilding of the type adapter's schema, defaults to `False`.
            raise_errors: Whether to raise errors, defaults to `True`.
            _parent_namespace_depth: Depth at which to search for the [parent frame][frame-objects]. This
                frame is used when resolving forward annotations during schema rebuilding, by looking for
                the locals of this frame. Defaults to 2, which will result in the frame where the method
                was called.
            _types_namespace: An explicit types namespace to use, instead of using the local namespace
                from the parent frame. Defaults to `None`.

        Returns:
            Returns `None` if the schema is already "complete" and rebuilding was not required.
            If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`.
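
        Example (an illustrative sketch using a deferred build; `list[int]` stands in for any type
        with no unresolved forward references):

        ```python
        from pydantic import ConfigDict, TypeAdapter

        ta = TypeAdapter(list[int], config=ConfigDict(defer_build=True))
        assert ta.rebuild() is True   # the deferred schema is built on demand
        assert ta.rebuild() is None   # already complete, nothing to rebuild
        ```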
        """
        if not force and self.pydantic_complete:
            return None

        if _types_namespace is not None:
            rebuild_ns = _types_namespace
        elif _parent_namespace_depth > 0:
            rebuild_ns = _typing_extra.parent_frame_namespace(parent_depth=_parent_namespace_depth, force=True) or {}
        else:
            rebuild_ns = {}

        # we have to manually fetch globals here because there's no type on the stack of the NsResolver
        # and so we skip the globalns = get_module_ns_of(typ) call that would normally happen
        globalns = sys._getframe(max(_parent_namespace_depth - 1, 1)).f_globals
        ns_resolver = _namespace_utils.NsResolver(
            namespaces_tuple=_namespace_utils.NamespacesTuple(locals=rebuild_ns, globals=globalns),
            parent_namespace=rebuild_ns,
        )
        return self._init_core_attrs(ns_resolver=ns_resolver, force=True, raise_errors=raise_errors)

    def validate_python(
        self,
        object: Any,
        /,
        *,
        strict: bool | None = None,
        from_attributes: bool | None = None,
        context: dict[str, Any] | None = None,
        experimental_allow_partial: bool | Literal['off', 'on', 'trailing-strings'] = False,
    ) -> T:
        """Validate a Python object against the model.

        Args:
            object: The Python object to validate against the model.
            strict: Whether to strictly check types.
            from_attributes: Whether to extract data from object attributes.
            context: Additional context to pass to the validator.
            experimental_allow_partial: **Experimental** whether to enable
                [partial validation](../concepts/experimental.md#partial-validation), e.g. to process streams.
                * False / 'off': Default behavior, no partial validation.
                * True / 'on': Enable partial validation.
                * 'trailing-strings': Enable partial validation and allow trailing strings in the input.

        !!! note
            When using `TypeAdapter` with a Pydantic `dataclass`, the use of the `from_attributes`
            argument is not supported.

        Returns:
            The validated object.
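
        Example (an illustrative sketch; `list[int]` stands in for any adapted type):

        ```python
        from pydantic import TypeAdapter

        ta = TypeAdapter(list[int])
        assert ta.validate_python(['1', 2]) == [1, 2]  # lax mode coerces the string '1'
        # ta.validate_python(['1', 2], strict=True) would raise a ValidationError instead
        ```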
        """
        return self.validator.validate_python(
            object,
            strict=strict,
            from_attributes=from_attributes,
            context=context,
            allow_partial=experimental_allow_partial,
        )

    def validate_json(
        self,
        data: str | bytes | bytearray,
        /,
        *,
        strict: bool | None = None,
        context: dict[str, Any] | None = None,
        experimental_allow_partial: bool | Literal['off', 'on', 'trailing-strings'] = False,
    ) -> T:
        """!!! abstract "Usage Documentation"
            [JSON Parsing](../concepts/json.md#json-parsing)

        Validate a JSON string or bytes against the model.

        Args:
            data: The JSON data to validate against the model.
            strict: Whether to strictly check types.
            context: Additional context to use during validation.
            experimental_allow_partial: **Experimental** whether to enable
                [partial validation](../concepts/experimental.md#partial-validation), e.g. to process streams.
                * False / 'off': Default behavior, no partial validation.
                * True / 'on': Enable partial validation.
                * 'trailing-strings': Enable partial validation and allow trailing strings in the input.

        Returns:
            The validated object.
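
        Example (an illustrative sketch; `dict[str, int]` is an arbitrary adapted type):

        ```python
        from pydantic import TypeAdapter

        ta = TypeAdapter(dict[str, int])
        assert ta.validate_json('{"a": 1}') == {'a': 1}
        ```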
        """
        return self.validator.validate_json(
            data, strict=strict, context=context, allow_partial=experimental_allow_partial
        )

    def validate_strings(
        self,
        obj: Any,
        /,
        *,
        strict: bool | None = None,
        context: dict[str, Any] | None = None,
        experimental_allow_partial: bool | Literal['off', 'on', 'trailing-strings'] = False,
    ) -> T:
        """Validate an object containing string data against the model.

        Args:
            obj: The object containing string data to validate.
            strict: Whether to strictly check types.
            context: Additional context to use during validation.
            experimental_allow_partial: **Experimental** whether to enable
                [partial validation](../concepts/experimental.md#partial-validation), e.g. to process streams.
                * False / 'off': Default behavior, no partial validation.
                * True / 'on': Enable partial validation.
                * 'trailing-strings': Enable partial validation and allow trailing strings in the input.

        Returns:
            The validated object.
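
        Example (an illustrative sketch; the adapted type and input are arbitrary):

        ```python
        from datetime import date

        from pydantic import TypeAdapter

        ta = TypeAdapter(dict[str, date])
        assert ta.validate_strings({'d': '2024-01-01'}) == {'d': date(2024, 1, 1)}
        ```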
        """
        return self.validator.validate_strings(
            obj, strict=strict, context=context, allow_partial=experimental_allow_partial
        )

    def get_default_value(self, *, strict: bool | None = None, context: dict[str, Any] | None = None) -> Some[T] | None:
        """Get the default value for the wrapped type.

        Args:
            strict: Whether to strictly check types.
            context: Additional context to pass to the validator.

        Returns:
            The default value wrapped in a `Some` if there is one, or `None` if not.
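
        Example (an illustrative sketch; a plain `int` has no default, so `None` is returned, while a
        type whose core schema carries a default would come back wrapped in `Some`):

        ```python
        from pydantic import TypeAdapter

        assert TypeAdapter(int).get_default_value() is None
        ```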
        """
        return self.validator.get_default_value(strict=strict, context=context)

    def dump_python(
        self,
        instance: T,
        /,
        *,
        mode: Literal['json', 'python'] = 'python',
        include: IncEx | None = None,
        exclude: IncEx | None = None,
        by_alias: bool = False,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        round_trip: bool = False,
        warnings: bool | Literal['none', 'warn', 'error'] = True,
        fallback: Callable[[Any], Any] | None = None,
        serialize_as_any: bool = False,
        context: dict[str, Any] | None = None,
    ) -> Any:
        """Dump an instance of the adapted type to a Python object.

        Args:
            instance: The Python object to serialize.
            mode: The output format.
            include: Fields to include in the output.
            exclude: Fields to exclude from the output.
            by_alias: Whether to use alias names for field names.
            exclude_unset: Whether to exclude unset fields.
            exclude_defaults: Whether to exclude fields with default values.
            exclude_none: Whether to exclude fields with None values.
            round_trip: Whether to output the serialized data in a way that is compatible with deserialization.
            warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
                "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
            fallback: A function to call when an unknown value is encountered. If not provided,
                a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
            serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.
            context: Additional context to pass to the serializer.

        Returns:
            The serialized object.
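
        Example (an illustrative sketch; `list[date]` is an arbitrary adapted type):

        ```python
        from datetime import date

        from pydantic import TypeAdapter

        ta = TypeAdapter(list[date])
        assert ta.dump_python([date(2024, 1, 1)]) == [date(2024, 1, 1)]
        assert ta.dump_python([date(2024, 1, 1)], mode='json') == ['2024-01-01']
        ```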
        """
        return self.serializer.to_python(
            instance,
            mode=mode,
            by_alias=by_alias,
            include=include,
            exclude=exclude,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            round_trip=round_trip,
            warnings=warnings,
            fallback=fallback,
            serialize_as_any=serialize_as_any,
            context=context,
        )

    def dump_json(
        self,
        instance: T,
        /,
        *,
        indent: int | None = None,
        include: IncEx | None = None,
        exclude: IncEx | None = None,
        by_alias: bool = False,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        round_trip: bool = False,
        warnings: bool | Literal['none', 'warn', 'error'] = True,
        fallback: Callable[[Any], Any] | None = None,
        serialize_as_any: bool = False,
        context: dict[str, Any] | None = None,
    ) -> bytes:
        """!!! abstract "Usage Documentation"
            [JSON Serialization](../concepts/json.md#json-serialization)

        Serialize an instance of the adapted type to JSON.

        Args:
            instance: The instance to be serialized.
            indent: Number of spaces for JSON indentation.
            include: Fields to include.
            exclude: Fields to exclude.
            by_alias: Whether to use alias names for field names.
            exclude_unset: Whether to exclude unset fields.
            exclude_defaults: Whether to exclude fields with default values.
            exclude_none: Whether to exclude fields with a value of `None`.
            round_trip: Whether to serialize and deserialize the instance to ensure round-tripping.
            warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
                "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
            fallback: A function to call when an unknown value is encountered. If not provided,
                a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
            serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.
            context: Additional context to pass to the serializer.

        Returns:
            The JSON representation of the given instance as bytes.
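
        Example (an illustrative sketch; note the compact `bytes` output):

        ```python
        from pydantic import TypeAdapter

        ta = TypeAdapter(list[int])
        assert ta.dump_json([1, 2]) == b'[1,2]'
        ```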
        """
        return self.serializer.to_json(
            instance,
            indent=indent,
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            round_trip=round_trip,
            warnings=warnings,
            fallback=fallback,
            serialize_as_any=serialize_as_any,
            context=context,
        )

    def json_schema(
        self,
        *,
        by_alias: bool = True,
        ref_template: str = DEFAULT_REF_TEMPLATE,
        schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
        mode: JsonSchemaMode = 'validation',
    ) -> dict[str, Any]:
        """Generate a JSON schema for the adapted type.

        Args:
            by_alias: Whether to use alias names for field names.
            ref_template: The format string used for generating $ref strings.
            schema_generator: The generator class used for creating the schema.
            mode: The mode to use for schema generation.

        Returns:
            The JSON schema for the model as a dictionary.
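
        Example (an illustrative sketch; `list[int]` is an arbitrary adapted type):

        ```python
        from pydantic import TypeAdapter

        assert TypeAdapter(list[int]).json_schema() == {'items': {'type': 'integer'}, 'type': 'array'}
        ```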
        """
        schema_generator_instance = schema_generator(by_alias=by_alias, ref_template=ref_template)
        if isinstance(self.core_schema, _mock_val_ser.MockCoreSchema):
            self.core_schema.rebuild()
            assert not isinstance(self.core_schema, _mock_val_ser.MockCoreSchema), 'this is a bug! please report it'
        return schema_generator_instance.generate(self.core_schema, mode=mode)

    @staticmethod
    def json_schemas(
        inputs: Iterable[tuple[JsonSchemaKeyT, JsonSchemaMode, TypeAdapter[Any]]],
        /,
        *,
        by_alias: bool = True,
        title: str | None = None,
        description: str | None = None,
        ref_template: str = DEFAULT_REF_TEMPLATE,
        schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
    ) -> tuple[dict[tuple[JsonSchemaKeyT, JsonSchemaMode], JsonSchemaValue], JsonSchemaValue]:
        """Generate a JSON schema including definitions from multiple type adapters.

        Args:
            inputs: Inputs to schema generation. The first two items will form the keys of the (first)
                output mapping; the type adapters will provide the core schemas that get converted into
                definitions in the output JSON schema.
            by_alias: Whether to use alias names.
            title: The title for the schema.
            description: The description for the schema.
            ref_template: The format string used for generating $ref strings.
            schema_generator: The generator class used for creating the schema.

        Returns:
            A tuple where:

            - The first element is a dictionary whose keys are tuples of JSON schema key type and JSON mode, and
              whose values are the JSON schema corresponding to that pair of inputs. (These schemas may have
              JsonRef references to definitions that are defined in the second returned element.)
            - The second element is a JSON schema containing all definitions referenced in the first returned
              element, along with the optional title and description keys.
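
        Example (an illustrative sketch; the key `'ints'` and the adapted type are arbitrary):

        ```python
        from pydantic import TypeAdapter

        schemas_by_key, defs_schema = TypeAdapter.json_schemas(
            [('ints', 'validation', TypeAdapter(list[int]))],
            title='My definitions',
        )
        assert ('ints', 'validation') in schemas_by_key
        assert defs_schema['title'] == 'My definitions'
        ```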
        """
        schema_generator_instance = schema_generator(by_alias=by_alias, ref_template=ref_template)

        inputs_ = []
        for key, mode, adapter in inputs:
            # This is the same pattern we follow for model json schemas - we attempt a core schema rebuild if we detect a mock
            if isinstance(adapter.core_schema, _mock_val_ser.MockCoreSchema):
                adapter.core_schema.rebuild()
                assert not isinstance(adapter.core_schema, _mock_val_ser.MockCoreSchema), (
                    'this is a bug! please report it'
                )
            inputs_.append((key, mode, adapter.core_schema))

        json_schemas_map, definitions = schema_generator_instance.generate_definitions(inputs_)

        json_schema: dict[str, Any] = {}
        if definitions:
            json_schema['$defs'] = definitions
        if title:
            json_schema['title'] = title
        if description:
            json_schema['description'] = description

        return json_schemas_map, json_schema