Coverage for pydantic/type_adapter.py: 99.17%

162 statements  

coverage.py v7.5.4, created at 2024-07-03 19:29 +0000

1"""Type adapter specification.""" 

2 

3from __future__ import annotations as _annotations 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

4 

5import sys 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

6from contextlib import contextmanager 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

7from dataclasses import is_dataclass 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

8from functools import cached_property, wraps 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

9from typing import ( 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

10 TYPE_CHECKING, 

11 Any, 

12 Callable, 

13 Dict, 

14 Generic, 

15 Iterable, 

16 Iterator, 

17 Literal, 

18 Set, 

19 TypeVar, 

20 Union, 

21 cast, 

22 final, 

23 overload, 

24) 

25 

26from pydantic_core import CoreSchema, SchemaSerializer, SchemaValidator, Some 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

27from typing_extensions import Concatenate, ParamSpec, is_typeddict 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

28 

29from pydantic.errors import PydanticUserError 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

30from pydantic.main import BaseModel 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

31 

32from ._internal import _config, _generate_schema, _mock_val_ser, _typing_extra, _utils 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

33from .config import ConfigDict 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

34from .json_schema import ( 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

35 DEFAULT_REF_TEMPLATE, 

36 GenerateJsonSchema, 

37 JsonSchemaKeyT, 

38 JsonSchemaMode, 

39 JsonSchemaValue, 

40) 

41from .plugin._schema_validator import create_schema_validator 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

42 

43T = TypeVar('T') 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

44R = TypeVar('R') 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

45P = ParamSpec('P') 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

46TypeAdapterT = TypeVar('TypeAdapterT', bound='TypeAdapter') 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

47 

48 

49if TYPE_CHECKING: 1opqrstuvabcdefwxyzABCDghijOPQRSTUVMNEFGHIJKLklmn

50 # should be `set[int] | set[str] | dict[int, IncEx] | dict[str, IncEx] | None`, but mypy can't cope 

51 IncEx = Union[Set[int], Set[str], Dict[int, Any], Dict[str, Any]] 

52 

53 

def _get_schema(type_: Any, config_wrapper: _config.ConfigWrapper, parent_depth: int) -> CoreSchema:
    """`BaseModel` uses its own `__module__` to find out where it was defined
    and then looks for symbols to resolve forward references in those globals.
    On the other hand, this function can be called with arbitrary objects,
    including type aliases, where `__module__` (always `typing.py`) is not useful.
    So instead we look at the globals in our parent stack frame.

    This works for the case where this function is called in a module that
    has the target of forward references in its scope, but
    does not always work for more complex cases.

    For example, take the following:

    a.py
    ```python
    from typing import Dict, List

    IntList = List[int]
    OuterDict = Dict[str, 'IntList']
    ```

    b.py
    ```python test="skip"
    from a import OuterDict

    from pydantic import TypeAdapter

    IntList = int  # replaces the symbol the forward reference is looking for
    v = TypeAdapter(OuterDict)
    v.validate_python({'x': 1})  # should fail but doesn't
    ```

    If `OuterDict` were a `BaseModel`, this would work because it would resolve
    the forward reference within the `a.py` namespace.
    But `TypeAdapter(OuterDict)` can't determine what module `OuterDict` came from.

    In other words, the assumption that _all_ forward references exist in the
    module we are being called from is not technically always true.
    Although most of the time it is and it works fine for recursive models and such,
    `BaseModel`'s behavior isn't perfect either and _can_ break in similar ways,
    so there is no right or wrong between the two.

    But at the very least this behavior is _subtly_ different from `BaseModel`'s.
    """
    local_ns = _typing_extra.parent_frame_namespace(parent_depth=parent_depth)
    global_ns = sys._getframe(max(parent_depth - 1, 1)).f_globals.copy()
    global_ns.update(local_ns or {})
    gen = _generate_schema.GenerateSchema(config_wrapper, types_namespace=global_ns, typevars_map={})
    schema = gen.generate_schema(type_)
    schema = gen.clean_schema(schema)
    return schema
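
# Illustrative sketch (not part of the original pydantic source): how the parent-frame lookup
# above plays out in practice. `TypeAdapter` resolves a string forward reference against the
# caller's namespace, so the alias must be visible where the adapter is constructed.
#
#     from typing import Dict, List
#     from pydantic import TypeAdapter
#
#     IntList = List[int]                        # target of the forward reference
#     adapter = TypeAdapter(Dict[str, 'IntList'])
#     adapter.validate_python({'x': [1, '2']})   # -> {'x': [1, 2]}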


def _getattr_no_parents(obj: Any, attribute: str) -> Any:
    """Returns the attribute value without attempting to look up attributes from parent types."""
    if hasattr(obj, '__dict__'):
        try:
            return obj.__dict__[attribute]
        except KeyError:
            pass

    slots = getattr(obj, '__slots__', None)
    if slots is not None and attribute in slots:  # coverage: partial branch; this condition was never true in the recorded test runs
        return getattr(obj, attribute)
    else:
        raise AttributeError(attribute)
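
# Illustrative sketch (not part of the original pydantic source): why the "no parents" lookup
# matters. A subclass that does not define an attribute itself would still inherit one via plain
# `getattr`, which is exactly what this helper avoids. The class names are hypothetical.
#
#     class Parent:
#         attr = 1
#
#     class Child(Parent):
#         pass
#
#     getattr(Child, 'attr')               # -> 1 (found on Parent)
#     _getattr_no_parents(Child, 'attr')   # raises AttributeError: only Child's own
#                                          # __dict__ and __slots__ are consulted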


def _type_has_config(type_: Any) -> bool:
    """Returns whether the type has config."""
    type_ = _typing_extra.annotated_type(type_) or type_
    try:
        return issubclass(type_, BaseModel) or is_dataclass(type_) or is_typeddict(type_)
    except TypeError:
        # type is not a class
        return False


# This keeps track of the frame depth for the TypeAdapter functions. It is required for the _parent_depth used for
# ForwardRef resolution. We may enter TypeAdapter schema building via different TypeAdapter functions, so we need to
# track the frame depth relative to the originally provided _parent_depth.
def _frame_depth(
    depth: int,
) -> Callable[[Callable[Concatenate[TypeAdapterT, P], R]], Callable[Concatenate[TypeAdapterT, P], R]]:
    def wrapper(func: Callable[Concatenate[TypeAdapterT, P], R]) -> Callable[Concatenate[TypeAdapterT, P], R]:
        @wraps(func)
        def wrapped(self: TypeAdapterT, *args: P.args, **kwargs: P.kwargs) -> R:
            with self._with_frame_depth(depth + 1):  # depth + 1 for the wrapper function
                return func(self, *args, **kwargs)

        return wrapped

    return wrapper


@final
class TypeAdapter(Generic[T]):
    """Usage docs: https://docs.pydantic.dev/2.8/concepts/type_adapter/

    Type adapters provide a flexible way to perform validation and serialization based on a Python type.

    A `TypeAdapter` instance exposes some of the functionality from `BaseModel` instance methods
    for types that do not have such methods (such as dataclasses, primitive types, and more).

    **Note:** `TypeAdapter` instances are not types, and cannot be used as type annotations for fields.

    **Note:** By default, `TypeAdapter` does not respect the
    [`defer_build=True`][pydantic.config.ConfigDict.defer_build] setting in the
    [`model_config`][pydantic.BaseModel.model_config] or in the `TypeAdapter` constructor `config`. You also need to
    explicitly set [`experimental_defer_build_mode=('model', 'type_adapter')`][pydantic.config.ConfigDict.experimental_defer_build_mode]
    in the config to defer the model validator and serializer construction. Thus, this feature is opt-in to ensure
    backwards compatibility.

    Attributes:
        core_schema: The core schema for the type.
        validator (SchemaValidator): The schema validator for the type.
        serializer: The schema serializer for the type.
    """

    @overload
    def __init__(
        self,
        type: type[T],
        *,
        config: ConfigDict | None = ...,
        _parent_depth: int = ...,
        module: str | None = ...,
    ) -> None: ...

    # This second overload is for unsupported special forms (such as Annotated, Union, etc.)
    # Currently there is no way to type this correctly
    # See https://github.com/python/typing/pull/1618
    @overload
    def __init__(
        self,
        type: Any,
        *,
        config: ConfigDict | None = ...,
        _parent_depth: int = ...,
        module: str | None = ...,
    ) -> None: ...

    def __init__(
        self,
        type: Any,
        *,
        config: ConfigDict | None = None,
        _parent_depth: int = 2,
        module: str | None = None,
    ) -> None:
        """Initializes the TypeAdapter object.

        Args:
            type: The type associated with the `TypeAdapter`.
            config: Configuration for the `TypeAdapter`, should be a dictionary conforming to
                [`ConfigDict`][pydantic.config.ConfigDict].
            _parent_depth: Depth at which to search the parent namespace to construct the local namespace.
            module: The module passed to the plugin, if provided.

        !!! note
            You cannot use the `config` argument when instantiating a `TypeAdapter` if the type you're using has its own
            config that cannot be overridden (e.g. `BaseModel`, `TypedDict`, and `dataclass`). A
            [`type-adapter-config-unused`](../errors/usage_errors.md#type-adapter-config-unused) error will be raised in this case.

        !!! note
            The `_parent_depth` argument is named with an underscore to suggest its private nature and discourage use.
            It may be deprecated in a minor version, so we only recommend using it if you're
            comfortable with potential changes in behavior/support.

        ??? tip "Compatibility with `mypy`"
            Depending on the type used, `mypy` might raise an error when instantiating a `TypeAdapter`. As a workaround,
            you can explicitly annotate your variable:

            ```py
            from typing import Union

            from pydantic import TypeAdapter

            ta: TypeAdapter[Union[str, int]] = TypeAdapter(Union[str, int])  # type: ignore[arg-type]
            ```

        Returns:
            A type adapter configured for the specified `type`.
        """
        if _type_has_config(type) and config is not None:
            raise PydanticUserError(
                'Cannot use `config` when the type is a BaseModel, dataclass or TypedDict.'
                ' These types can have their own config and setting the config via the `config`'
                ' parameter to TypeAdapter will not override it, thus the `config` you passed to'
                ' TypeAdapter becomes meaningless, which is probably not what you want.',
                code='type-adapter-config-unused',
            )

        self._type = type
        self._config = config
        self._parent_depth = _parent_depth
        if module is None:
            f = sys._getframe(1)
            self._module_name = cast(str, f.f_globals.get('__name__', ''))
        else:
            self._module_name = module

        self._core_schema: CoreSchema | None = None
        self._validator: SchemaValidator | None = None
        self._serializer: SchemaSerializer | None = None

        if not self._defer_build():
            # Immediately initialize the core schema, validator and serializer
            with self._with_frame_depth(1):  # +1 frame depth for this __init__
                # Model itself may be using deferred building. For backward compatibility we don't rebuild model mocks
                # here as part of __init__ even though TypeAdapter itself is not using deferred building.
                self._init_core_attrs(rebuild_mocks=False)
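
    # Illustrative sketch (not part of the original pydantic source): typical construction.
    # Unless deferred building is enabled, the core schema, validator and serializer are built here.
    #
    #     from typing import List
    #     from pydantic import TypeAdapter
    #
    #     adapter = TypeAdapter(List[int])   # schema/validator/serializer built eagerly
    #     adapter.validate_python(['1', 2])  # -> [1, 2]
    #     adapter.dump_json([1, 2])          # -> b'[1,2]'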

    @contextmanager
    def _with_frame_depth(self, depth: int) -> Iterator[None]:
        self._parent_depth += depth
        try:
            yield
        finally:
            self._parent_depth -= depth

    @_frame_depth(1)
    def _init_core_attrs(self, rebuild_mocks: bool) -> None:
        try:
            self._core_schema = _getattr_no_parents(self._type, '__pydantic_core_schema__')
            self._validator = _getattr_no_parents(self._type, '__pydantic_validator__')
            self._serializer = _getattr_no_parents(self._type, '__pydantic_serializer__')
        except AttributeError:
            config_wrapper = _config.ConfigWrapper(self._config)
            core_config = config_wrapper.core_config(None)

            self._core_schema = _get_schema(self._type, config_wrapper, parent_depth=self._parent_depth)
            self._validator = create_schema_validator(
                schema=self._core_schema,
                schema_type=self._type,
                schema_type_module=self._module_name,
                schema_type_name=str(self._type),
                schema_kind='TypeAdapter',
                config=core_config,
                plugin_settings=config_wrapper.plugin_settings,
            )
            self._serializer = SchemaSerializer(self._core_schema, core_config)

        if rebuild_mocks and isinstance(self._core_schema, _mock_val_ser.MockCoreSchema):
            self._core_schema.rebuild()
            self._init_core_attrs(rebuild_mocks=False)
            assert not isinstance(self._core_schema, _mock_val_ser.MockCoreSchema)
            assert not isinstance(self._validator, _mock_val_ser.MockValSer)
            assert not isinstance(self._serializer, _mock_val_ser.MockValSer)

    @cached_property
    @_frame_depth(2)  # +2 for @cached_property and core_schema(self)
    def core_schema(self) -> CoreSchema:
        """The pydantic-core schema used to build the SchemaValidator and SchemaSerializer."""
        if self._core_schema is None or isinstance(self._core_schema, _mock_val_ser.MockCoreSchema):
            self._init_core_attrs(rebuild_mocks=True)  # Do not expose MockCoreSchema from public function
        assert self._core_schema is not None and not isinstance(self._core_schema, _mock_val_ser.MockCoreSchema)
        return self._core_schema

    @cached_property
    @_frame_depth(2)  # +2 for @cached_property + validator(self)
    def validator(self) -> SchemaValidator:
        """The pydantic-core SchemaValidator used to validate instances of the model."""
        if not isinstance(self._validator, SchemaValidator):
            self._init_core_attrs(rebuild_mocks=True)  # Do not expose MockValSer from public function
        assert isinstance(self._validator, SchemaValidator)
        return self._validator

    @cached_property
    @_frame_depth(2)  # +2 for @cached_property + serializer(self)
    def serializer(self) -> SchemaSerializer:
        """The pydantic-core SchemaSerializer used to dump instances of the model."""
        if not isinstance(self._serializer, SchemaSerializer):
            self._init_core_attrs(rebuild_mocks=True)  # Do not expose MockValSer from public function
        assert isinstance(self._serializer, SchemaSerializer)
        return self._serializer

    def _defer_build(self) -> bool:
        config = self._config if self._config is not None else self._model_config()
        return self._is_defer_build_config(config) if config is not None else False

    def _model_config(self) -> ConfigDict | None:
        type_: Any = _typing_extra.annotated_type(self._type) or self._type  # e.g. FastAPI heavily uses Annotated
        if _utils.lenient_issubclass(type_, BaseModel):
            return type_.model_config
        return getattr(type_, '__pydantic_config__', None)

    @staticmethod
    def _is_defer_build_config(config: ConfigDict) -> bool:
        # TODO reevaluate this logic when we have a better understanding of how defer_build should work with TypeAdapter
        # Should we drop the special experimental_defer_build_mode check?
        return config.get('defer_build', False) is True and 'type_adapter' in config.get(
            'experimental_defer_build_mode', tuple()
        )
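
    # Illustrative sketch (not part of the original pydantic source): opting in to deferred
    # building, per the class docstring above. Both `defer_build` and the experimental mode
    # flag are required for a TypeAdapter to defer construction.
    #
    #     from typing import List
    #     from pydantic import ConfigDict, TypeAdapter
    #
    #     ta = TypeAdapter(
    #         List[int],
    #         config=ConfigDict(defer_build=True, experimental_defer_build_mode=('model', 'type_adapter')),
    #     )
    #     # core schema, validator and serializer are now built lazily, on first access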

    @_frame_depth(1)
    def validate_python(
        self,
        object: Any,
        /,
        *,
        strict: bool | None = None,
        from_attributes: bool | None = None,
        context: dict[str, Any] | None = None,
    ) -> T:
        """Validate a Python object against the model.

        Args:
            object: The Python object to validate against the model.
            strict: Whether to strictly check types.
            from_attributes: Whether to extract data from object attributes.
            context: Additional context to pass to the validator.

        !!! note
            When using `TypeAdapter` with a Pydantic `dataclass`, the use of the `from_attributes`
            argument is not supported.

        Returns:
            The validated object.
        """
        return self.validator.validate_python(object, strict=strict, from_attributes=from_attributes, context=context)
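
    # Illustrative sketch (not part of the original pydantic source): strict vs. lax validation
    # with the method above.
    #
    #     from pydantic import TypeAdapter
    #
    #     ta = TypeAdapter(int)
    #     ta.validate_python('3')               # -> 3 (lax mode coerces the string)
    #     ta.validate_python('3', strict=True)  # raises pydantic.ValidationError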

    @_frame_depth(1)
    def validate_json(
        self, data: str | bytes, /, *, strict: bool | None = None, context: dict[str, Any] | None = None
    ) -> T:
        """Usage docs: https://docs.pydantic.dev/2.8/concepts/json/#json-parsing

        Validate a JSON string or bytes against the model.

        Args:
            data: The JSON data to validate against the model.
            strict: Whether to strictly check types.
            context: Additional context to use during validation.

        Returns:
            The validated object.
        """
        return self.validator.validate_json(data, strict=strict, context=context)

    @_frame_depth(1)
    def validate_strings(self, obj: Any, /, *, strict: bool | None = None, context: dict[str, Any] | None = None) -> T:
        """Validate an object containing string data against the model.

        Args:
            obj: The object containing string data to validate.
            strict: Whether to strictly check types.
            context: Additional context to use during validation.

        Returns:
            The validated object.
        """
        return self.validator.validate_strings(obj, strict=strict, context=context)

    @_frame_depth(1)
    def get_default_value(self, *, strict: bool | None = None, context: dict[str, Any] | None = None) -> Some[T] | None:
        """Get the default value for the wrapped type.

        Args:
            strict: Whether to strictly check types.
            context: Additional context to pass to the validator.

        Returns:
            The default value wrapped in a `Some` if there is one, or `None` if not.
        """
        return self.validator.get_default_value(strict=strict, context=context)

    @_frame_depth(1)
    def dump_python(
        self,
        instance: T,
        /,
        *,
        mode: Literal['json', 'python'] = 'python',
        include: IncEx | None = None,
        exclude: IncEx | None = None,
        by_alias: bool = False,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        round_trip: bool = False,
        warnings: bool | Literal['none', 'warn', 'error'] = True,
        serialize_as_any: bool = False,
        context: dict[str, Any] | None = None,
    ) -> Any:
        """Dump an instance of the adapted type to a Python object.

        Args:
            instance: The Python object to serialize.
            mode: The output format.
            include: Fields to include in the output.
            exclude: Fields to exclude from the output.
            by_alias: Whether to use alias names for field names.
            exclude_unset: Whether to exclude unset fields.
            exclude_defaults: Whether to exclude fields with default values.
            exclude_none: Whether to exclude fields with None values.
            round_trip: Whether to output the serialized data in a way that is compatible with deserialization.
            warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
                "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
            serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.
            context: Additional context to pass to the serializer.

        Returns:
            The serialized object.
        """
        return self.serializer.to_python(
            instance,
            mode=mode,
            by_alias=by_alias,
            include=include,
            exclude=exclude,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            round_trip=round_trip,
            warnings=warnings,
            serialize_as_any=serialize_as_any,
            context=context,
        )

    @_frame_depth(1)
    def dump_json(
        self,
        instance: T,
        /,
        *,
        indent: int | None = None,
        include: IncEx | None = None,
        exclude: IncEx | None = None,
        by_alias: bool = False,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        round_trip: bool = False,
        warnings: bool | Literal['none', 'warn', 'error'] = True,
        serialize_as_any: bool = False,
        context: dict[str, Any] | None = None,
    ) -> bytes:
        """Usage docs: https://docs.pydantic.dev/2.8/concepts/json/#json-serialization

        Serialize an instance of the adapted type to JSON.

        Args:
            instance: The instance to be serialized.
            indent: Number of spaces for JSON indentation.
            include: Fields to include.
            exclude: Fields to exclude.
            by_alias: Whether to use alias names for field names.
            exclude_unset: Whether to exclude unset fields.
            exclude_defaults: Whether to exclude fields with default values.
            exclude_none: Whether to exclude fields with a value of `None`.
            round_trip: Whether to serialize and deserialize the instance to ensure round-tripping.
            warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
                "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
            serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.
            context: Additional context to pass to the serializer.

        Returns:
            The JSON representation of the given instance as bytes.
        """
        return self.serializer.to_json(
            instance,
            indent=indent,
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            round_trip=round_trip,
            warnings=warnings,
            serialize_as_any=serialize_as_any,
            context=context,
        )
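
    # Illustrative sketch (not part of the original pydantic source): a JSON round trip through
    # the method above and `validate_json`.
    #
    #     from typing import Dict
    #     from pydantic import TypeAdapter
    #
    #     ta = TypeAdapter(Dict[str, int])
    #     payload = ta.dump_json({'a': 1})  # -> b'{"a":1}'
    #     ta.validate_json(payload)         # -> {'a': 1}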

    @_frame_depth(1)
    def json_schema(
        self,
        *,
        by_alias: bool = True,
        ref_template: str = DEFAULT_REF_TEMPLATE,
        schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
        mode: JsonSchemaMode = 'validation',
    ) -> dict[str, Any]:
        """Generate a JSON schema for the adapted type.

        Args:
            by_alias: Whether to use alias names for field names.
            ref_template: The format string used for generating $ref strings.
            schema_generator: The generator class used for creating the schema.
            mode: The mode to use for schema generation.

        Returns:
            The JSON schema for the model as a dictionary.
        """
        schema_generator_instance = schema_generator(by_alias=by_alias, ref_template=ref_template)
        return schema_generator_instance.generate(self.core_schema, mode=mode)
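
    # Illustrative sketch (not part of the original pydantic source): generating a schema for a
    # plain type with the method above.
    #
    #     from typing import List
    #     from pydantic import TypeAdapter
    #
    #     TypeAdapter(List[int]).json_schema()
    #     # -> {'items': {'type': 'integer'}, 'type': 'array'}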

    @staticmethod
    def json_schemas(
        inputs: Iterable[tuple[JsonSchemaKeyT, JsonSchemaMode, TypeAdapter[Any]]],
        /,
        *,
        by_alias: bool = True,
        title: str | None = None,
        description: str | None = None,
        ref_template: str = DEFAULT_REF_TEMPLATE,
        schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
    ) -> tuple[dict[tuple[JsonSchemaKeyT, JsonSchemaMode], JsonSchemaValue], JsonSchemaValue]:
        """Generate a JSON schema including definitions from multiple type adapters.

        Args:
            inputs: Inputs to schema generation. The first two items will form the keys of the (first)
                output mapping; the type adapters will provide the core schemas that get converted into
                definitions in the output JSON schema.
            by_alias: Whether to use alias names.
            title: The title for the schema.
            description: The description for the schema.
            ref_template: The format string used for generating $ref strings.
            schema_generator: The generator class used for creating the schema.

        Returns:
            A tuple where:

                - The first element is a dictionary whose keys are tuples of JSON schema key type and JSON mode, and
                  whose values are the JSON schema corresponding to that pair of inputs. (These schemas may have
                  JsonRef references to definitions that are defined in the second returned element.)
                - The second element is a JSON schema containing all definitions referenced in the first returned
                  element, along with the optional title and description keys.

        """
        schema_generator_instance = schema_generator(by_alias=by_alias, ref_template=ref_template)

        inputs_ = []
        for key, mode, adapter in inputs:
            with adapter._with_frame_depth(1):  # +1 for json_schemas staticmethod
                inputs_.append((key, mode, adapter.core_schema))

        json_schemas_map, definitions = schema_generator_instance.generate_definitions(inputs_)

        json_schema: dict[str, Any] = {}
        if definitions:
            json_schema['$defs'] = definitions
        if title:
            json_schema['title'] = title
        if description:
            json_schema['description'] = description

        return json_schemas_map, json_schema
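
# Illustrative sketch (not part of the original pydantic source): combining several adapters into
# one schema document with the `json_schemas` staticmethod above. The model, keys, and title are
# illustrative only.
#
#     from typing import List
#     from pydantic import BaseModel, TypeAdapter
#
#     class User(BaseModel):
#         name: str
#
#     keys_map, defs_schema = TypeAdapter.json_schemas(
#         [('user', 'validation', TypeAdapter(User)), ('ids', 'validation', TypeAdapter(List[int]))],
#         title='API components',
#     )
#     # keys_map[('user', 'validation')] refers into defs_schema['$defs'];
#     # defs_schema['title'] == 'API components'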