Coverage for pydantic/_internal/_std_types_schema.py: 95.27%
306 statements
coverage.py v7.5.4, created at 2024-07-03 19:29 +0000
1"""Logic for generating pydantic-core schemas for standard library types.
3Import of this module is deferred since it contains imports of many standard library modules.
4"""

from __future__ import annotations as _annotations

import collections
import collections.abc
import dataclasses
import decimal
import inspect
import os
import typing
from enum import Enum
from functools import partial
from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
from operator import attrgetter
from typing import Any, Callable, Iterable, Literal, Tuple, TypeVar

import typing_extensions
from pydantic_core import (
    CoreSchema,
    MultiHostUrl,
    PydanticCustomError,
    PydanticOmit,
    Url,
    core_schema,
)
from typing_extensions import get_args, get_origin

from pydantic.errors import PydanticSchemaGenerationError
from pydantic.fields import FieldInfo
from pydantic.types import Strict

from ..config import ConfigDict
from ..json_schema import JsonSchemaValue
from . import _known_annotated_metadata, _typing_extra, _validators
from ._core_utils import get_type_ref
from ._internal_dataclass import slots_true
from ._schema_generation_shared import GetCoreSchemaHandler, GetJsonSchemaHandler

if typing.TYPE_CHECKING:
    from ._generate_schema import GenerateSchema

    StdSchemaFunction = Callable[[GenerateSchema, type[Any]], core_schema.CoreSchema]


@dataclasses.dataclass(**slots_true)
class SchemaTransformer:
    get_core_schema: Callable[[Any, GetCoreSchemaHandler], CoreSchema]
    get_json_schema: Callable[[CoreSchema, GetJsonSchemaHandler], JsonSchemaValue]

    def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
        return self.get_core_schema(source_type, handler)

    def __get_pydantic_json_schema__(self, schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
        return self.get_json_schema(schema, handler)
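

# Illustrative sketch, not part of the original module: any object exposing the two dunder methods above
# can be attached as `Annotated` metadata and pydantic will call them while building the schema. The
# transformer and TypeAdapter usage below are a hypothetical example of consuming such metadata.
def _example_schema_transformer() -> None:
    from pydantic import TypeAdapter

    lowercase_str = SchemaTransformer(
        lambda _source, _handler: core_schema.no_info_after_validator_function(str.lower, core_schema.str_schema()),
        lambda cs, handler: handler(cs),
    )
    assert TypeAdapter(typing_extensions.Annotated[str, lowercase_str]).validate_python('ABC') == 'abc'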


def get_enum_core_schema(enum_type: type[Enum], config: ConfigDict) -> CoreSchema:
    cases: list[Any] = list(enum_type.__members__.values())

    enum_ref = get_type_ref(enum_type)
    description = None if not enum_type.__doc__ else inspect.cleandoc(enum_type.__doc__)
    if description == 'An enumeration.':  # This is the default value provided by enum.EnumMeta.__new__; don't use it
        description = None
    js_updates = {'title': enum_type.__name__, 'description': description}
    js_updates = {k: v for k, v in js_updates.items() if v is not None}

    sub_type: Literal['str', 'int', 'float'] | None = None
    if issubclass(enum_type, int):
        sub_type = 'int'
        value_ser_type: core_schema.SerSchema = core_schema.simple_ser_schema('int')
    elif issubclass(enum_type, str):
        # this handles `StrEnum` (3.11 only), and also `Foobar(str, Enum)`
        sub_type = 'str'
        value_ser_type = core_schema.simple_ser_schema('str')
    elif issubclass(enum_type, float):
        sub_type = 'float'
        value_ser_type = core_schema.simple_ser_schema('float')
    else:
        # TODO this is an ugly hack, how do we trigger an Any schema for serialization?
        value_ser_type = core_schema.plain_serializer_function_ser_schema(lambda x: x)

    if cases:

        def get_json_schema(schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
            json_schema = handler(schema)
            original_schema = handler.resolve_ref_schema(json_schema)
            original_schema.update(js_updates)
            return json_schema

        # we don't want to add the missing to the schema if it's the default one
        default_missing = getattr(enum_type._missing_, '__func__', None) == Enum._missing_.__func__  # type: ignore
        enum_schema = core_schema.enum_schema(
            enum_type,
            cases,
            sub_type=sub_type,
            missing=None if default_missing else enum_type._missing_,
            ref=enum_ref,
            metadata={'pydantic_js_functions': [get_json_schema]},
        )

        if config.get('use_enum_values', False):
            enum_schema = core_schema.no_info_after_validator_function(
                attrgetter('value'), enum_schema, serialization=value_ser_type
            )

        return enum_schema

    else:

        def get_json_schema_no_cases(_, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
            json_schema = handler(core_schema.enum_schema(enum_type, cases, sub_type=sub_type, ref=enum_ref))
            original_schema = handler.resolve_ref_schema(json_schema)
            original_schema.update(js_updates)
            return json_schema

        # Use an isinstance check for enums with no cases.
        # The most important use case for this is creating TypeVar bounds for generics that should
        # be restricted to enums. This is more consistent than it might seem at first, since you can only
        # subclass enum.Enum (or subclasses of enum.Enum) if all parent classes have no cases.
        # We use the get_json_schema function when an Enum subclass has been declared with no cases
        # so that we can still generate a valid json schema.
        return core_schema.is_instance_schema(
            enum_type,
            metadata={'pydantic_js_functions': [get_json_schema_no_cases]},
        )
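

# Illustrative sketch, not part of the original module: with `use_enum_values` enabled, the enum schema
# built above is wrapped in an after-validator that returns `.value`. The enum and adapter below are a
# hypothetical example.
def _example_use_enum_values() -> None:
    from pydantic import TypeAdapter

    class Color(Enum):
        RED = 'red'

    adapter = TypeAdapter(Color, config=ConfigDict(use_enum_values=True))
    assert adapter.validate_python('red') == 'red'  # the raw value, not Color.RED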


@dataclasses.dataclass(**slots_true)
class InnerSchemaValidator:
    """Use a fixed CoreSchema, avoiding interference from outward annotations."""

    core_schema: CoreSchema
    js_schema: JsonSchemaValue | None = None
    js_core_schema: CoreSchema | None = None
    js_schema_update: JsonSchemaValue | None = None

    def __get_pydantic_json_schema__(self, _schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
        if self.js_schema is not None:  # coverage: this condition was never true in the recorded runs
            return self.js_schema
        js_schema = handler(self.js_core_schema or self.core_schema)
        if self.js_schema_update is not None:
            js_schema.update(self.js_schema_update)
        return js_schema

    def __get_pydantic_core_schema__(self, _source_type: Any, _handler: GetCoreSchemaHandler) -> CoreSchema:
        return self.core_schema


def decimal_prepare_pydantic_annotations(
    source: Any, annotations: Iterable[Any], config: ConfigDict
) -> tuple[Any, list[Any]] | None:
    if source is not decimal.Decimal:
        return None

    metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)

    config_allow_inf_nan = config.get('allow_inf_nan')
    if config_allow_inf_nan is not None:
        metadata.setdefault('allow_inf_nan', config_allow_inf_nan)

    _known_annotated_metadata.check_metadata(
        metadata, {*_known_annotated_metadata.FLOAT_CONSTRAINTS, 'max_digits', 'decimal_places'}, decimal.Decimal
    )
    return source, [InnerSchemaValidator(core_schema.decimal_schema(**metadata)), *remaining_annotations]
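

# Illustrative sketch, not part of the original module: known constraints (here `Ge` from annotated-types)
# are collected into a single `decimal_schema` by the preparer above. Hypothetical example:
def _example_decimal_constraints() -> None:
    from annotated_types import Ge
    from pydantic import TypeAdapter

    adapter = TypeAdapter(typing_extensions.Annotated[decimal.Decimal, Ge(0)])
    assert adapter.validate_python('1.5') == decimal.Decimal('1.5')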


def datetime_prepare_pydantic_annotations(
    source_type: Any, annotations: Iterable[Any], _config: ConfigDict
) -> tuple[Any, list[Any]] | None:
    import datetime

    metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
    if source_type is datetime.date:
        sv = InnerSchemaValidator(core_schema.date_schema(**metadata))
    elif source_type is datetime.datetime:
        sv = InnerSchemaValidator(core_schema.datetime_schema(**metadata))
    elif source_type is datetime.time:
        sv = InnerSchemaValidator(core_schema.time_schema(**metadata))
    elif source_type is datetime.timedelta:
        sv = InnerSchemaValidator(core_schema.timedelta_schema(**metadata))
    else:
        return None
    # check now that we know the source type is correct
    _known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.DATE_TIME_CONSTRAINTS, source_type)
    return (source_type, [sv, *remaining_annotations])


def uuid_prepare_pydantic_annotations(
    source_type: Any, annotations: Iterable[Any], _config: ConfigDict
) -> tuple[Any, list[Any]] | None:
    # UUIDs have no constraints - they are fixed length, constructing a UUID instance checks the length

    from uuid import UUID

    if source_type is not UUID:
        return None

    return (source_type, [InnerSchemaValidator(core_schema.uuid_schema()), *annotations])


def path_schema_prepare_pydantic_annotations(
    source_type: Any, annotations: Iterable[Any], _config: ConfigDict
) -> tuple[Any, list[Any]] | None:
    import pathlib

    if source_type not in {
        os.PathLike,
        pathlib.Path,
        pathlib.PurePath,
        pathlib.PosixPath,
        pathlib.PurePosixPath,
        pathlib.PureWindowsPath,
    }:
        return None

    metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
    _known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.STR_CONSTRAINTS, source_type)

    construct_path = pathlib.PurePath if source_type is os.PathLike else source_type

    def path_validator(input_value: str) -> os.PathLike[Any]:
        try:
            return construct_path(input_value)
        except TypeError as e:
            raise PydanticCustomError('path_type', 'Input is not a valid path') from e

    constrained_str_schema = core_schema.str_schema(**metadata)

    instance_schema = core_schema.json_or_python_schema(
        json_schema=core_schema.no_info_after_validator_function(path_validator, constrained_str_schema),
        python_schema=core_schema.is_instance_schema(source_type),
    )

    strict: bool | None = None
    for annotation in annotations:
        if isinstance(annotation, Strict):
            strict = annotation.strict

    schema = core_schema.lax_or_strict_schema(
        lax_schema=core_schema.union_schema(
            [
                instance_schema,
                core_schema.no_info_after_validator_function(path_validator, constrained_str_schema),
            ],
            custom_error_type='path_type',
            custom_error_message='Input is not a valid path',
            strict=True,
        ),
        strict_schema=instance_schema,
        serialization=core_schema.to_string_ser_schema(),
        strict=strict,
    )

    return (
        source_type,
        [
            InnerSchemaValidator(schema, js_core_schema=constrained_str_schema, js_schema_update={'format': 'path'}),
            *remaining_annotations,
        ],
    )
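

# Illustrative sketch, not part of the original module: in lax mode strings are coerced through
# `path_validator` above, while strict mode only accepts path instances; serialization always goes
# through `to_string_ser_schema`. Hypothetical example:
def _example_path_coercion() -> None:
    import pathlib

    from pydantic import TypeAdapter

    assert TypeAdapter(pathlib.Path).validate_python('foo/bar') == pathlib.Path('foo/bar')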


def dequeue_validator(
    input_value: Any, handler: core_schema.ValidatorFunctionWrapHandler, maxlen: None | int
) -> collections.deque[Any]:
    if isinstance(input_value, collections.deque):
        maxlens = [v for v in (input_value.maxlen, maxlen) if v is not None]
        if maxlens:
            maxlen = min(maxlens)
        return collections.deque(handler(input_value), maxlen=maxlen)
    else:
        return collections.deque(handler(input_value), maxlen=maxlen)
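

# Illustrative sketch, not part of the original module: a `MaxLen` annotation serves both as the list
# length constraint and as the deque's `maxlen` (an input deque with a smaller maxlen keeps its own).
# Hypothetical example:
def _example_deque_maxlen() -> None:
    from annotated_types import MaxLen
    from pydantic import TypeAdapter

    adapter = TypeAdapter(typing_extensions.Annotated[collections.deque, MaxLen(2)])
    validated = adapter.validate_python([1, 2])
    assert validated.maxlen == 2 and list(validated) == [1, 2]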


def serialize_sequence_via_list(
    v: Any, handler: core_schema.SerializerFunctionWrapHandler, info: core_schema.SerializationInfo
) -> Any:
    items: list[Any] = []

    mapped_origin = SEQUENCE_ORIGIN_MAP.get(type(v), None)
    if mapped_origin is None:
        # we shouldn't hit this branch, should probably add a serialization error or something
        return v

    for index, item in enumerate(v):
        try:
            v = handler(item, index)
        except PydanticOmit:
            pass
        else:
            items.append(v)

    if info.mode_is_json():
        return items
    else:
        return mapped_origin(items)
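

# Illustrative sketch, not part of the original module: serialization walks the container as a list; in
# JSON mode the list is returned directly, in python mode the mapped origin type (here `deque`) is rebuilt.
# Hypothetical example:
def _example_sequence_serialization() -> None:
    from pydantic import TypeAdapter

    adapter = TypeAdapter(typing.Deque[int])
    assert adapter.dump_json(collections.deque([1, 2])) == b'[1,2]'
    assert adapter.dump_python(collections.deque([1, 2])) == collections.deque([1, 2])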


@dataclasses.dataclass(**slots_true)
class SequenceValidator:
    mapped_origin: type[Any]
    item_source_type: type[Any]
    min_length: int | None = None
    max_length: int | None = None
    strict: bool | None = None
    fail_fast: bool | None = None

    def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
        if self.item_source_type is Any:
            items_schema = None
        else:
            items_schema = handler.generate_schema(self.item_source_type)

        metadata = {
            'min_length': self.min_length,
            'max_length': self.max_length,
            'strict': self.strict,
            'fail_fast': self.fail_fast,
        }

        if self.mapped_origin in (list, set, frozenset):
            if self.mapped_origin is list:
                constrained_schema = core_schema.list_schema(items_schema, **metadata)
            elif self.mapped_origin is set:
                constrained_schema = core_schema.set_schema(items_schema, **metadata)
            else:
                assert self.mapped_origin is frozenset  # safety check in case we forget to add a case
                constrained_schema = core_schema.frozenset_schema(items_schema, **metadata)

            schema = constrained_schema
        else:
            # safety check in case we forget to add a case
            assert self.mapped_origin in (collections.deque, collections.Counter)

            if self.mapped_origin is collections.deque:  # coverage: always true in the recorded runs
                # if we have a MaxLen annotation, we might as well set it as the default maxlen on the deque;
                # this lets us reuse existing metadata annotations so users can set the maxlen on a deque
                # that e.g. comes from JSON
                coerce_instance_wrap = partial(
                    core_schema.no_info_wrap_validator_function,
                    partial(dequeue_validator, maxlen=metadata.get('max_length', None)),
                )
            else:
                coerce_instance_wrap = partial(core_schema.no_info_after_validator_function, self.mapped_origin)

            # we have to use a lax list schema here, because we need to validate the deque's
            # items via a list schema, but it's ok if the deque itself is not a list (same for Counter)
            metadata_with_strict_override = {**metadata, 'strict': False}
            constrained_schema = core_schema.list_schema(items_schema, **metadata_with_strict_override)

            check_instance = core_schema.json_or_python_schema(
                json_schema=core_schema.list_schema(),
                python_schema=core_schema.is_instance_schema(self.mapped_origin),
            )

            serialization = core_schema.wrap_serializer_function_ser_schema(
                serialize_sequence_via_list, schema=items_schema or core_schema.any_schema(), info_arg=True
            )

            strict = core_schema.chain_schema([check_instance, coerce_instance_wrap(constrained_schema)])

            if metadata.get('strict', False):  # coverage: never true in the recorded runs
                schema = strict
            else:
                lax = coerce_instance_wrap(constrained_schema)
                schema = core_schema.lax_or_strict_schema(lax_schema=lax, strict_schema=strict)
            schema['serialization'] = serialization

        return schema


SEQUENCE_ORIGIN_MAP: dict[Any, Any] = {
    typing.Deque: collections.deque,
    collections.deque: collections.deque,
    list: list,
    typing.List: list,
    set: set,
    typing.AbstractSet: set,
    typing.Set: set,
    frozenset: frozenset,
    typing.FrozenSet: frozenset,
    typing.Sequence: list,
    typing.MutableSequence: list,
    typing.MutableSet: set,
    # this doesn't handle subclasses of these
    # parametrized typing.Set creates one of these
    collections.abc.MutableSet: set,
    collections.abc.Set: frozenset,
}


def identity(s: CoreSchema) -> CoreSchema:
    return s


def sequence_like_prepare_pydantic_annotations(
    source_type: Any, annotations: Iterable[Any], _config: ConfigDict
) -> tuple[Any, list[Any]] | None:
    origin: Any = get_origin(source_type)

    mapped_origin = SEQUENCE_ORIGIN_MAP.get(origin, None) if origin else SEQUENCE_ORIGIN_MAP.get(source_type, None)
    if mapped_origin is None:
        return None

    args = get_args(source_type)

    if not args:
        args = typing.cast(Tuple[Any], (Any,))
    elif len(args) != 1:  # coverage: never true in the recorded runs
        raise ValueError('Expected sequence to have exactly 1 generic parameter')

    item_source_type = args[0]

    metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
    _known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.SEQUENCE_CONSTRAINTS, source_type)

    return (source_type, [SequenceValidator(mapped_origin, item_source_type, **metadata), *remaining_annotations])


MAPPING_ORIGIN_MAP: dict[Any, Any] = {
    typing.DefaultDict: collections.defaultdict,
    collections.defaultdict: collections.defaultdict,
    collections.OrderedDict: collections.OrderedDict,
    typing_extensions.OrderedDict: collections.OrderedDict,
    dict: dict,
    typing.Dict: dict,
    collections.Counter: collections.Counter,
    typing.Counter: collections.Counter,
    # this doesn't handle subclasses of these
    typing.Mapping: dict,
    typing.MutableMapping: dict,
    # parametrized typing.{Mutable}Mapping creates one of these
    collections.abc.MutableMapping: dict,
    collections.abc.Mapping: dict,
}


def defaultdict_validator(
    input_value: Any, handler: core_schema.ValidatorFunctionWrapHandler, default_default_factory: Callable[[], Any]
) -> collections.defaultdict[Any, Any]:
    if isinstance(input_value, collections.defaultdict):
        default_factory = input_value.default_factory
        return collections.defaultdict(default_factory, handler(input_value))
    else:
        return collections.defaultdict(default_default_factory, handler(input_value))


def get_defaultdict_default_default_factory(values_source_type: Any) -> Callable[[], Any]:
    def infer_default() -> Callable[[], Any]:
        allowed_default_types: dict[Any, Any] = {
            typing.Tuple: tuple,
            tuple: tuple,
            collections.abc.Sequence: tuple,
            collections.abc.MutableSequence: list,
            typing.List: list,
            list: list,
            typing.Sequence: list,
            typing.Set: set,
            set: set,
            typing.MutableSet: set,
            collections.abc.MutableSet: set,
            collections.abc.Set: frozenset,
            typing.MutableMapping: dict,
            typing.Mapping: dict,
            collections.abc.Mapping: dict,
            collections.abc.MutableMapping: dict,
            float: float,
            int: int,
            str: str,
            bool: bool,
        }
        values_type_origin = get_origin(values_source_type) or values_source_type
        instructions = 'set using `DefaultDict[..., Annotated[..., Field(default_factory=...)]]`'
        if isinstance(values_type_origin, TypeVar):

            def type_var_default_factory() -> None:
                raise RuntimeError(
                    'Generic defaultdict cannot be used without a concrete value type or an'
                    ' explicit default factory, ' + instructions
                )

            return type_var_default_factory
        elif values_type_origin not in allowed_default_types:
            # a somewhat subjective set of types that have reasonable default values
            allowed_msg = ', '.join([t.__name__ for t in set(allowed_default_types.values())])
            raise PydanticSchemaGenerationError(
                f'Unable to infer a default factory for keys of type {values_source_type}.'
                f' Only {allowed_msg} are supported, other types require an explicit default factory'
                ' ' + instructions
            )
        return allowed_default_types[values_type_origin]

    # Assume Annotated[..., Field(...)]
    if _typing_extra.is_annotated(values_source_type):
        # coverage: the generator expression below never ran to exhaustion in the recorded runs
        field_info = next((v for v in get_args(values_source_type) if isinstance(v, FieldInfo)), None)
    else:
        field_info = None
    if field_info and field_info.default_factory:
        default_default_factory = field_info.default_factory
    else:
        default_default_factory = infer_default()
    return default_default_factory
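

# Illustrative sketch, not part of the original module: for `DefaultDict[str, List[int]]` the default
# factory is inferred from the value type (`list` here); an explicit
# `Annotated[..., Field(default_factory=...)]` on the value type would take precedence. Hypothetical example:
def _example_defaultdict_factory() -> None:
    from pydantic import TypeAdapter

    adapter = TypeAdapter(typing.DefaultDict[str, typing.List[int]])
    validated = adapter.validate_python({'a': [1]})
    assert validated['missing'] == []  # the inferred default factory is `list`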


@dataclasses.dataclass(**slots_true)
class MappingValidator:
    mapped_origin: type[Any]
    keys_source_type: type[Any]
    values_source_type: type[Any]
    min_length: int | None = None
    max_length: int | None = None
    strict: bool = False

    def serialize_mapping_via_dict(self, v: Any, handler: core_schema.SerializerFunctionWrapHandler) -> Any:
        return handler(v)

    def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
        if self.keys_source_type is Any:
            keys_schema = None
        else:
            keys_schema = handler.generate_schema(self.keys_source_type)
        if self.values_source_type is Any:
            values_schema = None
        else:
            values_schema = handler.generate_schema(self.values_source_type)

        metadata = {'min_length': self.min_length, 'max_length': self.max_length, 'strict': self.strict}

        if self.mapped_origin is dict:
            schema = core_schema.dict_schema(keys_schema, values_schema, **metadata)
        else:
            constrained_schema = core_schema.dict_schema(keys_schema, values_schema, **metadata)
            check_instance = core_schema.json_or_python_schema(
                json_schema=core_schema.dict_schema(),
                python_schema=core_schema.is_instance_schema(self.mapped_origin),
            )

            if self.mapped_origin is collections.defaultdict:
                default_default_factory = get_defaultdict_default_default_factory(self.values_source_type)
                coerce_instance_wrap = partial(
                    core_schema.no_info_wrap_validator_function,
                    partial(defaultdict_validator, default_default_factory=default_default_factory),
                )
            else:
                coerce_instance_wrap = partial(core_schema.no_info_after_validator_function, self.mapped_origin)

            serialization = core_schema.wrap_serializer_function_ser_schema(
                self.serialize_mapping_via_dict,
                schema=core_schema.dict_schema(
                    keys_schema or core_schema.any_schema(), values_schema or core_schema.any_schema()
                ),
                info_arg=False,
            )

            strict = core_schema.chain_schema([check_instance, coerce_instance_wrap(constrained_schema)])

            if metadata.get('strict', False):  # coverage: never true in the recorded runs
                schema = strict
            else:
                lax = coerce_instance_wrap(constrained_schema)
                schema = core_schema.lax_or_strict_schema(lax_schema=lax, strict_schema=strict)
            schema['serialization'] = serialization

        return schema


def mapping_like_prepare_pydantic_annotations(
    source_type: Any, annotations: Iterable[Any], _config: ConfigDict
) -> tuple[Any, list[Any]] | None:
    origin: Any = get_origin(source_type)

    mapped_origin = MAPPING_ORIGIN_MAP.get(origin, None) if origin else MAPPING_ORIGIN_MAP.get(source_type, None)
    if mapped_origin is None:
        return None

    args = get_args(source_type)

    if not args:
        args = typing.cast(Tuple[Any, Any], (Any, Any))
    elif mapped_origin is collections.Counter:
        # a single generic parameter
        if len(args) != 1:  # coverage: never true in the recorded runs
            raise ValueError('Expected Counter to have exactly 1 generic parameter')
        args = (args[0], int)  # the value (count) type is always int
    elif len(args) != 2:  # coverage: never true in the recorded runs
        raise ValueError('Expected mapping to have exactly 2 generic parameters')

    keys_source_type, values_source_type = args

    metadata, remaining_annotations = _known_annotated_metadata.collect_known_metadata(annotations)
    _known_annotated_metadata.check_metadata(metadata, _known_annotated_metadata.SEQUENCE_CONSTRAINTS, source_type)

    return (
        source_type,
        [
            MappingValidator(mapped_origin, keys_source_type, values_source_type, **metadata),
            *remaining_annotations,
        ],
    )
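

# Illustrative sketch, not part of the original module: `Counter[str]` carries a single type parameter, so
# the preparer above pairs it with `int` for the counts. Hypothetical example:
def _example_counter() -> None:
    from pydantic import TypeAdapter

    adapter = TypeAdapter(typing.Counter[str])
    assert adapter.validate_python({'a': '2'}) == collections.Counter({'a': 2})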


def ip_prepare_pydantic_annotations(
    source_type: Any, annotations: Iterable[Any], _config: ConfigDict
) -> tuple[Any, list[Any]] | None:
    def make_strict_ip_schema(tp: type[Any]) -> CoreSchema:
        return core_schema.json_or_python_schema(
            json_schema=core_schema.no_info_after_validator_function(tp, core_schema.str_schema()),
            python_schema=core_schema.is_instance_schema(tp),
        )

    if source_type is IPv4Address:
        return source_type, [
            SchemaTransformer(
                lambda _1, _2: core_schema.lax_or_strict_schema(
                    lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v4_address_validator),
                    strict_schema=make_strict_ip_schema(IPv4Address),
                    serialization=core_schema.to_string_ser_schema(),
                ),
                lambda _1, _2: {'type': 'string', 'format': 'ipv4'},
            ),
            *annotations,
        ]
    if source_type is IPv4Network:
        return source_type, [
            SchemaTransformer(
                lambda _1, _2: core_schema.lax_or_strict_schema(
                    lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v4_network_validator),
                    strict_schema=make_strict_ip_schema(IPv4Network),
                    serialization=core_schema.to_string_ser_schema(),
                ),
                lambda _1, _2: {'type': 'string', 'format': 'ipv4network'},
            ),
            *annotations,
        ]
    if source_type is IPv4Interface:
        return source_type, [
            SchemaTransformer(
                lambda _1, _2: core_schema.lax_or_strict_schema(
                    lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v4_interface_validator),
                    strict_schema=make_strict_ip_schema(IPv4Interface),
                    serialization=core_schema.to_string_ser_schema(),
                ),
                lambda _1, _2: {'type': 'string', 'format': 'ipv4interface'},
            ),
            *annotations,
        ]

    if source_type is IPv6Address:
        return source_type, [
            SchemaTransformer(
                lambda _1, _2: core_schema.lax_or_strict_schema(
                    lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v6_address_validator),
                    strict_schema=make_strict_ip_schema(IPv6Address),
                    serialization=core_schema.to_string_ser_schema(),
                ),
                lambda _1, _2: {'type': 'string', 'format': 'ipv6'},
            ),
            *annotations,
        ]
    if source_type is IPv6Network:
        return source_type, [
            SchemaTransformer(
                lambda _1, _2: core_schema.lax_or_strict_schema(
                    lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v6_network_validator),
                    strict_schema=make_strict_ip_schema(IPv6Network),
                    serialization=core_schema.to_string_ser_schema(),
                ),
                lambda _1, _2: {'type': 'string', 'format': 'ipv6network'},
            ),
            *annotations,
        ]
    if source_type is IPv6Interface:
        return source_type, [
            SchemaTransformer(
                lambda _1, _2: core_schema.lax_or_strict_schema(
                    lax_schema=core_schema.no_info_plain_validator_function(_validators.ip_v6_interface_validator),
                    strict_schema=make_strict_ip_schema(IPv6Interface),
                    serialization=core_schema.to_string_ser_schema(),
                ),
                lambda _1, _2: {'type': 'string', 'format': 'ipv6interface'},
            ),
            *annotations,
        ]

    return None
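

# Illustrative sketch, not part of the original module: lax mode parses strings with the `_validators`
# helpers, strict mode only accepts existing instances, and serialization uses `str()`. Hypothetical example:
def _example_ip_validation() -> None:
    from pydantic import TypeAdapter

    adapter = TypeAdapter(IPv4Address)
    assert adapter.validate_python('127.0.0.1') == IPv4Address('127.0.0.1')
    assert adapter.dump_json(IPv4Address('127.0.0.1')) == b'"127.0.0.1"'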


def url_prepare_pydantic_annotations(
    source_type: Any, annotations: Iterable[Any], _config: ConfigDict
) -> tuple[Any, list[Any]] | None:
    if source_type is Url:
        return source_type, [
            SchemaTransformer(
                lambda _1, _2: core_schema.url_schema(),
                lambda cs, handler: handler(cs),
            ),
            *annotations,
        ]
    if source_type is MultiHostUrl:
        return source_type, [
            SchemaTransformer(
                lambda _1, _2: core_schema.multi_host_url_schema(),
                lambda cs, handler: handler(cs),
            ),
            *annotations,
        ]


PREPARE_METHODS: tuple[Callable[[Any, Iterable[Any], ConfigDict], tuple[Any, list[Any]] | None], ...] = (
    decimal_prepare_pydantic_annotations,
    sequence_like_prepare_pydantic_annotations,
    datetime_prepare_pydantic_annotations,
    uuid_prepare_pydantic_annotations,
    path_schema_prepare_pydantic_annotations,
    mapping_like_prepare_pydantic_annotations,
    ip_prepare_pydantic_annotations,
    url_prepare_pydantic_annotations,
)
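
# Note: every preparer above returns None for types it does not handle, so a caller (in pydantic this is
# expected to be GenerateSchema) can try them in order and use the first non-None
# (source_type, annotations) result.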