Coverage for pydantic/_internal/_known_annotated_metadata.py: 91.15%
160 statements
« prev ^ index » next coverage.py v7.6.12, created at 2025-02-13 19:35 +0000
« prev ^ index » next coverage.py v7.6.12, created at 2025-02-13 19:35 +0000
1from __future__ import annotations 1arbsctduevfghwixjykzlAGHIJKLMmBnCoDpEqF
3from collections import defaultdict 1arbsctduevfghwixjykzlAGHIJKLMmBnCoDpEqF
4from collections.abc import Iterable 1arbsctduevfghwixjykzlAGHIJKLMmBnCoDpEqF
5from copy import copy 1arbsctduevfghwixjykzlAGHIJKLMmBnCoDpEqF
6from decimal import Decimal 1arbsctduevfghwixjykzlAGHIJKLMmBnCoDpEqF
7from functools import lru_cache, partial 1arbsctduevfghwixjykzlAGHIJKLMmBnCoDpEqF
8from typing import TYPE_CHECKING, Any 1arbsctduevfghwixjykzlAGHIJKLMmBnCoDpEqF
10from pydantic_core import CoreSchema, PydanticCustomError, ValidationError, to_jsonable_python 1arbsctduevfghwixjykzlAGHIJKLMmBnCoDpEqF
11from pydantic_core import core_schema as cs 1arbsctduevfghwixjykzlAGHIJKLMmBnCoDpEqF
13from ._fields import PydanticMetadata 1arbsctduevfghwixjykzlAGHIJKLMmBnCoDpEqF
14from ._import_utils import import_cached_field_info 1arbsctduevfghwixjykzlAGHIJKLMmBnCoDpEqF
16if TYPE_CHECKING: 1arbsctduevfghwixjykzlAGHIJKLMmBnCoDpEqF
17 pass
# Names of constraints accepted by each family of core schemas. These building
# blocks are combined into CONSTRAINTS_TO_ALLOWED_SCHEMAS below.
STRICT = {'strict'}
FAIL_FAST = {'fail_fast'}
LENGTH_CONSTRAINTS = {'min_length', 'max_length'}
INEQUALITY = {'le', 'ge', 'lt', 'gt'}
NUMERIC_CONSTRAINTS = {'multiple_of'} | INEQUALITY
ALLOW_INF_NAN = {'allow_inf_nan'}

# Per-type constraint sets: the constraint names that can be set directly on
# the corresponding core schema.
STR_CONSTRAINTS = LENGTH_CONSTRAINTS | STRICT | {
    'strip_whitespace',
    'to_lower',
    'to_upper',
    'pattern',
    'coerce_numbers_to_str',
}
BYTES_CONSTRAINTS = LENGTH_CONSTRAINTS | STRICT

LIST_CONSTRAINTS = LENGTH_CONSTRAINTS | STRICT | FAIL_FAST
TUPLE_CONSTRAINTS = LENGTH_CONSTRAINTS | STRICT | FAIL_FAST
SET_CONSTRAINTS = LENGTH_CONSTRAINTS | STRICT | FAIL_FAST
DICT_CONSTRAINTS = LENGTH_CONSTRAINTS | STRICT
GENERATOR_CONSTRAINTS = LENGTH_CONSTRAINTS | STRICT
SEQUENCE_CONSTRAINTS = LENGTH_CONSTRAINTS | FAIL_FAST

FLOAT_CONSTRAINTS = NUMERIC_CONSTRAINTS | ALLOW_INF_NAN | STRICT
DECIMAL_CONSTRAINTS = {'max_digits', 'decimal_places'} | FLOAT_CONSTRAINTS
INT_CONSTRAINTS = NUMERIC_CONSTRAINTS | ALLOW_INF_NAN | STRICT
# NOTE: these deliberately alias the same set object as STRICT.
BOOL_CONSTRAINTS = STRICT
UUID_CONSTRAINTS = STRICT

DATE_TIME_CONSTRAINTS = NUMERIC_CONSTRAINTS | STRICT
TIMEDELTA_CONSTRAINTS = NUMERIC_CONSTRAINTS | STRICT
TIME_CONSTRAINTS = NUMERIC_CONSTRAINTS | STRICT
LAX_OR_STRICT_CONSTRAINTS = STRICT
ENUM_CONSTRAINTS = STRICT
COMPLEX_CONSTRAINTS = STRICT

UNION_CONSTRAINTS = {'union_mode'}
URL_CONSTRAINTS = {
    'max_length',
    'allowed_schemes',
    'host_required',
    'default_host',
    'default_port',
    'default_path',
}

# Core schema `type` strings grouped by the kind of data they validate.
TEXT_SCHEMA_TYPES = ('str', 'bytes', 'url', 'multi-host-url')
SEQUENCE_SCHEMA_TYPES = ('list', 'tuple', 'set', 'frozenset', 'generator') + TEXT_SCHEMA_TYPES
NUMERIC_SCHEMA_TYPES = ('float', 'int', 'date', 'time', 'timedelta', 'datetime')
# Maps each constraint name to the set of core schema `type` strings that can
# accept the constraint directly (e.g. 'min_length' -> {'str', 'list', ...}).
# Populated from `constraint_schema_pairings` just below.
CONSTRAINTS_TO_ALLOWED_SCHEMAS: dict[str, set[str]] = defaultdict(set)

constraint_schema_pairings: list[tuple[set[str], tuple[str, ...]]] = [
    (STR_CONSTRAINTS, TEXT_SCHEMA_TYPES),
    (BYTES_CONSTRAINTS, ('bytes',)),
    (LIST_CONSTRAINTS, ('list',)),
    (TUPLE_CONSTRAINTS, ('tuple',)),
    (SET_CONSTRAINTS, ('set', 'frozenset')),
    (DICT_CONSTRAINTS, ('dict',)),
    (GENERATOR_CONSTRAINTS, ('generator',)),
    (FLOAT_CONSTRAINTS, ('float',)),
    (INT_CONSTRAINTS, ('int',)),
    (DATE_TIME_CONSTRAINTS, ('date', 'time', 'datetime', 'timedelta')),
    # TODO: this is a bit redundant, we could probably avoid some of these
    (STRICT, (*TEXT_SCHEMA_TYPES, *SEQUENCE_SCHEMA_TYPES, *NUMERIC_SCHEMA_TYPES, 'typed-dict', 'model')),
    (UNION_CONSTRAINTS, ('union',)),
    (URL_CONSTRAINTS, ('url', 'multi-host-url')),
    (BOOL_CONSTRAINTS, ('bool',)),
    (UUID_CONSTRAINTS, ('uuid',)),
    (LAX_OR_STRICT_CONSTRAINTS, ('lax-or-strict',)),
    (ENUM_CONSTRAINTS, ('enum',)),
    (DECIMAL_CONSTRAINTS, ('decimal',)),
    (COMPLEX_CONSTRAINTS, ('complex',)),
]

# Invert the pairings: every constraint in a set is allowed on every schema
# type it is paired with.
for constraints, schemas in constraint_schema_pairings:
    for c in constraints:
        CONSTRAINTS_TO_ALLOWED_SCHEMAS[c] |= set(schemas)
def as_jsonable_value(v: Any) -> Any:
    """Coerce `v` to a JSON-compatible value.

    Values whose exact type is already a JSON primitive (`int`, `str`, `float`,
    `bytes`, `bool`, `None`) are returned untouched; anything else is converted
    via `pydantic_core.to_jsonable_python`. An exact `type` check (rather than
    `isinstance`) is used so that subclasses (e.g. enums) still get converted.
    """
    if type(v) in (int, str, float, bytes, bool, type(None)):
        return v
    return to_jsonable_python(v)
def expand_grouped_metadata(annotations: Iterable[Any]) -> Iterable[Any]:
    """Flatten grouped annotations into their individual components.

    `annotated_types.GroupedMetadata` instances (e.g. `Len`) are expanded into
    their constituent single constraints, and `FieldInfo` instances yield their
    attached metadata followed by a metadata-stripped copy of themselves.

    Args:
        annotations: An iterable of annotations.

    Returns:
        An iterable of expanded annotations.

    Example:
        ```python
        from annotated_types import Ge, Len

        from pydantic._internal._known_annotated_metadata import expand_grouped_metadata

        print(list(expand_grouped_metadata([Ge(4), Len(5)])))
        #> [Ge(ge=4), MinLen(min_length=5)]
        ```
    """
    import annotated_types as at

    FieldInfo = import_cached_field_info()

    for item in annotations:
        if isinstance(item, at.GroupedMetadata):
            yield from item
        elif isinstance(item, FieldInfo):
            yield from item.metadata
            # this is a bit problematic in that it results in duplicate metadata
            # all of our "consumers" can handle it, but it is not ideal
            # we probably should split up FieldInfo into:
            # - annotated types metadata
            # - individual metadata known only to Pydantic
            stripped = copy(item)
            stripped.metadata = []
            yield stripped
        else:
            yield item
@lru_cache
def _get_at_to_constraint_map() -> dict[type, str]:
    """Return a mapping of annotated types to constraint names.

    Normally, we would define a mapping like this in the module scope, but we can't do that
    because we don't permit module level imports of `annotated_types`, in an attempt to speed up
    the import time of `pydantic`. We still only want to have this dictionary defined in one place,
    so we use this function to cache the result.
    """
    import annotated_types as at

    pairs: list[tuple[type, str]] = [
        (at.Gt, 'gt'),
        (at.Ge, 'ge'),
        (at.Lt, 'lt'),
        (at.Le, 'le'),
        (at.MultipleOf, 'multiple_of'),
        (at.MinLen, 'min_length'),
        (at.MaxLen, 'max_length'),
    ]
    return dict(pairs)
def apply_known_metadata(annotation: Any, schema: CoreSchema) -> CoreSchema | None:  # noqa: C901
    """Apply `annotation` to `schema` if it is an annotation we know about (Gt, Le, etc.).
    Otherwise return `None`.

    This does not handle all known annotations. If / when it does, it can always
    return a CoreSchema and return the unmodified schema if the annotation should be ignored.

    Assumes that GroupedMetadata has already been expanded via `expand_grouped_metadata`.

    Args:
        annotation: The annotation.
        schema: The schema.

    Returns:
        An updated schema with annotation if it is an annotation we know about, `None` otherwise.

    Raises:
        PydanticCustomError: If `Predicate` fails.
    """
    import annotated_types as at

    from ._validators import NUMERIC_VALIDATOR_LOOKUP, forbid_inf_nan_check

    schema = schema.copy()
    schema_update, other_metadata = collect_known_metadata([annotation])
    schema_type = schema['type']

    # Constraints that can't be set on the target schema directly and are instead
    # enforced by chaining a `str_schema` carrying the constraint after the schema.
    chain_schema_constraints: set[str] = {
        'pattern',
        'strip_whitespace',
        'to_lower',
        'to_upper',
        'coerce_numbers_to_str',
    }
    chain_schema_steps: list[CoreSchema] = []

    for constraint, value in schema_update.items():
        if constraint not in CONSTRAINTS_TO_ALLOWED_SCHEMAS:
            raise ValueError(f'Unknown constraint {constraint}')
        allowed_schemas = CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint]

        # if it becomes necessary to handle more than one constraint
        # in this recursive case with function-after or function-wrap, we should refactor
        # this is a bit challenging because we sometimes want to apply constraints to the inner schema,
        # whereas other times we want to wrap the existing schema with a new one that enforces a new constraint.
        if schema_type in {'function-before', 'function-wrap', 'function-after'} and constraint == 'strict':
            schema['schema'] = apply_known_metadata(annotation, schema['schema'])  # type: ignore  # schema is function schema
            return schema

        # if we're allowed to apply constraint directly to the schema, like le to int, do that
        if schema_type in allowed_schemas:
            if constraint == 'union_mode' and schema_type == 'union':
                schema['mode'] = value  # type: ignore  # schema is UnionSchema
            else:
                # decimal schemas require `Decimal` instances for their numeric constraints
                if schema_type == 'decimal' and constraint in {'multiple_of', 'le', 'ge', 'lt', 'gt'}:
                    schema[constraint] = Decimal(value)
                else:
                    schema[constraint] = value
            continue

        # else, apply a function after validator to the schema to enforce the corresponding constraint
        if constraint in chain_schema_constraints:

            def _apply_constraint_with_incompatibility_info(
                value: Any, handler: cs.ValidatorFunctionWrapHandler
            ) -> Any:
                try:
                    x = handler(value)
                except ValidationError as ve:
                    # if the error is about the type, it's likely that the constraint is incompatible with the type of the field
                    # for example, the following invalid schema wouldn't be caught during schema build, but rather at this point
                    # with a cryptic 'string_type' error coming from the string validator,
                    # that we'd rather express as a constraint incompatibility error (TypeError)
                    # Annotated[list[int], Field(pattern='abc')]
                    if 'type' in ve.errors()[0]['type']:
                        raise TypeError(
                            f"Unable to apply constraint '{constraint}' to supplied value {value} for schema of type '{schema_type}'"  # noqa: B023
                        )
                    raise ve
                return x

            chain_schema_steps.append(
                cs.no_info_wrap_validator_function(
                    _apply_constraint_with_incompatibility_info, cs.str_schema(**{constraint: value})
                )
            )
        elif constraint in NUMERIC_VALIDATOR_LOOKUP:
            if constraint in LENGTH_CONSTRAINTS:
                # pick the JSON-schema keyword matching the *inner* schema's type:
                # minItems/maxItems for lists, minLength/maxLength otherwise
                inner_schema = schema
                while inner_schema['type'] in {'function-before', 'function-wrap', 'function-after'}:
                    inner_schema = inner_schema['schema']  # type: ignore
                inner_schema_type = inner_schema['type']
                if inner_schema_type == 'list' or (
                    inner_schema_type == 'json-or-python' and inner_schema['json_schema']['type'] == 'list'  # type: ignore
                ):
                    js_constraint_key = 'minItems' if constraint == 'min_length' else 'maxItems'
                else:
                    js_constraint_key = 'minLength' if constraint == 'min_length' else 'maxLength'
            else:
                js_constraint_key = constraint

            schema = cs.no_info_after_validator_function(
                partial(NUMERIC_VALIDATOR_LOOKUP[constraint], **{constraint: value}), schema
            )
            # record the constraint in the JSON schema metadata, merging with any existing updates
            metadata = schema.get('metadata', {})
            if (existing_json_schema_updates := metadata.get('pydantic_js_updates')) is not None:
                metadata['pydantic_js_updates'] = {
                    **existing_json_schema_updates,
                    **{js_constraint_key: as_jsonable_value(value)},
                }
            else:
                metadata['pydantic_js_updates'] = {js_constraint_key: as_jsonable_value(value)}
            schema['metadata'] = metadata
        elif constraint == 'allow_inf_nan' and value is False:
            schema = cs.no_info_after_validator_function(
                forbid_inf_nan_check,
                schema,
            )
        else:
            # It's rare that we'd get here, but it's possible if we add a new constraint and forget to handle it
            # Most constraint errors are caught at runtime during attempted application
            raise RuntimeError(f"Unable to apply constraint '{constraint}' to schema of type '{schema_type}'")

    for annotation in other_metadata:
        if (annotation_type := type(annotation)) in (at_to_constraint_map := _get_at_to_constraint_map()):
            constraint = at_to_constraint_map[annotation_type]
            validator = NUMERIC_VALIDATOR_LOOKUP.get(constraint)
            if validator is None:
                raise ValueError(f'Unknown constraint {constraint}')
            # Fix: bind the constraint as a *keyword* argument (`**{...}`), matching the
            # `partial(NUMERIC_VALIDATOR_LOOKUP[constraint], **{constraint: value})` call above.
            # Passing the dict positionally would bind it to the validator's value parameter.
            schema = cs.no_info_after_validator_function(
                partial(validator, **{constraint: getattr(annotation, constraint)}), schema
            )
            continue
        elif isinstance(annotation, (at.Predicate, at.Not)):
            predicate_name = f'{annotation.func.__qualname__}' if hasattr(annotation.func, '__qualname__') else ''

            def val_func(v: Any) -> Any:
                predicate_satisfied = annotation.func(v)  # noqa: B023

                # annotation.func may also raise an exception, let it pass through
                if isinstance(annotation, at.Predicate):  # noqa: B023
                    if not predicate_satisfied:
                        raise PydanticCustomError(
                            'predicate_failed',
                            f'Predicate {predicate_name} failed',  # type: ignore  # noqa: B023
                        )
                else:
                    if predicate_satisfied:
                        raise PydanticCustomError(
                            'not_operation_failed',
                            f'Not of {predicate_name} failed',  # type: ignore  # noqa: B023
                        )

                return v

            schema = cs.no_info_after_validator_function(val_func, schema)
        else:
            # ignore any other unknown metadata
            return None

    if chain_schema_steps:
        chain_schema_steps = [schema] + chain_schema_steps
        return cs.chain_schema(chain_schema_steps)

    return schema
def collect_known_metadata(annotations: Iterable[Any]) -> tuple[dict[str, Any], list[Any]]:
    """Split `annotations` into known metadata and unknown annotations.

    Args:
        annotations: An iterable of annotations.

    Returns:
        A tuple contains a dict of known metadata and a list of unknown annotations.

    Example:
        ```python
        from annotated_types import Gt, Len

        from pydantic._internal._known_annotated_metadata import collect_known_metadata

        print(collect_known_metadata([Gt(1), Len(42), ...]))
        #> ({'gt': 1, 'min_length': 42}, [Ellipsis])
        ```
    """
    known: dict[str, Any] = {}
    unknown: list[Any] = []

    for annotation in expand_grouped_metadata(annotations):
        annotation_type = type(annotation)
        at_to_constraint_map = _get_at_to_constraint_map()
        # isinstance(annotation, PydanticMetadata) also covers ._fields:_PydanticGeneralMetadata
        if isinstance(annotation, PydanticMetadata):
            # we don't use dataclasses.asdict because that recursively calls asdict on the field values
            known.update(annotation.__dict__)
        elif annotation_type in at_to_constraint_map:
            constraint = at_to_constraint_map[annotation_type]
            known[constraint] = getattr(annotation, constraint)
        elif isinstance(annotation, type) and issubclass(annotation, PydanticMetadata):
            # also support PydanticMetadata classes being used without initialisation,
            # e.g. `Annotated[int, Strict]` as well as `Annotated[int, Strict()]`
            known.update({k: v for k, v in vars(annotation).items() if not k.startswith('_')})
        else:
            unknown.append(annotation)

    # Nones can sneak in but pydantic-core will reject them
    # it'd be nice to clean things up so we don't put in None (we probably don't _need_ to, it was just easier)
    # but this is simple enough to kick that can down the road
    return {k: v for k, v in known.items() if v is not None}, unknown
def check_metadata(metadata: dict[str, Any], allowed: Iterable[str], source_type: Any) -> None:
    """A small utility function to validate that the given metadata can be applied to the target.
    More than saving lines of code, this gives us a consistent error message for all of our internal implementations.

    Args:
        metadata: A dict of metadata.
        allowed: An iterable of allowed metadata.
        source_type: The source type.

    Raises:
        TypeError: If there is metadatas that can't be applied on source type.
    """
    extra = metadata.keys() - set(allowed)
    if not extra:
        return
    formatted = ', '.join(f'{key!r}' for key in extra)
    raise TypeError(f'The following constraints cannot be applied to {source_type!r}: {formatted}')