Coverage for pydantic/schema.py: 100.00%
472 statements
coverage.py v7.6.1, created at 2024-08-15 13:26 +0000

import re
import warnings
from collections import defaultdict
from dataclasses import is_dataclass
from datetime import date, datetime, time, timedelta
from decimal import Decimal
from enum import Enum
from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
from pathlib import Path
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    ForwardRef,
    FrozenSet,
    Generic,
    Iterable,
    List,
    Optional,
    Pattern,
    Sequence,
    Set,
    Tuple,
    Type,
    TypeVar,
    Union,
    cast,
)
from uuid import UUID

from typing_extensions import Annotated, Literal

from pydantic.fields import (
    MAPPING_LIKE_SHAPES,
    SHAPE_DEQUE,
    SHAPE_FROZENSET,
    SHAPE_GENERIC,
    SHAPE_ITERABLE,
    SHAPE_LIST,
    SHAPE_SEQUENCE,
    SHAPE_SET,
    SHAPE_SINGLETON,
    SHAPE_TUPLE,
    SHAPE_TUPLE_ELLIPSIS,
    FieldInfo,
    ModelField,
)
from pydantic.json import pydantic_encoder
from pydantic.networks import AnyUrl, EmailStr
from pydantic.types import (
    ConstrainedDecimal,
    ConstrainedFloat,
    ConstrainedFrozenSet,
    ConstrainedInt,
    ConstrainedList,
    ConstrainedSet,
    ConstrainedStr,
    SecretBytes,
    SecretStr,
    StrictBytes,
    StrictStr,
    conbytes,
    condecimal,
    confloat,
    confrozenset,
    conint,
    conlist,
    conset,
    constr,
)
from pydantic.typing import (
    all_literal_values,
    get_args,
    get_origin,
    get_sub_types,
    is_callable_type,
    is_literal_type,
    is_namedtuple,
    is_none_type,
    is_union,
)
from pydantic.utils import ROOT_KEY, get_model, lenient_issubclass

if TYPE_CHECKING:
    from pydantic.dataclasses import Dataclass
    from pydantic.main import BaseModel

default_prefix = '#/definitions/'
default_ref_template = '#/definitions/{model}'

TypeModelOrEnum = Union[Type['BaseModel'], Type[Enum]]
TypeModelSet = Set[TypeModelOrEnum]


def _apply_modify_schema(
    modify_schema: Callable[..., None], field: Optional[ModelField], field_schema: Dict[str, Any]
) -> None:
    from inspect import signature

    sig = signature(modify_schema)
    args = set(sig.parameters.keys())
    if 'field' in args or 'kwargs' in args:
        modify_schema(field_schema, field=field)
    else:
        modify_schema(field_schema)
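
# Example (illustrative sketch, not part of the original module): a custom type whose
# ``__modify_schema__`` accepts the optional ``field`` argument -- the reason the
# signature is inspected above before deciding how to call it. ``Username`` is hypothetical.
#
#     class Username(str):
#         @classmethod
#         def __modify_schema__(cls, field_schema, field=None):
#             field_schema.update(type='string', minLength=3)
#             if field is not None and field.field_info.description:
#                 field_schema['description'] = field.field_info.description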


def schema(
    models: Sequence[Union[Type['BaseModel'], Type['Dataclass']]],
    *,
    by_alias: bool = True,
    title: Optional[str] = None,
    description: Optional[str] = None,
    ref_prefix: Optional[str] = None,
    ref_template: str = default_ref_template,
) -> Dict[str, Any]:
    """
    Process a list of models and generate a single JSON Schema with all of them defined in the ``definitions``
    top-level JSON key, including their sub-models.

    :param models: a list of models to include in the generated JSON Schema
    :param by_alias: generate the schemas using the aliases defined, if any
    :param title: title for the generated schema that includes the definitions
    :param description: description for the generated schema
    :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``; if None, the default of
      ``#/definitions/`` will be used. Update it if you want the schemas to reference the definitions somewhere
      else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the
      top-level key ``definitions``, so you can extract them from there, but all the references will use the set
      prefix.
    :param ref_template: use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful
      for references that cannot be represented by ``ref_prefix``, such as a definition stored in another file. For
      a sibling JSON file in a ``/schemas`` directory use ``"/schemas/{model}.json#"``.
    :return: dict with the JSON Schema with a ``definitions`` top-level key including the schema definitions for
      the models and sub-models passed in ``models``.
    """
    clean_models = [get_model(model) for model in models]
    flat_models = get_flat_models_from_models(clean_models)
    model_name_map = get_model_name_map(flat_models)
    definitions = {}
    output_schema: Dict[str, Any] = {}
    if title:
        output_schema['title'] = title
    if description:
        output_schema['description'] = description
    for model in clean_models:
        m_schema, m_definitions, m_nested_models = model_process_schema(
            model,
            by_alias=by_alias,
            model_name_map=model_name_map,
            ref_prefix=ref_prefix,
            ref_template=ref_template,
        )
        definitions.update(m_definitions)
        model_name = model_name_map[model]
        definitions[model_name] = m_schema
    if definitions:
        output_schema['definitions'] = definitions
    return output_schema
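
# Example (illustrative sketch; ``Pet`` and ``Person`` are hypothetical models, not part
# of this module): building one schema document covering several models at once.
#
#     from typing import List
#     from pydantic import BaseModel
#     from pydantic.schema import schema
#
#     class Pet(BaseModel):
#         name: str
#
#     class Person(BaseModel):
#         pets: List[Pet]
#
#     top_level = schema([Person], title='My API Models')
#     # top_level['definitions'] contains both 'Person' and 'Pet';
#     # the items of Person.pets are emitted as {'$ref': '#/definitions/Pet'}.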


def model_schema(
    model: Union[Type['BaseModel'], Type['Dataclass']],
    by_alias: bool = True,
    ref_prefix: Optional[str] = None,
    ref_template: str = default_ref_template,
) -> Dict[str, Any]:
    """
    Generate a JSON Schema for one model, with all the sub-models defined in the ``definitions`` top-level
    JSON key.

    :param model: a Pydantic model (a class that inherits from BaseModel)
    :param by_alias: generate the schemas using the aliases defined, if any
    :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``; if None, the default of
      ``#/definitions/`` will be used. Update it if you want the schemas to reference the definitions somewhere
      else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the
      top-level key ``definitions``, so you can extract them from there, but all the references will use the set
      prefix.
    :param ref_template: use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for
      references that cannot be represented by ``ref_prefix``, such as a definition stored in another file. For a
      sibling JSON file in a ``/schemas`` directory use ``"/schemas/{model}.json#"``.
    :return: dict with the JSON Schema for the passed ``model``
    """
    model = get_model(model)
    flat_models = get_flat_models_from_model(model)
    model_name_map = get_model_name_map(flat_models)
    model_name = model_name_map[model]
    m_schema, m_definitions, nested_models = model_process_schema(
        model, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template
    )
    if model_name in nested_models:
        # model_name is in nested_models, so it has circular references
        m_definitions[model_name] = m_schema
        m_schema = get_schema_ref(model_name, ref_prefix, ref_template, False)
    if m_definitions:
        m_schema.update({'definitions': m_definitions})
    return m_schema
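
# Example (illustrative sketch; ``Person``/``Pet`` are the hypothetical models from the
# previous example): the single-model variant, which is what ``BaseModel.schema()`` uses.
#
#     single = model_schema(Person, ref_template='#/components/schemas/{model}')
#     # single['definitions']['Pet'] holds the sub-model, and the reference to it
#     # is rendered as {'$ref': '#/components/schemas/Pet'}.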


def get_field_info_schema(field: ModelField, schema_overrides: bool = False) -> Tuple[Dict[str, Any], bool]:
    # If no title is explicitly set, we don't set title in the schema for enums.
    # The behaviour is the same as for a `BaseModel` reference, where the default title
    # is in the definitions part of the schema.
    schema_: Dict[str, Any] = {}
    if field.field_info.title or not lenient_issubclass(field.type_, Enum):
        schema_['title'] = field.field_info.title or field.alias.title().replace('_', ' ')

    if field.field_info.title:
        schema_overrides = True

    if field.field_info.description:
        schema_['description'] = field.field_info.description
        schema_overrides = True

    if not field.required and field.default is not None and not is_callable_type(field.outer_type_):
        schema_['default'] = encode_default(field.default)
        schema_overrides = True

    return schema_, schema_overrides


def field_schema(
    field: ModelField,
    *,
    by_alias: bool = True,
    model_name_map: Dict[TypeModelOrEnum, str],
    ref_prefix: Optional[str] = None,
    ref_template: str = default_ref_template,
    known_models: Optional[TypeModelSet] = None,
) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
    """
    Process a Pydantic field and return a tuple with a JSON Schema for it as the first item.
    Also return a dictionary of definitions with models as keys and their schemas as values. If the passed field
    is a model and has sub-models, and those sub-models don't have overrides (such as ``title``, ``default``, etc.),
    they will be included in the definitions and referenced in the schema instead of included recursively.

    :param field: a Pydantic ``ModelField``
    :param by_alias: use the defined alias (if any) in the returned schema
    :param model_name_map: used to generate the JSON Schema references to other models included in the definitions
    :param ref_prefix: the JSON Pointer prefix to use for references to other schemas; if None, the default of
      #/definitions/ will be used
    :param ref_template: use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for
      references that cannot be represented by ``ref_prefix``, such as a definition stored in another file. For a
      sibling JSON file in a ``/schemas`` directory use ``"/schemas/{model}.json#"``.
    :param known_models: used to solve circular references
    :return: tuple of the schema for this field and additional definitions
    """
    s, schema_overrides = get_field_info_schema(field)

    validation_schema = get_field_schema_validations(field)
    if validation_schema:
        s.update(validation_schema)
        schema_overrides = True

    f_schema, f_definitions, f_nested_models = field_type_schema(
        field,
        by_alias=by_alias,
        model_name_map=model_name_map,
        schema_overrides=schema_overrides,
        ref_prefix=ref_prefix,
        ref_template=ref_template,
        known_models=known_models or set(),
    )

    # $ref will only be returned when there are no schema_overrides
    if '$ref' in f_schema:
        return f_schema, f_definitions, f_nested_models
    else:
        s.update(f_schema)
        return s, f_definitions, f_nested_models


numeric_types = (int, float, Decimal)
_str_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = (
    ('max_length', numeric_types, 'maxLength'),
    ('min_length', numeric_types, 'minLength'),
    ('regex', str, 'pattern'),
)

_numeric_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = (
    ('gt', numeric_types, 'exclusiveMinimum'),
    ('lt', numeric_types, 'exclusiveMaximum'),
    ('ge', numeric_types, 'minimum'),
    ('le', numeric_types, 'maximum'),
    ('multiple_of', numeric_types, 'multipleOf'),
)


def get_field_schema_validations(field: ModelField) -> Dict[str, Any]:
    """
    Get the JSON Schema validation keywords for a ``field`` with an annotation of
    a Pydantic ``FieldInfo`` with validation arguments.
    """
    f_schema: Dict[str, Any] = {}

    if lenient_issubclass(field.type_, Enum):
        # schema is already updated by `enum_process_schema`; just update with field extra
        if field.field_info.extra:
            f_schema.update(field.field_info.extra)
        return f_schema

    if lenient_issubclass(field.type_, (str, bytes)):
        for attr_name, t, keyword in _str_types_attrs:
            attr = getattr(field.field_info, attr_name, None)
            if isinstance(attr, t):
                f_schema[keyword] = attr
    if lenient_issubclass(field.type_, numeric_types) and not issubclass(field.type_, bool):
        for attr_name, t, keyword in _numeric_types_attrs:
            attr = getattr(field.field_info, attr_name, None)
            if isinstance(attr, t):
                f_schema[keyword] = attr
    if field.field_info is not None and field.field_info.const:
        f_schema['const'] = field.default
    if field.field_info.extra:
        f_schema.update(field.field_info.extra)
    modify_schema = getattr(field.outer_type_, '__modify_schema__', None)
    if modify_schema:
        _apply_modify_schema(modify_schema, field, f_schema)
    return f_schema
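
# Example (illustrative sketch): how the attribute tables above translate ``Field``
# constraints into JSON Schema keywords; ``Item`` is a hypothetical model.
#
#     from pydantic import BaseModel, Field
#
#     class Item(BaseModel):
#         name: str = Field('unnamed', min_length=1, max_length=50)
#         price: float = Field(..., gt=0)
#
#     # get_field_schema_validations(Item.__fields__['name'])
#     #   -> {'maxLength': 50, 'minLength': 1}
#     # get_field_schema_validations(Item.__fields__['price'])
#     #   -> {'exclusiveMinimum': 0}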


def get_model_name_map(unique_models: TypeModelSet) -> Dict[TypeModelOrEnum, str]:
    """
    Process a set of models and generate unique names for them to be used as keys in the JSON Schema
    definitions. By default the names are the same as the class name. But if two models in different Python
    modules have the same name (e.g. "users.Model" and "items.Model"), the generated names will be
    based on the Python module path for those conflicting models to prevent name collisions.

    :param unique_models: a Python set of models
    :return: dict mapping models to names
    """
    name_model_map = {}
    conflicting_names: Set[str] = set()
    for model in unique_models:
        model_name = normalize_name(model.__name__)
        if model_name in conflicting_names:
            model_name = get_long_model_name(model)
            name_model_map[model_name] = model
        elif model_name in name_model_map:
            conflicting_names.add(model_name)
            conflicting_model = name_model_map.pop(model_name)
            name_model_map[get_long_model_name(conflicting_model)] = conflicting_model
            name_model_map[get_long_model_name(model)] = model
        else:
            name_model_map[model_name] = model
    return {v: k for k, v in name_model_map.items()}
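
# Example (illustrative sketch): two hypothetical models both named ``Model``, living in
# modules ``users`` and ``items``, fall back to their long, module-qualified names.
#
#     # get_model_name_map({users.Model, items.Model})
#     #   -> {users.Model: 'users__Model', items.Model: 'items__Model'}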


def get_flat_models_from_model(model: Type['BaseModel'], known_models: Optional[TypeModelSet] = None) -> TypeModelSet:
    """
    Take a single ``model`` and generate a set with itself and all the sub-models in the tree. I.e. if you pass
    model ``Foo`` (subclass of Pydantic ``BaseModel``) as ``model``, and it has a field of type ``Bar`` (also
    subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also subclass of ``BaseModel``),
    the return value will be ``set([Foo, Bar, Baz])``.

    :param model: a Pydantic ``BaseModel`` subclass
    :param known_models: used to solve circular references
    :return: a set with the initial model and all its sub-models
    """
    known_models = known_models or set()
    flat_models: TypeModelSet = set()
    flat_models.add(model)
    known_models |= flat_models
    fields = cast(Sequence[ModelField], model.__fields__.values())
    flat_models |= get_flat_models_from_fields(fields, known_models=known_models)
    return flat_models
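
# Example (illustrative sketch, mirroring the docstring above with hypothetical models):
#
#     class Baz(BaseModel):
#         x: int
#
#     class Bar(BaseModel):
#         baz: Baz
#
#     class Foo(BaseModel):
#         bar: Bar
#
#     # get_flat_models_from_model(Foo) -> {Foo, Bar, Baz}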


def get_flat_models_from_field(field: ModelField, known_models: TypeModelSet) -> TypeModelSet:
    """
    Take a single Pydantic ``ModelField`` (from a model) that could have been declared as a subclass of BaseModel
    (so, it could be a submodel), and generate a set with its model and all the sub-models in the tree.
    I.e. if you pass a field that was declared to be of type ``Foo`` (subclass of BaseModel) as ``field``, and that
    model ``Foo`` has a field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of
    type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``.

    :param field: a Pydantic ``ModelField``
    :param known_models: used to solve circular references
    :return: a set with the model used in the declaration for this field, if any, and all its sub-models
    """
    from pydantic.main import BaseModel

    flat_models: TypeModelSet = set()

    field_type = field.type_
    if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel):
        field_type = field_type.__pydantic_model__

    if field.sub_fields and not lenient_issubclass(field_type, BaseModel):
        flat_models |= get_flat_models_from_fields(field.sub_fields, known_models=known_models)
    elif lenient_issubclass(field_type, BaseModel) and field_type not in known_models:
        flat_models |= get_flat_models_from_model(field_type, known_models=known_models)
    elif lenient_issubclass(field_type, Enum):
        flat_models.add(field_type)
    return flat_models


def get_flat_models_from_fields(fields: Sequence[ModelField], known_models: TypeModelSet) -> TypeModelSet:
    """
    Take a list of Pydantic ``ModelField``s (from a model) that could have been declared as subclasses of ``BaseModel``
    (so, any of them could be a submodel), and generate a set with their models and all the sub-models in the tree.
    I.e. if you pass the fields of a model ``Foo`` (subclass of ``BaseModel``) as ``fields``, and one of them has a
    field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also
    subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``.

    :param fields: a list of Pydantic ``ModelField``s
    :param known_models: used to solve circular references
    :return: a set with any model declared in the fields, and all their sub-models
    """
    flat_models: TypeModelSet = set()
    for field in fields:
        flat_models |= get_flat_models_from_field(field, known_models=known_models)
    return flat_models


def get_flat_models_from_models(models: Sequence[Type['BaseModel']]) -> TypeModelSet:
    """
    Take a list of ``models`` and generate a set with them and all their sub-models in their trees. I.e. if you pass
    a list of two models, ``Foo`` and ``Bar``, both subclasses of Pydantic ``BaseModel`` as models, and ``Bar`` has
    a field of type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``.
    """
    flat_models: TypeModelSet = set()
    for model in models:
        flat_models |= get_flat_models_from_model(model)
    return flat_models


def get_long_model_name(model: TypeModelOrEnum) -> str:
    return f'{model.__module__}__{model.__qualname__}'.replace('.', '__')


def field_type_schema(
    field: ModelField,
    *,
    by_alias: bool,
    model_name_map: Dict[TypeModelOrEnum, str],
    ref_template: str,
    schema_overrides: bool = False,
    ref_prefix: Optional[str] = None,
    known_models: TypeModelSet,
) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
    """
    Used by ``field_schema()``; you should probably be using that function instead.

    Take a single ``field`` and generate the schema for its type only, not including additional
    information such as title, etc. Also return additional schema definitions, from sub-models.
    """
    from pydantic.main import BaseModel  # noqa: F811

    definitions = {}
    nested_models: Set[str] = set()
    f_schema: Dict[str, Any]
    if field.shape in {
        SHAPE_LIST,
        SHAPE_TUPLE_ELLIPSIS,
        SHAPE_SEQUENCE,
        SHAPE_SET,
        SHAPE_FROZENSET,
        SHAPE_ITERABLE,
        SHAPE_DEQUE,
    }:
        items_schema, f_definitions, f_nested_models = field_singleton_schema(
            field,
            by_alias=by_alias,
            model_name_map=model_name_map,
            ref_prefix=ref_prefix,
            ref_template=ref_template,
            known_models=known_models,
        )
        definitions.update(f_definitions)
        nested_models.update(f_nested_models)
        f_schema = {'type': 'array', 'items': items_schema}
        if field.shape in {SHAPE_SET, SHAPE_FROZENSET}:
            f_schema['uniqueItems'] = True

    elif field.shape in MAPPING_LIKE_SHAPES:
        f_schema = {'type': 'object'}
        key_field = cast(ModelField, field.key_field)
        regex = getattr(key_field.type_, 'regex', None)
        items_schema, f_definitions, f_nested_models = field_singleton_schema(
            field,
            by_alias=by_alias,
            model_name_map=model_name_map,
            ref_prefix=ref_prefix,
            ref_template=ref_template,
            known_models=known_models,
        )
        definitions.update(f_definitions)
        nested_models.update(f_nested_models)
        if regex:
            # Dict keys have a regex pattern
            # items_schema might be a schema or empty dict, add it either way
            f_schema['patternProperties'] = {ConstrainedStr._get_pattern(regex): items_schema}
        if items_schema:
            # The dict values are not simply Any, so they need a schema
            f_schema['additionalProperties'] = items_schema
    elif field.shape == SHAPE_TUPLE or (field.shape == SHAPE_GENERIC and not issubclass(field.type_, BaseModel)):
        sub_schema = []
        sub_fields = cast(List[ModelField], field.sub_fields)
        for sf in sub_fields:
            sf_schema, sf_definitions, sf_nested_models = field_type_schema(
                sf,
                by_alias=by_alias,
                model_name_map=model_name_map,
                ref_prefix=ref_prefix,
                ref_template=ref_template,
                known_models=known_models,
            )
            definitions.update(sf_definitions)
            nested_models.update(sf_nested_models)
            sub_schema.append(sf_schema)

        sub_fields_len = len(sub_fields)
        if field.shape == SHAPE_GENERIC:
            all_of_schemas = sub_schema[0] if sub_fields_len == 1 else {'type': 'array', 'items': sub_schema}
            f_schema = {'allOf': [all_of_schemas]}
        else:
            f_schema = {
                'type': 'array',
                'minItems': sub_fields_len,
                'maxItems': sub_fields_len,
            }
            if sub_fields_len >= 1:
                f_schema['items'] = sub_schema
    else:
        assert field.shape in {SHAPE_SINGLETON, SHAPE_GENERIC}, field.shape
        f_schema, f_definitions, f_nested_models = field_singleton_schema(
            field,
            by_alias=by_alias,
            model_name_map=model_name_map,
            schema_overrides=schema_overrides,
            ref_prefix=ref_prefix,
            ref_template=ref_template,
            known_models=known_models,
        )
        definitions.update(f_definitions)
        nested_models.update(f_nested_models)

    # check field type to avoid repeated calls to the same __modify_schema__ method
    if field.type_ != field.outer_type_:
        if field.shape == SHAPE_GENERIC:
            field_type = field.type_
        else:
            field_type = field.outer_type_
        modify_schema = getattr(field_type, '__modify_schema__', None)
        if modify_schema:
            _apply_modify_schema(modify_schema, field, f_schema)
    return f_schema, definitions, nested_models


def model_process_schema(
    model: TypeModelOrEnum,
    *,
    by_alias: bool = True,
    model_name_map: Dict[TypeModelOrEnum, str],
    ref_prefix: Optional[str] = None,
    ref_template: str = default_ref_template,
    known_models: Optional[TypeModelSet] = None,
    field: Optional[ModelField] = None,
) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
    """
    Used by ``model_schema()``; you should probably be using that function instead.

    Take a single ``model`` and generate its schema. Also return additional schema definitions, from sub-models. The
    sub-models of the returned schema will be referenced, but their definitions will not be included in the schema. All
    the definitions are returned as the second value.
    """
    from inspect import getdoc, signature

    known_models = known_models or set()
    if lenient_issubclass(model, Enum):
        model = cast(Type[Enum], model)
        s = enum_process_schema(model, field=field)
        return s, {}, set()
    model = cast(Type['BaseModel'], model)
    s = {'title': model.__config__.title or model.__name__}
    doc = getdoc(model)
    if doc:
        s['description'] = doc
    known_models.add(model)
    m_schema, m_definitions, nested_models = model_type_schema(
        model,
        by_alias=by_alias,
        model_name_map=model_name_map,
        ref_prefix=ref_prefix,
        ref_template=ref_template,
        known_models=known_models,
    )
    s.update(m_schema)
    schema_extra = model.__config__.schema_extra
    if callable(schema_extra):
        if len(signature(schema_extra).parameters) == 1:
            schema_extra(s)
        else:
            schema_extra(s, model)
    else:
        s.update(schema_extra)
    return s, m_definitions, nested_models
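
# Example (illustrative sketch): one of the ``schema_extra`` call styles handled above --
# a callable taking two arguments (a one-argument callable and a plain dict are also
# accepted). ``Pet`` is a hypothetical model.
#
#     class Pet(BaseModel):
#         name: str
#
#         class Config:
#             @staticmethod
#             def schema_extra(schema, model):
#                 schema['examples'] = [{'name': 'Rex'}]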


def model_type_schema(
    model: Type['BaseModel'],
    *,
    by_alias: bool,
    model_name_map: Dict[TypeModelOrEnum, str],
    ref_template: str,
    ref_prefix: Optional[str] = None,
    known_models: TypeModelSet,
) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
    """
    You should probably be using ``model_schema()``; this function is indirectly used by that function.

    Take a single ``model`` and generate the schema for its type only, not including additional
    information such as title, etc. Also return additional schema definitions, from sub-models.
    """
    properties = {}
    required = []
    definitions: Dict[str, Any] = {}
    nested_models: Set[str] = set()
    for k, f in model.__fields__.items():
        try:
            f_schema, f_definitions, f_nested_models = field_schema(
                f,
                by_alias=by_alias,
                model_name_map=model_name_map,
                ref_prefix=ref_prefix,
                ref_template=ref_template,
                known_models=known_models,
            )
        except SkipField as skip:
            warnings.warn(skip.message, UserWarning)
            continue
        definitions.update(f_definitions)
        nested_models.update(f_nested_models)
        if by_alias:
            properties[f.alias] = f_schema
            if f.required:
                required.append(f.alias)
        else:
            properties[k] = f_schema
            if f.required:
                required.append(k)
    if ROOT_KEY in properties:
        out_schema = properties[ROOT_KEY]
        out_schema['title'] = model.__config__.title or model.__name__
    else:
        out_schema = {'type': 'object', 'properties': properties}
        if required:
            out_schema['required'] = required
    if model.__config__.extra == 'forbid':
        out_schema['additionalProperties'] = False
    return out_schema, definitions, nested_models


def enum_process_schema(enum: Type[Enum], *, field: Optional[ModelField] = None) -> Dict[str, Any]:
    """
    Take a single `enum` and generate its schema.

    This is similar to the `model_process_schema` function, but applies to ``Enum`` objects.
    """
    import inspect

    schema_: Dict[str, Any] = {
        'title': enum.__name__,
        # Python assigns all enums a default docstring value of 'An enumeration', so
        # all enums will have a description field even if not explicitly provided.
        'description': inspect.cleandoc(enum.__doc__ or 'An enumeration.'),
        # Add enum values and the enum field type to the schema.
        'enum': [item.value for item in cast(Iterable[Enum], enum)],
    }

    add_field_type_to_schema(enum, schema_)

    modify_schema = getattr(enum, '__modify_schema__', None)
    if modify_schema:
        _apply_modify_schema(modify_schema, field, schema_)

    return schema_
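
# Example (illustrative sketch): the schema produced for a small hypothetical enum.
#
#     class Color(str, Enum):
#         RED = 'red'
#         BLUE = 'blue'
#
#     # enum_process_schema(Color)
#     #   -> {'title': 'Color', 'description': 'An enumeration.',
#     #       'enum': ['red', 'blue'], 'type': 'string'}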


def field_singleton_sub_fields_schema(
    field: ModelField,
    *,
    by_alias: bool,
    model_name_map: Dict[TypeModelOrEnum, str],
    ref_template: str,
    schema_overrides: bool = False,
    ref_prefix: Optional[str] = None,
    known_models: TypeModelSet,
) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
    """
    This function is indirectly used by ``field_schema()``; you should probably be using that function instead.

    Take a list of Pydantic ``ModelField`` from the declaration of a type with parameters, and generate their
    schema. I.e., fields used as "type parameters", like ``str`` and ``int`` in ``Tuple[str, int]``.
    """
    sub_fields = cast(List[ModelField], field.sub_fields)
    definitions = {}
    nested_models: Set[str] = set()
    if len(sub_fields) == 1:
        return field_type_schema(
            sub_fields[0],
            by_alias=by_alias,
            model_name_map=model_name_map,
            schema_overrides=schema_overrides,
            ref_prefix=ref_prefix,
            ref_template=ref_template,
            known_models=known_models,
        )
    else:
        s: Dict[str, Any] = {}
        # https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#discriminator-object
        field_has_discriminator: bool = field.discriminator_key is not None
        if field_has_discriminator:
            assert field.sub_fields_mapping is not None

            discriminator_models_refs: Dict[str, Union[str, Dict[str, Any]]] = {}

            for discriminator_value, sub_field in field.sub_fields_mapping.items():
                if isinstance(discriminator_value, Enum):
                    discriminator_value = str(discriminator_value.value)
                # sub_field is either a `BaseModel` or directly an `Annotated` `Union` of many
                if is_union(get_origin(sub_field.type_)):
                    sub_models = get_sub_types(sub_field.type_)
                    discriminator_models_refs[discriminator_value] = {
                        model_name_map[sub_model]: get_schema_ref(
                            model_name_map[sub_model], ref_prefix, ref_template, False
                        )
                        for sub_model in sub_models
                    }
                else:
                    sub_field_type = sub_field.type_
                    if hasattr(sub_field_type, '__pydantic_model__'):
                        sub_field_type = sub_field_type.__pydantic_model__

                    discriminator_model_name = model_name_map[sub_field_type]
                    discriminator_model_ref = get_schema_ref(discriminator_model_name, ref_prefix, ref_template, False)
                    discriminator_models_refs[discriminator_value] = discriminator_model_ref['$ref']

            s['discriminator'] = {
                'propertyName': field.discriminator_alias if by_alias else field.discriminator_key,
                'mapping': discriminator_models_refs,
            }

        sub_field_schemas = []
        for sf in sub_fields:
            sub_schema, sub_definitions, sub_nested_models = field_type_schema(
                sf,
                by_alias=by_alias,
                model_name_map=model_name_map,
                schema_overrides=schema_overrides,
                ref_prefix=ref_prefix,
                ref_template=ref_template,
                known_models=known_models,
            )
            definitions.update(sub_definitions)
            if schema_overrides and 'allOf' in sub_schema:
                # if the sub_field is a referenced schema we only need the referenced
                # object. Otherwise we will end up with several allOf inside anyOf/oneOf.
                # See https://github.com/pydantic/pydantic/issues/1209
                sub_schema = sub_schema['allOf'][0]

            if sub_schema.keys() == {'discriminator', 'oneOf'}:
                # we don't want discriminator information inside oneOf choices, this is dealt with elsewhere
                sub_schema.pop('discriminator')
            sub_field_schemas.append(sub_schema)
            nested_models.update(sub_nested_models)
        s['oneOf' if field_has_discriminator else 'anyOf'] = sub_field_schemas
        return s, definitions, nested_models


# Order is important, e.g. subclasses of str must go before str
# this is used only for standard library types, custom types should use __modify_schema__ instead
field_class_to_schema: Tuple[Tuple[Any, Dict[str, Any]], ...] = (
    (Path, {'type': 'string', 'format': 'path'}),
    (datetime, {'type': 'string', 'format': 'date-time'}),
    (date, {'type': 'string', 'format': 'date'}),
    (time, {'type': 'string', 'format': 'time'}),
    (timedelta, {'type': 'number', 'format': 'time-delta'}),
    (IPv4Network, {'type': 'string', 'format': 'ipv4network'}),
    (IPv6Network, {'type': 'string', 'format': 'ipv6network'}),
    (IPv4Interface, {'type': 'string', 'format': 'ipv4interface'}),
    (IPv6Interface, {'type': 'string', 'format': 'ipv6interface'}),
    (IPv4Address, {'type': 'string', 'format': 'ipv4'}),
    (IPv6Address, {'type': 'string', 'format': 'ipv6'}),
    (Pattern, {'type': 'string', 'format': 'regex'}),
    (str, {'type': 'string'}),
    (bytes, {'type': 'string', 'format': 'binary'}),
    (bool, {'type': 'boolean'}),
    (int, {'type': 'integer'}),
    (float, {'type': 'number'}),
    (Decimal, {'type': 'number'}),
    (UUID, {'type': 'string', 'format': 'uuid'}),
    (dict, {'type': 'object'}),
    (list, {'type': 'array', 'items': {}}),
    (tuple, {'type': 'array', 'items': {}}),
    (set, {'type': 'array', 'items': {}, 'uniqueItems': True}),
    (frozenset, {'type': 'array', 'items': {}, 'uniqueItems': True}),
)

json_scheme = {'type': 'string', 'format': 'json-string'}


def add_field_type_to_schema(field_type: Any, schema_: Dict[str, Any]) -> None:
    """
    Update the given `schema` with the type-specific metadata for the given `field_type`.

    This function looks through `field_class_to_schema` for a class that matches the given `field_type`,
    and then modifies the given `schema` with the information from that type.
    """
    for type_, t_schema in field_class_to_schema:
        # Fallback for `typing.Pattern` and `re.Pattern` as they are not valid classes
        if lenient_issubclass(field_type, type_) or field_type is type_ is Pattern:
            schema_.update(t_schema)
            break
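
# Example (illustrative sketch): how the lookup table above fills in type metadata.
#
#     s = {}
#     add_field_type_to_schema(datetime, s)
#     # s -> {'type': 'string', 'format': 'date-time'}
#
#     s = {}
#     add_field_type_to_schema(bool, s)   # bool is listed before int, so it wins here
#     # s -> {'type': 'boolean'}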


def get_schema_ref(name: str, ref_prefix: Optional[str], ref_template: str, schema_overrides: bool) -> Dict[str, Any]:
    if ref_prefix:
        schema_ref = {'$ref': ref_prefix + name}
    else:
        schema_ref = {'$ref': ref_template.format(model=name)}
    return {'allOf': [schema_ref]} if schema_overrides else schema_ref
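
# Example (illustrative sketch): the two reference styles, with and without overrides.
#
#     get_schema_ref('Pet', None, default_ref_template, False)
#     #   -> {'$ref': '#/definitions/Pet'}
#     get_schema_ref('Pet', '#/components/schemas/', default_ref_template, True)
#     #   -> {'allOf': [{'$ref': '#/components/schemas/Pet'}]}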


def field_singleton_schema(  # noqa: C901 (ignore complexity)
    field: ModelField,
    *,
    by_alias: bool,
    model_name_map: Dict[TypeModelOrEnum, str],
    ref_template: str,
    schema_overrides: bool = False,
    ref_prefix: Optional[str] = None,
    known_models: TypeModelSet,
) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
    """
    This function is indirectly used by ``field_schema()``; you should probably be using that function instead.

    Take a single Pydantic ``ModelField``, and return its schema and any additional definitions from sub-models.
    """
    from pydantic.main import BaseModel

    definitions: Dict[str, Any] = {}
    nested_models: Set[str] = set()
    field_type = field.type_

    # Recurse into this field if it contains sub_fields and is NOT a
    # BaseModel OR that BaseModel is a const
    if field.sub_fields and (
        (field.field_info and field.field_info.const) or not lenient_issubclass(field_type, BaseModel)
    ):
        return field_singleton_sub_fields_schema(
            field,
            by_alias=by_alias,
            model_name_map=model_name_map,
            schema_overrides=schema_overrides,
            ref_prefix=ref_prefix,
            ref_template=ref_template,
            known_models=known_models,
        )
    if field_type is Any or field_type is object or field_type.__class__ == TypeVar or get_origin(field_type) is type:
        return {}, definitions, nested_models  # no restrictions
    if is_none_type(field_type):
        return {'type': 'null'}, definitions, nested_models
    if is_callable_type(field_type):
        raise SkipField(f'Callable {field.name} was excluded from schema since JSON schema has no equivalent type.')
    f_schema: Dict[str, Any] = {}
    if field.field_info is not None and field.field_info.const:
        f_schema['const'] = field.default

    if is_literal_type(field_type):
        values = tuple(x.value if isinstance(x, Enum) else x for x in all_literal_values(field_type))

        if len({v.__class__ for v in values}) > 1:
            return field_schema(
                multitypes_literal_field_for_schema(values, field),
                by_alias=by_alias,
                model_name_map=model_name_map,
                ref_prefix=ref_prefix,
                ref_template=ref_template,
                known_models=known_models,
            )

        # All values have the same type
        field_type = values[0].__class__
        f_schema['enum'] = list(values)
        add_field_type_to_schema(field_type, f_schema)
    elif lenient_issubclass(field_type, Enum):
        enum_name = model_name_map[field_type]
        f_schema, schema_overrides = get_field_info_schema(field, schema_overrides)
        f_schema.update(get_schema_ref(enum_name, ref_prefix, ref_template, schema_overrides))
        definitions[enum_name] = enum_process_schema(field_type, field=field)
    elif is_namedtuple(field_type):
        sub_schema, *_ = model_process_schema(
            field_type.__pydantic_model__,
            by_alias=by_alias,
            model_name_map=model_name_map,
            ref_prefix=ref_prefix,
            ref_template=ref_template,
            known_models=known_models,
            field=field,
        )
        items_schemas = list(sub_schema['properties'].values())
        f_schema.update(
            {
                'type': 'array',
                'items': items_schemas,
                'minItems': len(items_schemas),
                'maxItems': len(items_schemas),
            }
        )
    elif not hasattr(field_type, '__pydantic_model__'):
        add_field_type_to_schema(field_type, f_schema)

        modify_schema = getattr(field_type, '__modify_schema__', None)
        if modify_schema:
            _apply_modify_schema(modify_schema, field, f_schema)

    if f_schema:
        return f_schema, definitions, nested_models

    # Handle dataclass-based models
    if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel):
        field_type = field_type.__pydantic_model__

    if issubclass(field_type, BaseModel):
        model_name = model_name_map[field_type]
        if field_type not in known_models:
            sub_schema, sub_definitions, sub_nested_models = model_process_schema(
                field_type,
                by_alias=by_alias,
                model_name_map=model_name_map,
                ref_prefix=ref_prefix,
                ref_template=ref_template,
                known_models=known_models,
                field=field,
            )
            definitions.update(sub_definitions)
            definitions[model_name] = sub_schema
            nested_models.update(sub_nested_models)
        else:
            nested_models.add(model_name)
        schema_ref = get_schema_ref(model_name, ref_prefix, ref_template, schema_overrides)
        return schema_ref, definitions, nested_models

    # For generics with no args
    args = get_args(field_type)
    if args is not None and not args and Generic in field_type.__bases__:
        return f_schema, definitions, nested_models

    raise ValueError(f'Value not declarable with JSON Schema, field: {field}')


def multitypes_literal_field_for_schema(values: Tuple[Any, ...], field: ModelField) -> ModelField:
    """
    To support `Literal` with values of different types, we split it into multiple `Literal`s of the same type,
    e.g. `Literal['qwe', 'asd', 1, 2]` becomes `Union[Literal['qwe', 'asd'], Literal[1, 2]]`
    """
    literal_distinct_types = defaultdict(list)
    for v in values:
        literal_distinct_types[v.__class__].append(v)
    distinct_literals = (Literal[tuple(same_type_values)] for same_type_values in literal_distinct_types.values())

    return ModelField(
        name=field.name,
        type_=Union[tuple(distinct_literals)],  # type: ignore
        class_validators=field.class_validators,
        model_config=field.model_config,
        default=field.default,
        required=field.required,
        alias=field.alias,
        field_info=field.field_info,
    )


def encode_default(dft: Any) -> Any:
    from pydantic.main import BaseModel

    if isinstance(dft, BaseModel) or is_dataclass(dft):
        dft = cast('dict[str, Any]', pydantic_encoder(dft))

    if isinstance(dft, dict):
        return {encode_default(k): encode_default(v) for k, v in dft.items()}
    elif isinstance(dft, Enum):
        return dft.value
    elif isinstance(dft, (int, float, str)):
        return dft
    elif isinstance(dft, (list, tuple)):
        t = dft.__class__
        seq_args = (encode_default(v) for v in dft)
        return t(*seq_args) if is_namedtuple(t) else t(seq_args)
    elif dft is None:
        return None
    else:
        return pydantic_encoder(dft)
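
# Example (illustrative sketch): how a few default values are made JSON-friendly,
# reusing the hypothetical ``Color`` enum from the example above.
#
#     encode_default(Color.RED)                  # -> 'red'
#     encode_default({'size': Decimal('1.5')})   # -> {'size': 1.5}
#     encode_default([1, Color.BLUE])            # -> [1, 'blue']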


_map_types_constraint: Dict[Any, Callable[..., type]] = {int: conint, float: confloat, Decimal: condecimal}
1001def get_annotation_from_field_info( 1abcdefghyzijklmnopABEFGHPQRSTUIqrstuvwxCD
1002 annotation: Any, field_info: FieldInfo, field_name: str, validate_assignment: bool = False
1003) -> Type[Any]:
1004 """
1005 Get an annotation with validation implemented for numbers and strings based on the field_info.
1006 :param annotation: an annotation from a field specification, as ``str``, ``ConstrainedStr``
1007 :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema
1008 :param field_name: name of the field for use in error messages
1009 :param validate_assignment: default False, flag for BaseModel Config value of validate_assignment
1010 :return: the same ``annotation`` if unmodified or a new annotation with validation in place
1011 """
1012 constraints = field_info.get_constraints() 1JKabcdefghyzLMijklmnopABEFGHPQRSTUINOqrstuvwxCD
1013 used_constraints: Set[str] = set() 1JKabcdefghyzLMijklmnopABEFGHPQRSTUINOqrstuvwxCD
1014 if constraints: 1JKabcdefghyzLMijklmnopABEFGHPQRSTUINOqrstuvwxCD
1015 annotation, used_constraints = get_annotation_with_constraints(annotation, field_info) 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1016 if validate_assignment: 1JKabcdefghyzLMijklmnopABEFGHPQRSTUINOqrstuvwxCD
1017 used_constraints.add('allow_mutation') 1JKabcdefghyzLMijklmnopABEFGHPQRSTUINOqrstuvwxCD
1019 unused_constraints = constraints - used_constraints 1JKabcdefghyzLMijklmnopABEFGHPQRSTUINOqrstuvwxCD
1020 if unused_constraints: 1JKabcdefghyzLMijklmnopABEFGHPQRSTUINOqrstuvwxCD
1021 raise ValueError( 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1022 f'On field "{field_name}" the following field constraints are set but not enforced: '
1023 f'{", ".join(unused_constraints)}. '
1024 f'\nFor more details see https://docs.pydantic.dev/usage/schema/#unenforced-field-constraints'
1025 )
1027 return annotation 1JKabcdefghyzLMijklmnopABEFGHPQRSTUINOqrstuvwxCD
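# --- Hedged usage sketch (illustrative only; assumes pydantic v1): numeric and
# string constraints declared on FieldInfo are folded into the annotation, and a
# constraint that cannot be enforced for the annotated type raises ValueError.
from pydantic import Field
from pydantic.schema import get_annotation_from_field_info

constrained = get_annotation_from_field_info(int, Field(gt=0, le=10), 'count')
print(constrained)  # a ConstrainedInt subclass, equivalent to conint(gt=0, le=10)

try:
    get_annotation_from_field_info(bool, Field(max_length=3), 'flag')
except ValueError as exc:
    print(exc)  # reports that max_length is set but not enforced on field "flag"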
1030def get_annotation_with_constraints(annotation: Any, field_info: FieldInfo) -> Tuple[Type[Any], Set[str]]: # noqa: C901 1JKabcdefghyzLMijklmnopABEFGHPQRSTUINOqrstuvwxCD
1031 """
1032 Get an annotation with the applicable constraints implemented for numbers and strings, based on the field_info.
1034 :param annotation: an annotation from a field specification, e.g. ``str`` or ``ConstrainedStr``
1035 :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema
1036 :return: the same ``annotation`` if unmodified, or a new constrained annotation, together with the set of constraint names that were used
1037 """
1038 used_constraints: Set[str] = set() 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1040 def go(type_: Any) -> Type[Any]: 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1041 if ( 1abcdefghijklmnopEFGHIqrstuvwx
1042 is_literal_type(type_)
1043 or isinstance(type_, ForwardRef)
1044 or lenient_issubclass(type_, (ConstrainedList, ConstrainedSet, ConstrainedFrozenSet))
1045 ):
1046 return type_ 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1047 origin = get_origin(type_) 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1048 if origin is not None: 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1049 args: Tuple[Any, ...] = get_args(type_) 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1050 if any(isinstance(a, ForwardRef) for a in args): 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1051 # forward refs cause infinite recursion below
1052 return type_ 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1054 if origin is Annotated: 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1055 return go(args[0]) 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1056 if is_union(origin): 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1057 return Union[tuple(go(a) for a in args)] # type: ignore 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1059 if issubclass(origin, List) and ( 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1060 field_info.min_items is not None
1061 or field_info.max_items is not None
1062 or field_info.unique_items is not None
1063 ):
1064 used_constraints.update({'min_items', 'max_items', 'unique_items'}) 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1065 return conlist( 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1066 go(args[0]),
1067 min_items=field_info.min_items,
1068 max_items=field_info.max_items,
1069 unique_items=field_info.unique_items,
1070 )
1072 if issubclass(origin, Set) and (field_info.min_items is not None or field_info.max_items is not None): 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1073 used_constraints.update({'min_items', 'max_items'}) 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1074 return conset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items) 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1076 if issubclass(origin, FrozenSet) and (field_info.min_items is not None or field_info.max_items is not None): 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1077 used_constraints.update({'min_items', 'max_items'}) 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1078 return confrozenset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items) 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1080 for t in (Tuple, List, Set, FrozenSet, Sequence): 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1081 if issubclass(origin, t): # type: ignore 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1082 return t[tuple(go(a) for a in args)] # type: ignore 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1084 if issubclass(origin, Dict): 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1085 return Dict[args[0], go(args[1])] # type: ignore 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1087 attrs: Optional[Tuple[str, ...]] = None 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1088 constraint_func: Optional[Callable[..., type]] = None 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1089 if isinstance(type_, type): 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1090 if issubclass(type_, (SecretStr, SecretBytes)): 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1091 attrs = ('max_length', 'min_length') 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1093 def constraint_func(**kw: Any) -> Type[Any]: # noqa: F811 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1094 return type(type_.__name__, (type_,), kw) 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1096 elif issubclass(type_, str) and not issubclass(type_, (EmailStr, AnyUrl)): 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1097 attrs = ('max_length', 'min_length', 'regex') 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1098 if issubclass(type_, StrictStr): 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1100 def constraint_func(**kw: Any) -> Type[Any]: 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1101 return type(type_.__name__, (type_,), kw) 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1103 else:
1104 constraint_func = constr 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1105 elif issubclass(type_, bytes): 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1106 attrs = ('max_length', 'min_length', 'regex') 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1107 if issubclass(type_, StrictBytes): 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1109 def constraint_func(**kw: Any) -> Type[Any]: 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1110 return type(type_.__name__, (type_,), kw) 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1112 else:
1113 constraint_func = conbytes 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1114 elif issubclass(type_, numeric_types) and not issubclass( 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1115 type_,
1116 (
1117 ConstrainedInt,
1118 ConstrainedFloat,
1119 ConstrainedDecimal,
1120 ConstrainedList,
1121 ConstrainedSet,
1122 ConstrainedFrozenSet,
1123 bool,
1124 ),
1125 ):
1126 # Is numeric type
1127 attrs = ('gt', 'lt', 'ge', 'le', 'multiple_of') 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1128 if issubclass(type_, float): 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1129 attrs += ('allow_inf_nan',) 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1130 if issubclass(type_, Decimal): 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1131 attrs += ('max_digits', 'decimal_places') 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1132 numeric_type = next(t for t in numeric_types if issubclass(type_, t)) # pragma: no branch 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1133 constraint_func = _map_types_constraint[numeric_type] 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1135 if attrs: 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1136 used_constraints.update(set(attrs)) 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1137 kwargs = { 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1138 attr_name: attr
1139 for attr_name, attr in ((attr_name, getattr(field_info, attr_name)) for attr_name in attrs)
1140 if attr is not None
1141 }
1142 if kwargs: 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1143 constraint_func = cast(Callable[..., type], constraint_func) 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1144 return constraint_func(**kwargs) 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1145 return type_ 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
1147 return go(annotation), used_constraints 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
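# --- Hedged sketch (illustrative only; assumes pydantic v1): constraints are
# pushed down into generic containers, and the set of constraint names actually
# used is returned so the caller can detect unenforced ones.
from typing import List, Optional
from pydantic import Field
from pydantic.schema import get_annotation_with_constraints

annotation, used = get_annotation_with_constraints(Optional[List[str]], Field(max_length=5))
# The inner ``str`` becomes a ConstrainedStr with max_length=5 while the
# Optional[List[...]] structure is preserved; ``used`` contains the string
# constraint names ('max_length', 'min_length', 'regex').
print(annotation, used)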
1150def normalize_name(name: str) -> str: 1JKabcdefghyzLMijklmnopABEFGHPQRSTUINOqrstuvwxCD
1151 """
1152 Normalizes the given name. This can be applied to either a model *or* enum.
1153 """
1154 return re.sub(r'[^a-zA-Z0-9.\-_]', '_', name) 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
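# --- Hedged sketch (illustrative only): characters that are not safe in a schema
# definition name (for example the brackets of parametrized generic models) are
# replaced with underscores, while letters, digits, dots, dashes and underscores are kept.
from pydantic.schema import normalize_name

print(normalize_name('Response[List[int]]'))  # 'Response_List_int__'
print(normalize_name('my.Model-1'))           # unchanged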
1157class SkipField(Exception): 1JKabcdefghyzLMijklmnopABEFGHPQRSTUINOqrstuvwxCD
1158 """
1159 Utility exception used to exclude fields from schema.
1160 """
1162 def __init__(self, message: str) -> None: 1JKabcdefghyzLMijklmnopABEFGHPQRSTUINOqrstuvwxCD
1163 self.message = message 1JKabcdefghyzLMijklmnopABEFGHINOqrstuvwxCD
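# --- Hedged sketch (illustrative only; assumes pydantic v1): schema helpers raise
# SkipField to exclude a field from the generated schema (Callable fields, for
# instance, are skipped because JSON Schema has no equivalent type), and the
# caller catches the exception and simply omits the field.
from typing import Optional
from pydantic.schema import SkipField

def schema_or_none(field_name: str) -> Optional[dict]:
    try:
        if field_name.startswith('_'):
            # hypothetical exclusion rule, purely for illustration
            raise SkipField(f'{field_name!r} is excluded from the schema')
        return {'title': field_name}
    except SkipField as skip:
        print(skip.message)
        return None

print(schema_or_none('name'))       # {'title': 'name'}
print(schema_or_none('_internal'))  # None, after printing the skip message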