Coverage for pydantic/types.py: 97.86%
619 statements
coverage.py v7.5.3, created at 2024-06-21 17:00 +0000
1"""The types module contains custom types used by pydantic."""
3from __future__ import annotations as _annotations 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
5import base64 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
6import dataclasses as _dataclasses 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
7import re 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
8from datetime import date, datetime 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
9from decimal import Decimal 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
10from enum import Enum 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
11from pathlib import Path 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
12from types import ModuleType 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
13from typing import ( 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
14 TYPE_CHECKING,
15 Any,
16 Callable,
17 ClassVar,
18 Dict,
19 FrozenSet,
20 Generic,
21 Hashable,
22 Iterator,
23 List,
24 Pattern,
25 Set,
26 TypeVar,
27 Union,
28 cast,
29 get_args,
30 get_origin,
31)
32from uuid import UUID 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
34import annotated_types 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
35from annotated_types import BaseMetadata, MaxLen, MinLen 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
36from pydantic_core import CoreSchema, PydanticCustomError, core_schema 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
37from typing_extensions import Annotated, Literal, Protocol, TypeAlias, TypeAliasType, deprecated 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
39from ._internal import ( 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
40 _core_utils,
41 _fields,
42 _internal_dataclass,
43 _typing_extra,
44 _utils,
45 _validators,
46)
47from ._migration import getattr_migration 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
48from .annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
49from .errors import PydanticUserError 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
50from .json_schema import JsonSchemaValue 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
51from .warnings import PydanticDeprecatedSince20 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
53__all__ = ( 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
54 'Strict',
55 'StrictStr',
56 'conbytes',
57 'conlist',
58 'conset',
59 'confrozenset',
60 'constr',
61 'ImportString',
62 'conint',
63 'PositiveInt',
64 'NegativeInt',
65 'NonNegativeInt',
66 'NonPositiveInt',
67 'confloat',
68 'PositiveFloat',
69 'NegativeFloat',
70 'NonNegativeFloat',
71 'NonPositiveFloat',
72 'FiniteFloat',
73 'condecimal',
74 'UUID1',
75 'UUID3',
76 'UUID4',
77 'UUID5',
78 'FilePath',
79 'DirectoryPath',
80 'NewPath',
81 'Json',
82 'Secret',
83 'SecretStr',
84 'SecretBytes',
85 'StrictBool',
86 'StrictBytes',
87 'StrictInt',
88 'StrictFloat',
89 'PaymentCardNumber',
90 'ByteSize',
91 'PastDate',
92 'FutureDate',
93 'PastDatetime',
94 'FutureDatetime',
95 'condate',
96 'AwareDatetime',
97 'NaiveDatetime',
98 'AllowInfNan',
99 'EncoderProtocol',
100 'EncodedBytes',
101 'EncodedStr',
102 'Base64Encoder',
103 'Base64Bytes',
104 'Base64Str',
105 'Base64UrlBytes',
106 'Base64UrlStr',
107 'GetPydanticSchema',
108 'StringConstraints',
109 'Tag',
110 'Discriminator',
111 'JsonValue',
112 'OnErrorOmit',
113)
116T = TypeVar('T') 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
119@_dataclasses.dataclass 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
120class Strict(_fields.PydanticMetadata, BaseMetadata): 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
121 """Usage docs: https://docs.pydantic.dev/2.8/concepts/strict_mode/#strict-mode-with-annotated-strict
123 A field metadata class to indicate that a field should be validated in strict mode.
125 Attributes:
126 strict: Whether to validate the field in strict mode.
128 Example:
129 ```python
130 from typing_extensions import Annotated
132 from pydantic.types import Strict
134 StrictBool = Annotated[bool, Strict()]
135 ```
136 """
138 strict: bool = True 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
140 def __hash__(self) -> int: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
141 return hash(self.strict) 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
144# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BOOLEAN TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
146StrictBool = Annotated[bool, Strict()] 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
147"""A boolean that must be either ``True`` or ``False``.""" 1bcdefgahijklmGHIJKLMnopqrs


# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INTEGER TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


def conint(
    *,
    strict: bool | None = None,
    gt: int | None = None,
    ge: int | None = None,
    lt: int | None = None,
    le: int | None = None,
    multiple_of: int | None = None,
) -> type[int]:
    """
    !!! warning "Discouraged"
        This function is **discouraged** in favor of using
        [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated) with
        [`Field`][pydantic.fields.Field] instead.

        This function will be **deprecated** in Pydantic 3.0.

        The reason is that `conint` returns a type, which doesn't play well with static analysis tools.

        === ":x: Don't do this"
            ```py
            from pydantic import BaseModel, conint

            class Foo(BaseModel):
                bar: conint(strict=True, gt=0)
            ```

        === ":white_check_mark: Do this"
            ```py
            from typing_extensions import Annotated

            from pydantic import BaseModel, Field

            class Foo(BaseModel):
                bar: Annotated[int, Field(strict=True, gt=0)]
            ```

    A wrapper around `int` that allows for additional constraints.

    Args:
        strict: Whether to validate the integer in strict mode. Defaults to `None`.
        gt: The value must be greater than this.
        ge: The value must be greater than or equal to this.
        lt: The value must be less than this.
        le: The value must be less than or equal to this.
        multiple_of: The value must be a multiple of this.

    Returns:
        The wrapped integer type.

    ```py
    from pydantic import BaseModel, ValidationError, conint

    class ConstrainedExample(BaseModel):
        constrained_int: conint(gt=1)

    m = ConstrainedExample(constrained_int=2)
    print(repr(m))
    #> ConstrainedExample(constrained_int=2)

    try:
        ConstrainedExample(constrained_int=0)
    except ValidationError as e:
        print(e.errors())
        '''
        [
            {
                'type': 'greater_than',
                'loc': ('constrained_int',),
                'msg': 'Input should be greater than 1',
                'input': 0,
                'ctx': {'gt': 1},
                'url': 'https://errors.pydantic.dev/2/v/greater_than',
            }
        ]
        '''
    ```
    """  # noqa: D212
    return Annotated[  # pyright: ignore[reportReturnType]
        int,
        Strict(strict) if strict is not None else None,
        annotated_types.Interval(gt=gt, ge=ge, lt=lt, le=le),
        annotated_types.MultipleOf(multiple_of) if multiple_of is not None else None,
    ]


PositiveInt = Annotated[int, annotated_types.Gt(0)]
"""An integer that must be greater than zero.

```py
from pydantic import BaseModel, PositiveInt, ValidationError

class Model(BaseModel):
    positive_int: PositiveInt

m = Model(positive_int=1)
print(repr(m))
#> Model(positive_int=1)

try:
    Model(positive_int=-1)
except ValidationError as e:
    print(e.errors())
    '''
    [
        {
            'type': 'greater_than',
            'loc': ('positive_int',),
            'msg': 'Input should be greater than 0',
            'input': -1,
            'ctx': {'gt': 0},
            'url': 'https://errors.pydantic.dev/2/v/greater_than',
        }
    ]
    '''
```
"""
NegativeInt = Annotated[int, annotated_types.Lt(0)]
"""An integer that must be less than zero.

```py
from pydantic import BaseModel, NegativeInt, ValidationError

class Model(BaseModel):
    negative_int: NegativeInt

m = Model(negative_int=-1)
print(repr(m))
#> Model(negative_int=-1)

try:
    Model(negative_int=1)
except ValidationError as e:
    print(e.errors())
    '''
    [
        {
            'type': 'less_than',
            'loc': ('negative_int',),
            'msg': 'Input should be less than 0',
            'input': 1,
            'ctx': {'lt': 0},
            'url': 'https://errors.pydantic.dev/2/v/less_than',
        }
    ]
    '''
```
"""
NonPositiveInt = Annotated[int, annotated_types.Le(0)]
"""An integer that must be less than or equal to zero.

```py
from pydantic import BaseModel, NonPositiveInt, ValidationError

class Model(BaseModel):
    non_positive_int: NonPositiveInt

m = Model(non_positive_int=0)
print(repr(m))
#> Model(non_positive_int=0)

try:
    Model(non_positive_int=1)
except ValidationError as e:
    print(e.errors())
    '''
    [
        {
            'type': 'less_than_equal',
            'loc': ('non_positive_int',),
            'msg': 'Input should be less than or equal to 0',
            'input': 1,
            'ctx': {'le': 0},
            'url': 'https://errors.pydantic.dev/2/v/less_than_equal',
        }
    ]
    '''
```
"""
NonNegativeInt = Annotated[int, annotated_types.Ge(0)]
"""An integer that must be greater than or equal to zero.

```py
from pydantic import BaseModel, NonNegativeInt, ValidationError

class Model(BaseModel):
    non_negative_int: NonNegativeInt

m = Model(non_negative_int=0)
print(repr(m))
#> Model(non_negative_int=0)

try:
    Model(non_negative_int=-1)
except ValidationError as e:
    print(e.errors())
    '''
    [
        {
            'type': 'greater_than_equal',
            'loc': ('non_negative_int',),
            'msg': 'Input should be greater than or equal to 0',
            'input': -1,
            'ctx': {'ge': 0},
            'url': 'https://errors.pydantic.dev/2/v/greater_than_equal',
        }
    ]
    '''
```
"""
StrictInt = Annotated[int, Strict()]
"""An integer that must be validated in strict mode.

```py
from pydantic import BaseModel, StrictInt, ValidationError

class StrictIntModel(BaseModel):
    strict_int: StrictInt

try:
    StrictIntModel(strict_int=3.14159)
except ValidationError as e:
    print(e)
    '''
    1 validation error for StrictIntModel
    strict_int
      Input should be a valid integer [type=int_type, input_value=3.14159, input_type=float]
    '''
```
"""


# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FLOAT TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


@_dataclasses.dataclass
class AllowInfNan(_fields.PydanticMetadata):
    """A field metadata class to indicate that a field should allow ``-inf``, ``inf``, and ``nan``."""

    allow_inf_nan: bool = True

    def __hash__(self) -> int:
        return hash(self.allow_inf_nan)
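
# Editor's note: illustrative sketch, not part of the original module. `AllowInfNan` is plain
# annotation metadata, so it composes with other constraints inside `Annotated`; the
# `FiniteFloat` alias defined below is the prebuilt `AllowInfNan(False)` form:
#
#     from typing_extensions import Annotated
#
#     from pydantic import AllowInfNan, BaseModel, ValidationError
#
#     class Measurement(BaseModel):
#         value: Annotated[float, AllowInfNan(False)]
#
#     try:
#         Measurement(value=float('nan'))
#     except ValidationError as e:
#         print(e.errors()[0]['type'])
#         #> finite_number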


def confloat(
    *,
    strict: bool | None = None,
    gt: float | None = None,
    ge: float | None = None,
    lt: float | None = None,
    le: float | None = None,
    multiple_of: float | None = None,
    allow_inf_nan: bool | None = None,
) -> type[float]:
    """
    !!! warning "Discouraged"
        This function is **discouraged** in favor of using
        [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated) with
        [`Field`][pydantic.fields.Field] instead.

        This function will be **deprecated** in Pydantic 3.0.

        The reason is that `confloat` returns a type, which doesn't play well with static analysis tools.

        === ":x: Don't do this"
            ```py
            from pydantic import BaseModel, confloat

            class Foo(BaseModel):
                bar: confloat(strict=True, gt=0)
            ```

        === ":white_check_mark: Do this"
            ```py
            from typing_extensions import Annotated

            from pydantic import BaseModel, Field

            class Foo(BaseModel):
                bar: Annotated[float, Field(strict=True, gt=0)]
            ```

    A wrapper around `float` that allows for additional constraints.

    Args:
        strict: Whether to validate the float in strict mode.
        gt: The value must be greater than this.
        ge: The value must be greater than or equal to this.
        lt: The value must be less than this.
        le: The value must be less than or equal to this.
        multiple_of: The value must be a multiple of this.
        allow_inf_nan: Whether to allow `-inf`, `inf`, and `nan`.

    Returns:
        The wrapped float type.

    ```py
    from pydantic import BaseModel, ValidationError, confloat

    class ConstrainedExample(BaseModel):
        constrained_float: confloat(gt=1.0)

    m = ConstrainedExample(constrained_float=1.1)
    print(repr(m))
    #> ConstrainedExample(constrained_float=1.1)

    try:
        ConstrainedExample(constrained_float=0.9)
    except ValidationError as e:
        print(e.errors())
        '''
        [
            {
                'type': 'greater_than',
                'loc': ('constrained_float',),
                'msg': 'Input should be greater than 1',
                'input': 0.9,
                'ctx': {'gt': 1.0},
                'url': 'https://errors.pydantic.dev/2/v/greater_than',
            }
        ]
        '''
    ```
    """  # noqa: D212
    return Annotated[  # pyright: ignore[reportReturnType]
        float,
        Strict(strict) if strict is not None else None,
        annotated_types.Interval(gt=gt, ge=ge, lt=lt, le=le),
        annotated_types.MultipleOf(multiple_of) if multiple_of is not None else None,
        AllowInfNan(allow_inf_nan) if allow_inf_nan is not None else None,
    ]


PositiveFloat = Annotated[float, annotated_types.Gt(0)]
"""A float that must be greater than zero.

```py
from pydantic import BaseModel, PositiveFloat, ValidationError

class Model(BaseModel):
    positive_float: PositiveFloat

m = Model(positive_float=1.0)
print(repr(m))
#> Model(positive_float=1.0)

try:
    Model(positive_float=-1.0)
except ValidationError as e:
    print(e.errors())
    '''
    [
        {
            'type': 'greater_than',
            'loc': ('positive_float',),
            'msg': 'Input should be greater than 0',
            'input': -1.0,
            'ctx': {'gt': 0.0},
            'url': 'https://errors.pydantic.dev/2/v/greater_than',
        }
    ]
    '''
```
"""
NegativeFloat = Annotated[float, annotated_types.Lt(0)]
"""A float that must be less than zero.

```py
from pydantic import BaseModel, NegativeFloat, ValidationError

class Model(BaseModel):
    negative_float: NegativeFloat

m = Model(negative_float=-1.0)
print(repr(m))
#> Model(negative_float=-1.0)

try:
    Model(negative_float=1.0)
except ValidationError as e:
    print(e.errors())
    '''
    [
        {
            'type': 'less_than',
            'loc': ('negative_float',),
            'msg': 'Input should be less than 0',
            'input': 1.0,
            'ctx': {'lt': 0.0},
            'url': 'https://errors.pydantic.dev/2/v/less_than',
        }
    ]
    '''
```
"""
NonPositiveFloat = Annotated[float, annotated_types.Le(0)]
"""A float that must be less than or equal to zero.

```py
from pydantic import BaseModel, NonPositiveFloat, ValidationError

class Model(BaseModel):
    non_positive_float: NonPositiveFloat

m = Model(non_positive_float=0.0)
print(repr(m))
#> Model(non_positive_float=0.0)

try:
    Model(non_positive_float=1.0)
except ValidationError as e:
    print(e.errors())
    '''
    [
        {
            'type': 'less_than_equal',
            'loc': ('non_positive_float',),
            'msg': 'Input should be less than or equal to 0',
            'input': 1.0,
            'ctx': {'le': 0.0},
            'url': 'https://errors.pydantic.dev/2/v/less_than_equal',
        }
    ]
    '''
```
"""
NonNegativeFloat = Annotated[float, annotated_types.Ge(0)]
"""A float that must be greater than or equal to zero.

```py
from pydantic import BaseModel, NonNegativeFloat, ValidationError

class Model(BaseModel):
    non_negative_float: NonNegativeFloat

m = Model(non_negative_float=0.0)
print(repr(m))
#> Model(non_negative_float=0.0)

try:
    Model(non_negative_float=-1.0)
except ValidationError as e:
    print(e.errors())
    '''
    [
        {
            'type': 'greater_than_equal',
            'loc': ('non_negative_float',),
            'msg': 'Input should be greater than or equal to 0',
            'input': -1.0,
            'ctx': {'ge': 0.0},
            'url': 'https://errors.pydantic.dev/2/v/greater_than_equal',
        }
    ]
    '''
```
"""
StrictFloat = Annotated[float, Strict(True)]
"""A float that must be validated in strict mode.

```py
from pydantic import BaseModel, StrictFloat, ValidationError

class StrictFloatModel(BaseModel):
    strict_float: StrictFloat

try:
    StrictFloatModel(strict_float='1.0')
except ValidationError as e:
    print(e)
    '''
    1 validation error for StrictFloatModel
    strict_float
      Input should be a valid number [type=float_type, input_value='1.0', input_type=str]
    '''
```
"""
FiniteFloat = Annotated[float, AllowInfNan(False)]
"""A float that must be finite (not ``-inf``, ``inf``, or ``nan``).

```py
from pydantic import BaseModel, FiniteFloat

class Model(BaseModel):
    finite: FiniteFloat

m = Model(finite=1.0)
print(m)
#> finite=1.0
```
"""


# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTES TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


def conbytes(
    *,
    min_length: int | None = None,
    max_length: int | None = None,
    strict: bool | None = None,
) -> type[bytes]:
    """A wrapper around `bytes` that allows for additional constraints.

    Args:
        min_length: The minimum length of the bytes.
        max_length: The maximum length of the bytes.
        strict: Whether to validate the bytes in strict mode.

    Returns:
        The wrapped bytes type.
    """
    return Annotated[  # pyright: ignore[reportReturnType]
        bytes,
        Strict(strict) if strict is not None else None,
        annotated_types.Len(min_length or 0, max_length),
    ]
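
# Editor's note: illustrative sketch, not part of the original module. `conbytes` simply
# builds an `Annotated[bytes, ...]` form, so length constraints behave like `MinLen`/`MaxLen`:
#
#     from pydantic import BaseModel, ValidationError, conbytes
#
#     class Packet(BaseModel):
#         payload: conbytes(min_length=2, max_length=4)
#
#     print(Packet(payload=b'abc'))
#     #> payload=b'abc'
#     try:
#         Packet(payload=b'a')
#     except ValidationError as e:
#         print(e.errors()[0]['type'])
#         #> bytes_too_short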


StrictBytes = Annotated[bytes, Strict()]
"""A bytes that must be validated in strict mode."""


# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ STRING TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


@_dataclasses.dataclass(frozen=True)
class StringConstraints(annotated_types.GroupedMetadata):
    """Usage docs: https://docs.pydantic.dev/2.8/concepts/fields/#string-constraints

    Apply constraints to `str` types.

    Attributes:
        strip_whitespace: Whether to remove leading and trailing whitespace.
        to_upper: Whether to convert the string to uppercase.
        to_lower: Whether to convert the string to lowercase.
        strict: Whether to validate the string in strict mode.
        min_length: The minimum length of the string.
        max_length: The maximum length of the string.
        pattern: A regex pattern that the string must match.
    """

    strip_whitespace: bool | None = None
    to_upper: bool | None = None
    to_lower: bool | None = None
    strict: bool | None = None
    min_length: int | None = None
    max_length: int | None = None
    pattern: str | Pattern[str] | None = None

    def __iter__(self) -> Iterator[BaseMetadata]:
        if self.min_length is not None:
            yield MinLen(self.min_length)
        if self.max_length is not None:
            yield MaxLen(self.max_length)
        if self.strict is not None:
            yield Strict(self.strict)
        if (
            self.strip_whitespace is not None
            or self.pattern is not None
            or self.to_lower is not None
            or self.to_upper is not None
        ):
            yield _fields.pydantic_general_metadata(
                strip_whitespace=self.strip_whitespace,
                to_upper=self.to_upper,
                to_lower=self.to_lower,
                pattern=self.pattern,
            )
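
# Editor's note: illustrative sketch, not part of the original module. `StringConstraints`
# is the `Annotated` counterpart of `constr()` below; transformations and length checks are
# applied to the validated string:
#
#     from typing_extensions import Annotated
#
#     from pydantic import BaseModel, StringConstraints
#
#     class User(BaseModel):
#         handle: Annotated[str, StringConstraints(strip_whitespace=True, to_lower=True, min_length=3)]
#
#     print(User(handle='  PyDantic  '))
#     #> handle='pydantic'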


def constr(
    *,
    strip_whitespace: bool | None = None,
    to_upper: bool | None = None,
    to_lower: bool | None = None,
    strict: bool | None = None,
    min_length: int | None = None,
    max_length: int | None = None,
    pattern: str | Pattern[str] | None = None,
) -> type[str]:
    """
    !!! warning "Discouraged"
        This function is **discouraged** in favor of using
        [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated) with
        [`StringConstraints`][pydantic.types.StringConstraints] instead.

        This function will be **deprecated** in Pydantic 3.0.

        The reason is that `constr` returns a type, which doesn't play well with static analysis tools.

        === ":x: Don't do this"
            ```py
            from pydantic import BaseModel, constr

            class Foo(BaseModel):
                bar: constr(strip_whitespace=True, to_upper=True, pattern=r'^[A-Z]+$')
            ```

        === ":white_check_mark: Do this"
            ```py
            from typing_extensions import Annotated

            from pydantic import BaseModel, StringConstraints

            class Foo(BaseModel):
                bar: Annotated[str, StringConstraints(strip_whitespace=True, to_upper=True, pattern=r'^[A-Z]+$')]
            ```

    A wrapper around `str` that allows for additional constraints.

    ```py
    from pydantic import BaseModel, constr

    class Foo(BaseModel):
        bar: constr(strip_whitespace=True, to_upper=True, pattern=r'^[A-Z]+$')


    foo = Foo(bar=' hello ')
    print(foo)
    #> bar='HELLO'
    ```

    Args:
        strip_whitespace: Whether to remove leading and trailing whitespace.
        to_upper: Whether to turn all characters to uppercase.
        to_lower: Whether to turn all characters to lowercase.
        strict: Whether to validate the string in strict mode.
        min_length: The minimum length of the string.
        max_length: The maximum length of the string.
        pattern: A regex pattern to validate the string against.

    Returns:
        The wrapped string type.
    """  # noqa: D212
    return Annotated[  # pyright: ignore[reportReturnType]
        str,
        StringConstraints(
            strip_whitespace=strip_whitespace,
            to_upper=to_upper,
            to_lower=to_lower,
            strict=strict,
            min_length=min_length,
            max_length=max_length,
            pattern=pattern,
        ),
    ]


StrictStr = Annotated[str, Strict()]
"""A string that must be validated in strict mode."""


# ~~~~~~~~~~~~~~~~~~~~~~~~~~~ COLLECTION TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

HashableItemType = TypeVar('HashableItemType', bound=Hashable)


def conset(
    item_type: type[HashableItemType], *, min_length: int | None = None, max_length: int | None = None
) -> type[set[HashableItemType]]:
    """A wrapper around `typing.Set` that allows for additional constraints.

    Args:
        item_type: The type of the items in the set.
        min_length: The minimum length of the set.
        max_length: The maximum length of the set.

    Returns:
        The wrapped set type.
    """
    return Annotated[Set[item_type], annotated_types.Len(min_length or 0, max_length)]  # pyright: ignore[reportReturnType]


def confrozenset(
    item_type: type[HashableItemType], *, min_length: int | None = None, max_length: int | None = None
) -> type[frozenset[HashableItemType]]:
    """A wrapper around `typing.FrozenSet` that allows for additional constraints.

    Args:
        item_type: The type of the items in the frozenset.
        min_length: The minimum length of the frozenset.
        max_length: The maximum length of the frozenset.

    Returns:
        The wrapped frozenset type.
    """
    return Annotated[FrozenSet[item_type], annotated_types.Len(min_length or 0, max_length)]  # pyright: ignore[reportReturnType]
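
# Editor's note: illustrative sketch, not part of the original module. Both helpers above
# return `Annotated` set types, so size constraints are enforced on the resulting set:
#
#     from pydantic import BaseModel, ValidationError, conset
#
#     class Tags(BaseModel):
#         names: conset(str, min_length=2)
#
#     print(Tags(names={'a', 'b'}))  # note: set ordering in the repr may vary
#     try:
#         Tags(names={'a'})
#     except ValidationError as e:
#         print(e.errors()[0]['type'])
#         #> too_short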


AnyItemType = TypeVar('AnyItemType')


def conlist(
    item_type: type[AnyItemType],
    *,
    min_length: int | None = None,
    max_length: int | None = None,
    unique_items: bool | None = None,
) -> type[list[AnyItemType]]:
    """A wrapper around typing.List that adds validation.

    Args:
        item_type: The type of the items in the list.
        min_length: The minimum length of the list. Defaults to None.
        max_length: The maximum length of the list. Defaults to None.
        unique_items: Whether the items in the list must be unique. Defaults to None.
            !!! warning Deprecated
                The `unique_items` parameter is deprecated, use `Set` instead.
                See [this issue](https://github.com/pydantic/pydantic-core/issues/296) for more details.

    Returns:
        The wrapped list type.
    """
    if unique_items is not None:
        raise PydanticUserError(
            (
                '`unique_items` is removed, use `Set` instead'
                '(this feature is discussed in https://github.com/pydantic/pydantic-core/issues/296)'
            ),
            code='removed-kwargs',
        )
    return Annotated[List[item_type], annotated_types.Len(min_length or 0, max_length)]  # pyright: ignore[reportReturnType]
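
# Editor's note: illustrative sketch, not part of the original module. `conlist` mirrors
# `conset`/`confrozenset` above but for lists; passing `unique_items` raises the
# `removed-kwargs` error at type-construction time rather than at validation time:
#
#     from pydantic import BaseModel, ValidationError, conlist
#
#     class Basket(BaseModel):
#         items: conlist(int, min_length=1, max_length=3)
#
#     print(Basket(items=[1, 2]))
#     #> items=[1, 2]
#     try:
#         Basket(items=[])
#     except ValidationError as e:
#         print(e.errors()[0]['type'])
#         #> too_short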


# ~~~~~~~~~~~~~~~~~~~~~~~~~~ IMPORT STRING TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

AnyType = TypeVar('AnyType')
if TYPE_CHECKING:
    ImportString = Annotated[AnyType, ...]
else:

    class ImportString:
        """A type that can be used to import a type from a string.

        `ImportString` expects a string and loads the Python object importable at that dotted path.
        Attributes of modules may be separated from the module by `:` or `.`, e.g. if `'math:cos'` was provided,
        the resulting field value would be the function `cos`. If a `.` is used and both an attribute and submodule
        are present at the same path, the module will be preferred.

        On model instantiation, pointers will be evaluated and imported. There is
        some nuance to this behavior, demonstrated in the examples below.

        **Good behavior:**
        ```py
        import math
        from math import cos

        from pydantic import BaseModel, Field, ImportString, ValidationError

        class ImportThings(BaseModel):
            obj: ImportString

        # A string value will cause an automatic import
        my_cos = ImportThings(obj='math.cos')

        # You can use the imported function as you would expect
        cos_of_0 = my_cos.obj(0)
        assert cos_of_0 == 1

        # A string whose value cannot be imported will raise an error
        try:
            ImportThings(obj='foo.bar')
        except ValidationError as e:
            print(e)
            '''
            1 validation error for ImportThings
            obj
              Invalid python path: No module named 'foo.bar' [type=import_error, input_value='foo.bar', input_type=str]
            '''

        # Actual python objects can be assigned as well
        my_cos = ImportThings(obj=cos)
        my_cos_2 = ImportThings(obj='math.cos')
        my_cos_3 = ImportThings(obj='math:cos')
        assert my_cos == my_cos_2 == my_cos_3

        # You can set default field value either as Python object:
        class ImportThingsDefaultPyObj(BaseModel):
            obj: ImportString = math.cos

        # or as a string value (but only if used with `validate_default=True`)
        class ImportThingsDefaultString(BaseModel):
            obj: ImportString = Field(default='math.cos', validate_default=True)

        my_cos_default1 = ImportThingsDefaultPyObj()
        my_cos_default2 = ImportThingsDefaultString()
        assert my_cos_default1.obj == my_cos_default2.obj == math.cos

        # note: this will not work!
        class ImportThingsMissingValidateDefault(BaseModel):
            obj: ImportString = 'math.cos'

        my_cos_default3 = ImportThingsMissingValidateDefault()
        assert my_cos_default3.obj == 'math.cos'  # just string, not evaluated
        ```

        Serializing an `ImportString` type to json is also possible.

        ```py
        from pydantic import BaseModel, ImportString

        class ImportThings(BaseModel):
            obj: ImportString

        # Create an instance
        m = ImportThings(obj='math.cos')
        print(m)
        #> obj=<built-in function cos>
        print(m.model_dump_json())
        #> {"obj":"math.cos"}
        ```
        """

        @classmethod
        def __class_getitem__(cls, item: AnyType) -> AnyType:
            return Annotated[item, cls()]

        @classmethod
        def __get_pydantic_core_schema__(
            cls, source: type[Any], handler: GetCoreSchemaHandler
        ) -> core_schema.CoreSchema:
            serializer = core_schema.plain_serializer_function_ser_schema(cls._serialize, when_used='json')
            if cls is source:
                # Treat bare usage of ImportString (`schema is None`) as the same as ImportString[Any]
                return core_schema.no_info_plain_validator_function(
                    function=_validators.import_string, serialization=serializer
                )
            else:
                return core_schema.no_info_before_validator_function(
                    function=_validators.import_string, schema=handler(source), serialization=serializer
                )

        @classmethod
        def __get_pydantic_json_schema__(cls, cs: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue:
            return handler(core_schema.str_schema())

        @staticmethod
        def _serialize(v: Any) -> str:
            if isinstance(v, ModuleType):
                return v.__name__
            elif hasattr(v, '__module__') and hasattr(v, '__name__'):
                return f'{v.__module__}.{v.__name__}'
            else:
                return v

        def __repr__(self) -> str:
            return 'ImportString'


# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DECIMAL TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


def condecimal(
    *,
    strict: bool | None = None,
    gt: int | Decimal | None = None,
    ge: int | Decimal | None = None,
    lt: int | Decimal | None = None,
    le: int | Decimal | None = None,
    multiple_of: int | Decimal | None = None,
    max_digits: int | None = None,
    decimal_places: int | None = None,
    allow_inf_nan: bool | None = None,
) -> type[Decimal]:
    """
    !!! warning "Discouraged"
        This function is **discouraged** in favor of using
        [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated) with
        [`Field`][pydantic.fields.Field] instead.

        This function will be **deprecated** in Pydantic 3.0.

        The reason is that `condecimal` returns a type, which doesn't play well with static analysis tools.

        === ":x: Don't do this"
            ```py
            from pydantic import BaseModel, condecimal

            class Foo(BaseModel):
                bar: condecimal(strict=True, allow_inf_nan=True)
            ```

        === ":white_check_mark: Do this"
            ```py
            from decimal import Decimal

            from typing_extensions import Annotated

            from pydantic import BaseModel, Field

            class Foo(BaseModel):
                bar: Annotated[Decimal, Field(strict=True, allow_inf_nan=True)]
            ```

    A wrapper around Decimal that adds validation.

    Args:
        strict: Whether to validate the value in strict mode. Defaults to `None`.
        gt: The value must be greater than this. Defaults to `None`.
        ge: The value must be greater than or equal to this. Defaults to `None`.
        lt: The value must be less than this. Defaults to `None`.
        le: The value must be less than or equal to this. Defaults to `None`.
        multiple_of: The value must be a multiple of this. Defaults to `None`.
        max_digits: The maximum number of digits. Defaults to `None`.
        decimal_places: The number of decimal places. Defaults to `None`.
        allow_inf_nan: Whether to allow infinity and NaN. Defaults to `None`.

    ```py
    from decimal import Decimal

    from pydantic import BaseModel, ValidationError, condecimal

    class ConstrainedExample(BaseModel):
        constrained_decimal: condecimal(gt=Decimal('1.0'))

    m = ConstrainedExample(constrained_decimal=Decimal('1.1'))
    print(repr(m))
    #> ConstrainedExample(constrained_decimal=Decimal('1.1'))

    try:
        ConstrainedExample(constrained_decimal=Decimal('0.9'))
    except ValidationError as e:
        print(e.errors())
        '''
        [
            {
                'type': 'greater_than',
                'loc': ('constrained_decimal',),
                'msg': 'Input should be greater than 1.0',
                'input': Decimal('0.9'),
                'ctx': {'gt': Decimal('1.0')},
                'url': 'https://errors.pydantic.dev/2/v/greater_than',
            }
        ]
        '''
    ```
    """  # noqa: D212
    return Annotated[  # pyright: ignore[reportReturnType]
        Decimal,
        Strict(strict) if strict is not None else None,
        annotated_types.Interval(gt=gt, ge=ge, lt=lt, le=le),
        annotated_types.MultipleOf(multiple_of) if multiple_of is not None else None,
        _fields.pydantic_general_metadata(max_digits=max_digits, decimal_places=decimal_places),
        AllowInfNan(allow_inf_nan) if allow_inf_nan is not None else None,
    ]


# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ UUID TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


@_dataclasses.dataclass(**_internal_dataclass.slots_true)
class UuidVersion:
    """A field metadata class to indicate a [UUID](https://docs.python.org/3/library/uuid.html) version."""

    uuid_version: Literal[1, 3, 4, 5]

    def __get_pydantic_json_schema__(
        self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
    ) -> JsonSchemaValue:
        field_schema = handler(core_schema)
        field_schema.pop('anyOf', None)  # remove the bytes/str union
        field_schema.update(type='string', format=f'uuid{self.uuid_version}')
        return field_schema

    def __get_pydantic_core_schema__(self, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
        if isinstance(self, source):
            # used directly as a type
            return core_schema.uuid_schema(version=self.uuid_version)
        else:
            # update existing schema with self.uuid_version
            schema = handler(source)
            _check_annotated_type(schema['type'], 'uuid', self.__class__.__name__)
            schema['version'] = self.uuid_version  # type: ignore
            return schema

    def __hash__(self) -> int:
        return hash(type(self.uuid_version))


UUID1 = Annotated[UUID, UuidVersion(1)]
"""A [UUID](https://docs.python.org/3/library/uuid.html) that must be version 1.

```py
import uuid

from pydantic import UUID1, BaseModel

class Model(BaseModel):
    uuid1: UUID1

Model(uuid1=uuid.uuid1())
```
"""
UUID3 = Annotated[UUID, UuidVersion(3)]
"""A [UUID](https://docs.python.org/3/library/uuid.html) that must be version 3.

```py
import uuid

from pydantic import UUID3, BaseModel

class Model(BaseModel):
    uuid3: UUID3

Model(uuid3=uuid.uuid3(uuid.NAMESPACE_DNS, 'pydantic.org'))
```
"""
UUID4 = Annotated[UUID, UuidVersion(4)]
"""A [UUID](https://docs.python.org/3/library/uuid.html) that must be version 4.

```py
import uuid

from pydantic import UUID4, BaseModel

class Model(BaseModel):
    uuid4: UUID4

Model(uuid4=uuid.uuid4())
```
"""
UUID5 = Annotated[UUID, UuidVersion(5)]
"""A [UUID](https://docs.python.org/3/library/uuid.html) that must be version 5.

```py
import uuid

from pydantic import UUID5, BaseModel

class Model(BaseModel):
    uuid5: UUID5

Model(uuid5=uuid.uuid5(uuid.NAMESPACE_DNS, 'pydantic.org'))
```
"""


# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PATH TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


@_dataclasses.dataclass
class PathType:
    path_type: Literal['file', 'dir', 'new']

    def __get_pydantic_json_schema__(
        self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
    ) -> JsonSchemaValue:
        field_schema = handler(core_schema)
        format_conversion = {'file': 'file-path', 'dir': 'directory-path'}
        field_schema.update(format=format_conversion.get(self.path_type, 'path'), type='string')
        return field_schema

    def __get_pydantic_core_schema__(self, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
        function_lookup = {
            'file': cast(core_schema.WithInfoValidatorFunction, self.validate_file),
            'dir': cast(core_schema.WithInfoValidatorFunction, self.validate_directory),
            'new': cast(core_schema.WithInfoValidatorFunction, self.validate_new),
        }

        return core_schema.with_info_after_validator_function(
            function_lookup[self.path_type],
            handler(source),
        )

    @staticmethod
    def validate_file(path: Path, _: core_schema.ValidationInfo) -> Path:
        if path.is_file():
            return path
        else:
            raise PydanticCustomError('path_not_file', 'Path does not point to a file')

    @staticmethod
    def validate_directory(path: Path, _: core_schema.ValidationInfo) -> Path:
        if path.is_dir():
            return path
        else:
            raise PydanticCustomError('path_not_directory', 'Path does not point to a directory')

    @staticmethod
    def validate_new(path: Path, _: core_schema.ValidationInfo) -> Path:
        if path.exists():
            raise PydanticCustomError('path_exists', 'Path already exists')
        elif not path.parent.exists():
            raise PydanticCustomError('parent_does_not_exist', 'Parent directory does not exist')
        else:
            return path

    def __hash__(self) -> int:
        return hash(type(self.path_type))


FilePath = Annotated[Path, PathType('file')]
"""A path that must point to a file.

```py
from pathlib import Path

from pydantic import BaseModel, FilePath, ValidationError

class Model(BaseModel):
    f: FilePath

path = Path('text.txt')
path.touch()
m = Model(f='text.txt')
print(m.model_dump())
#> {'f': PosixPath('text.txt')}
path.unlink()

path = Path('directory')
path.mkdir(exist_ok=True)
try:
    Model(f='directory')  # directory
except ValidationError as e:
    print(e)
    '''
    1 validation error for Model
    f
      Path does not point to a file [type=path_not_file, input_value='directory', input_type=str]
    '''
path.rmdir()

try:
    Model(f='not-exists-file')
except ValidationError as e:
    print(e)
    '''
    1 validation error for Model
    f
      Path does not point to a file [type=path_not_file, input_value='not-exists-file', input_type=str]
    '''
```
"""
DirectoryPath = Annotated[Path, PathType('dir')]
"""A path that must point to a directory.

```py
from pathlib import Path

from pydantic import BaseModel, DirectoryPath, ValidationError

class Model(BaseModel):
    f: DirectoryPath

path = Path('directory/')
path.mkdir()
m = Model(f='directory/')
print(m.model_dump())
#> {'f': PosixPath('directory')}
path.rmdir()

path = Path('file.txt')
path.touch()
try:
    Model(f='file.txt')  # file
except ValidationError as e:
    print(e)
    '''
    1 validation error for Model
    f
      Path does not point to a directory [type=path_not_directory, input_value='file.txt', input_type=str]
    '''
path.unlink()

try:
    Model(f='not-exists-directory')
except ValidationError as e:
    print(e)
    '''
    1 validation error for Model
    f
      Path does not point to a directory [type=path_not_directory, input_value='not-exists-directory', input_type=str]
    '''
```
"""
NewPath = Annotated[Path, PathType('new')]
"""A path for a new file or directory that must not already exist. The parent directory must already exist."""


# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ JSON TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

if TYPE_CHECKING:
    # Json[list[str]] will be recognized by type checkers as list[str]
    Json = Annotated[AnyType, ...]

else:

    class Json:
        """A special type wrapper which loads JSON before parsing.

        You can use the `Json` data type to make Pydantic first load a raw JSON string before
        validating the loaded data into the parametrized type:

        ```py
        from typing import Any, List

        from pydantic import BaseModel, Json, ValidationError

        class AnyJsonModel(BaseModel):
            json_obj: Json[Any]

        class ConstrainedJsonModel(BaseModel):
            json_obj: Json[List[int]]

        print(AnyJsonModel(json_obj='{"b": 1}'))
        #> json_obj={'b': 1}
        print(ConstrainedJsonModel(json_obj='[1, 2, 3]'))
        #> json_obj=[1, 2, 3]

        try:
            ConstrainedJsonModel(json_obj=12)
        except ValidationError as e:
            print(e)
            '''
            1 validation error for ConstrainedJsonModel
            json_obj
              JSON input should be string, bytes or bytearray [type=json_type, input_value=12, input_type=int]
            '''

        try:
            ConstrainedJsonModel(json_obj='[a, b]')
        except ValidationError as e:
            print(e)
            '''
            1 validation error for ConstrainedJsonModel
            json_obj
              Invalid JSON: expected value at line 1 column 2 [type=json_invalid, input_value='[a, b]', input_type=str]
            '''

        try:
            ConstrainedJsonModel(json_obj='["a", "b"]')
        except ValidationError as e:
            print(e)
            '''
            2 validation errors for ConstrainedJsonModel
            json_obj.0
              Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='a', input_type=str]
            json_obj.1
              Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='b', input_type=str]
            '''
        ```

        When you dump the model using `model_dump` or `model_dump_json`, the dumped value will be the result of validation,
        not the original JSON string. However, you can use the argument `round_trip=True` to get the original JSON string back:

        ```py
        from typing import List

        from pydantic import BaseModel, Json

        class ConstrainedJsonModel(BaseModel):
            json_obj: Json[List[int]]

        print(ConstrainedJsonModel(json_obj='[1, 2, 3]').model_dump_json())
        #> {"json_obj":[1,2,3]}
        print(
            ConstrainedJsonModel(json_obj='[1, 2, 3]').model_dump_json(round_trip=True)
        )
        #> {"json_obj":"[1,2,3]"}
        ```
        """

        @classmethod
        def __class_getitem__(cls, item: AnyType) -> AnyType:
            return Annotated[item, cls()]

        @classmethod
        def __get_pydantic_core_schema__(cls, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
            if cls is source:
                return core_schema.json_schema(None)
            else:
                return core_schema.json_schema(handler(source))

        def __repr__(self) -> str:
            return 'Json'

        def __hash__(self) -> int:
            return hash(type(self))

        def __eq__(self, other: Any) -> bool:
            return type(other) == type(self)


# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SECRET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

SecretType = TypeVar('SecretType')


class _SecretBase(Generic[SecretType]):
    def __init__(self, secret_value: SecretType) -> None:
        self._secret_value: SecretType = secret_value

    def get_secret_value(self) -> SecretType:
        """Get the secret value.

        Returns:
            The secret value.
        """
        return self._secret_value

    def __eq__(self, other: Any) -> bool:
        return isinstance(other, self.__class__) and self.get_secret_value() == other.get_secret_value()

    def __hash__(self) -> int:
        return hash(self.get_secret_value())

    def __str__(self) -> str:
        return str(self._display())

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}({self._display()!r})'

    def _display(self) -> str | bytes:
        raise NotImplementedError


class Secret(_SecretBase[SecretType]):
    """A generic base class used for defining a field with sensitive information that you do not want to be visible in logging or tracebacks.

    You may either directly parametrize `Secret` with a type, or subclass from `Secret` with a parametrized type. The benefit of subclassing
    is that you can define a custom `_display` method, which will be used for `repr()` and `str()` methods. The examples below demonstrate both
    ways of using `Secret` to create a new secret type.

    1. Directly parametrizing `Secret` with a type:

    ```py
    from pydantic import BaseModel, Secret

    SecretBool = Secret[bool]

    class Model(BaseModel):
        secret_bool: SecretBool

    m = Model(secret_bool=True)
    print(m.model_dump())
    #> {'secret_bool': Secret('**********')}

    print(m.model_dump_json())
    #> {"secret_bool":"**********"}

    print(m.secret_bool.get_secret_value())
    #> True
    ```

    2. Subclassing from parametrized `Secret`:

    ```py
    from datetime import date

    from pydantic import BaseModel, Secret

    class SecretDate(Secret[date]):
        def _display(self) -> str:
            return '****/**/**'

    class Model(BaseModel):
        secret_date: SecretDate

    m = Model(secret_date=date(2022, 1, 1))
    print(m.model_dump())
    #> {'secret_date': SecretDate('****/**/**')}

    print(m.model_dump_json())
    #> {"secret_date":"****/**/**"}

    print(m.secret_date.get_secret_value())
    #> 2022-01-01
    ```

    The value returned by the `_display` method will be used for `repr()` and `str()`.
    """

    def _display(self) -> str | bytes:
        return '**********' if self.get_secret_value() else ''

    @classmethod
    def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
        inner_type = None
        # if origin_type is Secret, then cls is a GenericAlias, and we can extract the inner type directly
        origin_type = get_origin(source)
        if origin_type is not None:
            inner_type = get_args(source)[0]
        # otherwise, we need to get the inner type from the base class
        else:
            bases = getattr(cls, '__orig_bases__', getattr(cls, '__bases__', []))
            for base in bases:
                if get_origin(base) is Secret:
                    inner_type = get_args(base)[0]
            if bases == [] or inner_type is None:
                raise TypeError(
                    f"Can't get secret type from {cls.__name__}. "
                    'Please use Secret[<type>], or subclass from Secret[<type>] instead.'
                )

        inner_schema = handler.generate_schema(inner_type)  # type: ignore

        def validate_secret_value(value, handler) -> Secret[SecretType]:
            if isinstance(value, Secret):
                value = value.get_secret_value()
            validated_inner = handler(value)
            return cls(validated_inner)

        def serialize(value: Secret[SecretType], info: core_schema.SerializationInfo) -> str | Secret[SecretType]:
            if info.mode == 'json':
                return str(value)
            else:
                return value

        return core_schema.json_or_python_schema(
            python_schema=core_schema.no_info_wrap_validator_function(
                validate_secret_value,
                inner_schema,
            ),
            json_schema=core_schema.no_info_after_validator_function(lambda x: cls(x), inner_schema),
            serialization=core_schema.plain_serializer_function_ser_schema(
                serialize,
                info_arg=True,
                when_used='always',
            ),
        )
1589def _secret_display(value: SecretType) -> str: # type: ignore 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1590 return '**********' if value else '' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1593class _SecretField(_SecretBase[SecretType]): 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1594 _inner_schema: ClassVar[CoreSchema] 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1595 _error_kind: ClassVar[str] 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1597 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1598 def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1599 def serialize( 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1600 value: _SecretField[SecretType], info: core_schema.SerializationInfo
1601 ) -> str | _SecretField[SecretType]:
1602 if info.mode == 'json': 1602 ↛ 1607line 1602 didn't jump to line 1607, because the condition on line 1602 was always true1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1603 # we want the output to always be string without the `b'` prefix for bytes,
1604 # hence we just use `secret_display`
1605 return _secret_display(value.get_secret_value()) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1606 else:
1607 return value
1609 def get_json_schema(_core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1610 json_schema = handler(cls._inner_schema) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1611 _utils.update_not_none( 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1612 json_schema,
1613 type='string',
1614 writeOnly=True,
1615 format='password',
1616 )
1617 return json_schema 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1619 json_schema = core_schema.no_info_after_validator_function( 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1620 source, # construct the type
1621 cls._inner_schema,
1622 )
1624 def get_secret_schema(strict: bool) -> CoreSchema: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1625 return core_schema.json_or_python_schema( 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1626 python_schema=core_schema.union_schema(
1627 [
1628 core_schema.is_instance_schema(source),
1629 json_schema,
1630 ],
1631 custom_error_type=cls._error_kind,
1632 strict=strict,
1633 ),
1634 json_schema=json_schema,
1635 serialization=core_schema.plain_serializer_function_ser_schema(
1636 serialize,
1637 info_arg=True,
1638 return_schema=core_schema.str_schema(),
1639 when_used='json',
1640 ),
1641 )
1643 return core_schema.lax_or_strict_schema( 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1644 lax_schema=get_secret_schema(strict=False),
1645 strict_schema=get_secret_schema(strict=True),
1646 metadata={'pydantic_js_functions': [get_json_schema]},
1647 )
1650class SecretStr(_SecretField[str]): 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1651 """A string used for storing sensitive information that you do not want to be visible in logging or tracebacks.
1653 When the secret value is nonempty, it is displayed as `'**********'` instead of the underlying value in
1654 calls to `repr()` and `str()`. If the value _is_ empty, it is displayed as `''`.
1656 ```py
1657 from pydantic import BaseModel, SecretStr
1659 class User(BaseModel):
1660 username: str
1661 password: SecretStr
1663 user = User(username='scolvin', password='password1')
1665 print(user)
1666 #> username='scolvin' password=SecretStr('**********')
1667 print(user.password.get_secret_value())
1668 #> password1
1669 print((SecretStr('password'), SecretStr('')))
1670 #> (SecretStr('**********'), SecretStr(''))
1671 ```
1672 """
1674 _inner_schema: ClassVar[CoreSchema] = core_schema.str_schema() 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1675 _error_kind: ClassVar[str] = 'string_type' 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1677 def __len__(self) -> int: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1678 return len(self._secret_value) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1680 def _display(self) -> str: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1681 return _secret_display(self._secret_value) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1684class SecretBytes(_SecretField[bytes]): 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1685 """A bytes used for storing sensitive information that you do not want to be visible in logging or tracebacks.
1687 It displays `b'**********'` instead of the string value on `repr()` and `str()` calls.
1688 When the secret value is nonempty, it is displayed as `b'**********'` instead of the underlying value in
1689 calls to `repr()` and `str()`. If the value _is_ empty, it is displayed as `b''`.
1691 ```py
1692 from pydantic import BaseModel, SecretBytes
1694 class User(BaseModel):
1695 username: str
1696 password: SecretBytes
1698 user = User(username='scolvin', password=b'password1')
print(user)
#> username='scolvin' password=SecretBytes(b'**********')
1700 print(user.password.get_secret_value())
1701 #> b'password1'
1702 print((SecretBytes(b'password'), SecretBytes(b'')))
1703 #> (SecretBytes(b'**********'), SecretBytes(b''))
1704 ```
1705 """
1707 _inner_schema: ClassVar[CoreSchema] = core_schema.bytes_schema() 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1708 _error_kind: ClassVar[str] = 'bytes_type' 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1710 def __len__(self) -> int: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1711 return len(self._secret_value) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1713 def _display(self) -> bytes: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1714 return _secret_display(self._secret_value).encode() 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1717# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PAYMENT CARD TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1720class PaymentCardBrand(str, Enum): 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1721 amex = 'American Express' 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1722 mastercard = 'Mastercard' 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1723 visa = 'Visa' 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1724 other = 'other' 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1726 def __str__(self) -> str: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1727 return self.value 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1730@deprecated( 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1731 'The `PaymentCardNumber` class is deprecated, use `pydantic_extra_types` instead. '
1732 'See https://docs.pydantic.dev/latest/api/pydantic_extra_types_payment/#pydantic_extra_types.payment.PaymentCardNumber.',
1733 category=PydanticDeprecatedSince20,
1734)
1735class PaymentCardNumber(str): 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1736 """Based on: https://en.wikipedia.org/wiki/Payment_card_number."""
1738 strip_whitespace: ClassVar[bool] = True 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1739 min_length: ClassVar[int] = 12 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1740 max_length: ClassVar[int] = 19 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1741 bin: str 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1742 last4: str 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1743 brand: PaymentCardBrand 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1745 def __init__(self, card_number: str): 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1746 self.validate_digits(card_number) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1748 card_number = self.validate_luhn_check_digit(card_number) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1750 self.bin = card_number[:6] 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1751 self.last4 = card_number[-4:] 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1752 self.brand = self.validate_brand(card_number) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1754 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1755 def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1756 return core_schema.with_info_after_validator_function( 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1757 cls.validate,
1758 core_schema.str_schema(
1759 min_length=cls.min_length, max_length=cls.max_length, strip_whitespace=cls.strip_whitespace
1760 ),
1761 )
1763 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1764 def validate(cls, input_value: str, /, _: core_schema.ValidationInfo) -> PaymentCardNumber: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1765 """Validate the card number and return a `PaymentCardNumber` instance."""
1766 return cls(input_value) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1768 @property 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1769 def masked(self) -> str: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1770 """Mask all but the last 4 digits of the card number.
1772 Returns:
1773 A masked card number string.
1774 """
1775 num_masked = len(self) - 10 # len(bin) + len(last4) == 10 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1776 return f'{self.bin}{"*" * num_masked}{self.last4}' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1778 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1779 def validate_digits(cls, card_number: str) -> None: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1780 """Validate that the card number is all digits."""
1781 if not card_number.isdigit(): 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1782 raise PydanticCustomError('payment_card_number_digits', 'Card number is not all digits') 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1784 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1785 def validate_luhn_check_digit(cls, card_number: str) -> str: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1786 """Based on: https://en.wikipedia.org/wiki/Luhn_algorithm."""
1787 sum_ = int(card_number[-1]) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1788 length = len(card_number) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1789 parity = length % 2 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1790 for i in range(length - 1): 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1791 digit = int(card_number[i]) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1792 if i % 2 == parity: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1793 digit *= 2 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1794 if digit > 9: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1795 digit -= 9 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1796 sum_ += digit 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1797 valid = sum_ % 10 == 0 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1798 if not valid: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1799 raise PydanticCustomError('payment_card_number_luhn', 'Card number is not luhn valid') 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1800 return card_number 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1802 @staticmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1803 def validate_brand(card_number: str) -> PaymentCardBrand: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1804 """Validate length based on BIN for major brands:
1805 https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_(IIN).
1806 """
1807 if card_number[0] == '4': 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1808 brand = PaymentCardBrand.visa 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1809 elif 51 <= int(card_number[:2]) <= 55: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1810 brand = PaymentCardBrand.mastercard 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1811 elif card_number[:2] in {'34', '37'}: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1812 brand = PaymentCardBrand.amex 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1813 else:
1814 brand = PaymentCardBrand.other 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1816 required_length: None | int | str = None 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1817 if brand == PaymentCardBrand.mastercard: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1818 required_length = 16 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1819 valid = len(card_number) == required_length 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1820 elif brand == PaymentCardBrand.visa: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1821 required_length = '13, 16 or 19' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1822 valid = len(card_number) in {13, 16, 19} 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1823 elif brand == PaymentCardBrand.amex: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1824 required_length = 15 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1825 valid = len(card_number) == required_length 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1826 else:
1827 valid = True 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1829 if not valid: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1830 raise PydanticCustomError( 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1831 'payment_card_number_brand',
1832 'Length for a {brand} card must be {required_length}',
1833 {'brand': brand, 'required_length': required_length},
1834 )
1835 return brand 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
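# Editor's illustrative sketch (not part of the pydantic source): a quick look at what
# `PaymentCardNumber` exposes for a Luhn-valid Visa test number. The helper name is
# hypothetical, and constructing the (deprecated) class emits a `PydanticDeprecatedSince20`
# warning.
def _example_payment_card_number() -> None:
    card = PaymentCardNumber('4242424242424242')  # well-known Luhn-valid Visa test number
    print(card.brand, card.bin, card.last4, card.masked)
    #> Visa 424242 4242 424242******4242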
1838# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTE SIZE TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1841class ByteSize(int): 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1842 """Converts a string representing a number of bytes with units (such as `'1KB'` or `'11.5MiB'`) into an integer.
1844 You can use the `ByteSize` data type to (case-insensitively) convert a string representation of a number of bytes into
1845 an integer, and also to print out human-readable strings representing a number of bytes.
1847 In conformance with [IEC 80000-13 Standard](https://en.wikipedia.org/wiki/ISO/IEC_80000) we interpret `'1KB'` to mean 1000 bytes,
1848 and `'1KiB'` to mean 1024 bytes. In general, including a middle `'i'` will cause the unit to be interpreted as a power of 2,
1849 rather than a power of 10 (so, for example, `'1 MB'` is treated as `1_000_000` bytes, whereas `'1 MiB'` is treated as `1_048_576` bytes).
1851 !!! info
1852 Note that `1b` will be parsed as "1 byte" and not "1 bit".
1854 ```py
1855 from pydantic import BaseModel, ByteSize
1857 class MyModel(BaseModel):
1858 size: ByteSize
1860 print(MyModel(size=52000).size)
1861 #> 52000
1862 print(MyModel(size='3000 KiB').size)
1863 #> 3072000
1865 m = MyModel(size='50 PB')
1866 print(m.size.human_readable())
1867 #> 44.4PiB
1868 print(m.size.human_readable(decimal=True))
1869 #> 50.0PB
1870 print(m.size.human_readable(separator=' '))
1871 #> 44.4 PiB
1873 print(m.size.to('TiB'))
1874 #> 45474.73508864641
1875 ```
1876 """
1878 byte_sizes = { 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1879 'b': 1,
1880 'kb': 10**3,
1881 'mb': 10**6,
1882 'gb': 10**9,
1883 'tb': 10**12,
1884 'pb': 10**15,
1885 'eb': 10**18,
1886 'kib': 2**10,
1887 'mib': 2**20,
1888 'gib': 2**30,
1889 'tib': 2**40,
1890 'pib': 2**50,
1891 'eib': 2**60,
1892 'bit': 1 / 8,
1893 'kbit': 10**3 / 8,
1894 'mbit': 10**6 / 8,
1895 'gbit': 10**9 / 8,
1896 'tbit': 10**12 / 8,
1897 'pbit': 10**15 / 8,
1898 'ebit': 10**18 / 8,
1899 'kibit': 2**10 / 8,
1900 'mibit': 2**20 / 8,
1901 'gibit': 2**30 / 8,
1902 'tibit': 2**40 / 8,
1903 'pibit': 2**50 / 8,
1904 'eibit': 2**60 / 8,
1905 }
1906 byte_sizes.update({k.lower()[0]: v for k, v in byte_sizes.items() if 'i' not in k}) 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1908 byte_string_pattern = r'^\s*(\d*\.?\d+)\s*(\w+)?' 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1909 byte_string_re = re.compile(byte_string_pattern, re.IGNORECASE) 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1911 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1912 def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1913 return core_schema.with_info_after_validator_function( 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1914 function=cls._validate,
1915 schema=core_schema.union_schema(
1916 [
1917 core_schema.str_schema(pattern=cls.byte_string_pattern),
1918 core_schema.int_schema(ge=0),
1919 ],
1920 custom_error_type='byte_size',
1921 custom_error_message='could not parse value and unit from byte string',
1922 ),
1923 serialization=core_schema.plain_serializer_function_ser_schema(
1924 int, return_schema=core_schema.int_schema(ge=0)
1925 ),
1926 )
1928 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1929 def _validate(cls, input_value: Any, /, _: core_schema.ValidationInfo) -> ByteSize: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1930 try: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1931 return cls(int(input_value)) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1932 except ValueError: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1933 pass 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1935 str_match = cls.byte_string_re.match(str(input_value)) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1936 if str_match is None:  [1936 ↛ 1937: line 1936 didn't jump to line 1937, because the condition on line 1936 was never true]  1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1937 raise PydanticCustomError('byte_size', 'could not parse value and unit from byte string')
1939 scalar, unit = str_match.groups() 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1940 if unit is None: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1941 unit = 'b' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1943 try: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1944 unit_mult = cls.byte_sizes[unit.lower()] 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1945 except KeyError: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1946 raise PydanticCustomError('byte_size_unit', 'could not interpret byte unit: {unit}', {'unit': unit}) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1948 return cls(int(float(scalar) * unit_mult)) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1950 def human_readable(self, decimal: bool = False, separator: str = '') -> str: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1951 """Converts a byte size to a human readable string.
1953 Args:
1954 decimal: If True, use decimal units (e.g. 1000 bytes per KB). If False, use binary units
1955 (e.g. 1024 bytes per KiB).
1956 separator: A string used to split the value and unit. Defaults to an empty string ('').
1958 Returns:
1959 A human readable string representation of the byte size.
1960 """
1961 if decimal: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1962 divisor = 1000 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1963 units = 'B', 'KB', 'MB', 'GB', 'TB', 'PB' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1964 final_unit = 'EB' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1965 else:
1966 divisor = 1024 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1967 units = 'B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1968 final_unit = 'EiB' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1970 num = float(self) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1971 for unit in units: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1972 if abs(num) < divisor: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1973 if unit == 'B': 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1974 return f'{num:0.0f}{separator}{unit}' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1975 else:
1976 return f'{num:0.1f}{separator}{unit}' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1977 num /= divisor 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1979 return f'{num:0.1f}{separator}{final_unit}' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1981 def to(self, unit: str) -> float: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
1982 """Converts a byte size to another unit, including both byte and bit units.
1984 Args:
1985 unit: The unit to convert to. Must be one of the following: B, KB, MB, GB, TB, PB, EB,
1986 KiB, MiB, GiB, TiB, PiB, EiB (byte units) and
1987 bit, kbit, mbit, gbit, tbit, pbit, ebit,
1988 kibit, mibit, gibit, tibit, pibit, eibit (bit units).
1990 Returns:
1991 The byte size in the new unit.
1992 """
1993 try: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1994 unit_div = self.byte_sizes[unit.lower()] 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1995 except KeyError: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1996 raise PydanticCustomError('byte_size_unit', 'Could not interpret byte unit: {unit}', {'unit': unit}) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
1998 return self / unit_div 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
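# Editor's illustrative sketch (not part of the pydantic source): `to()` simply divides
# the integer byte count by the target unit's size in bytes, so bit units come out eight
# times larger than the matching byte unit. The helper name is hypothetical.
def _example_byte_size_to() -> None:
    size = ByteSize(1024)
    print(size.to('KiB'), size.to('kb'), size.to('bit'))
    #> 1.0 1.024 8192.0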
2001# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DATE TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
2004def _check_annotated_type(annotated_type: str, expected_type: str, annotation: str) -> None: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2005 if annotated_type != expected_type: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2006 raise PydanticUserError(f"'{annotation}' cannot annotate '{annotated_type}'.", code='invalid_annotated_type') 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2009if TYPE_CHECKING: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2010 PastDate = Annotated[date, ...]
2011 FutureDate = Annotated[date, ...]
2012else:
2014 class PastDate: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2015 """A date in the past."""
2017 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2018 def __get_pydantic_core_schema__( 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2019 cls, source: type[Any], handler: GetCoreSchemaHandler
2020 ) -> core_schema.CoreSchema:
2021 if cls is source: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2022 # used directly as a type
2023 return core_schema.date_schema(now_op='past') 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2024 else:
2025 schema = handler(source) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2026 _check_annotated_type(schema['type'], 'date', cls.__name__) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2027 schema['now_op'] = 'past' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2028 return schema 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2030 def __repr__(self) -> str: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2031 return 'PastDate' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2033 class FutureDate: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2034 """A date in the future."""
2036 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2037 def __get_pydantic_core_schema__( 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2038 cls, source: type[Any], handler: GetCoreSchemaHandler
2039 ) -> core_schema.CoreSchema:
2040 if cls is source: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2041 # used directly as a type
2042 return core_schema.date_schema(now_op='future') 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2043 else:
2044 schema = handler(source) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2045 _check_annotated_type(schema['type'], 'date', cls.__name__) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2046 schema['now_op'] = 'future' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2047 return schema 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2049 def __repr__(self) -> str: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2050 return 'FutureDate' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
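# Editor's illustrative sketch (not part of the pydantic source): `PastDate` and
# `FutureDate` constrain a plain `date` relative to "today" at validation time. The
# model, field names and helper name are hypothetical.
def _example_past_and_future_date() -> None:
    from datetime import timedelta

    from pydantic import BaseModel

    class Booking(BaseModel):
        guest_birthdate: PastDate
        check_in: FutureDate

    today = date.today()
    booking = Booking(
        guest_birthdate=today - timedelta(days=10_000),
        check_in=today + timedelta(days=7),
    )
    print(booking.check_in > booking.guest_birthdate)
    #> True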
2053def condate( 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2054 *,
2055 strict: bool | None = None,
2056 gt: date | None = None,
2057 ge: date | None = None,
2058 lt: date | None = None,
2059 le: date | None = None,
2060) -> type[date]:
2061 """A wrapper for date that adds constraints.
2063 Args:
2064 strict: Whether to validate the date value in strict mode. Defaults to `None`.
2065 gt: The value must be greater than this. Defaults to `None`.
2066 ge: The value must be greater than or equal to this. Defaults to `None`.
2067 lt: The value must be less than this. Defaults to `None`.
2068 le: The value must be less than or equal to this. Defaults to `None`.
2070 Returns:
2071 A date type with the specified constraints.
2072 """
2073 return Annotated[ # pyright: ignore[reportReturnType] 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2074 date,
2075 Strict(strict) if strict is not None else None,
2076 annotated_types.Interval(gt=gt, ge=ge, lt=lt, le=le),
2077 ]
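# Editor's illustrative sketch (not part of the pydantic source): using `condate` to
# bound a date field. The model, field names and helper name are hypothetical; the
# expected error type below is the standard pydantic-core `greater_than_equal` error.
def _example_condate() -> None:
    from pydantic import BaseModel, ValidationError

    class Event(BaseModel):
        when: condate(ge=date(2020, 1, 1), lt=date(2030, 1, 1))

    print(Event(when=date(2024, 6, 1)).when)
    #> 2024-06-01
    try:
        Event(when=date(2019, 12, 31))
    except ValidationError as exc:
        print(exc.errors()[0]['type'])
        #> greater_than_equal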
2080# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DATETIME TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
2082if TYPE_CHECKING: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2083 AwareDatetime = Annotated[datetime, ...]
2084 NaiveDatetime = Annotated[datetime, ...]
2085 PastDatetime = Annotated[datetime, ...]
2086 FutureDatetime = Annotated[datetime, ...]
2088else:
2090 class AwareDatetime: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2091 """A datetime that requires timezone info."""
2093 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2094 def __get_pydantic_core_schema__( 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2095 cls, source: type[Any], handler: GetCoreSchemaHandler
2096 ) -> core_schema.CoreSchema:
2097 if cls is source: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2098 # used directly as a type
2099 return core_schema.datetime_schema(tz_constraint='aware') 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2100 else:
2101 schema = handler(source) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2102 _check_annotated_type(schema['type'], 'datetime', cls.__name__) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2103 schema['tz_constraint'] = 'aware' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2104 return schema 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2106 def __repr__(self) -> str: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2107 return 'AwareDatetime' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2109 class NaiveDatetime: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2110 """A datetime that doesn't require timezone info."""
2112 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2113 def __get_pydantic_core_schema__( 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2114 cls, source: type[Any], handler: GetCoreSchemaHandler
2115 ) -> core_schema.CoreSchema:
2116 if cls is source: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2117 # used directly as a type
2118 return core_schema.datetime_schema(tz_constraint='naive') 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2119 else:
2120 schema = handler(source) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2121 _check_annotated_type(schema['type'], 'datetime', cls.__name__) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2122 schema['tz_constraint'] = 'naive' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2123 return schema 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2125 def __repr__(self) -> str: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2126 return 'NaiveDatetime' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2128 class PastDatetime: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2129 """A datetime that must be in the past."""
2131 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2132 def __get_pydantic_core_schema__( 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2133 cls, source: type[Any], handler: GetCoreSchemaHandler
2134 ) -> core_schema.CoreSchema:
2135 if cls is source: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2136 # used directly as a type
2137 return core_schema.datetime_schema(now_op='past') 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2138 else:
2139 schema = handler(source) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2140 _check_annotated_type(schema['type'], 'datetime', cls.__name__) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2141 schema['now_op'] = 'past' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2142 return schema 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2144 def __repr__(self) -> str: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2145 return 'PastDatetime' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2147 class FutureDatetime: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2148 """A datetime that must be in the future."""
2150 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2151 def __get_pydantic_core_schema__( 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2152 cls, source: type[Any], handler: GetCoreSchemaHandler
2153 ) -> core_schema.CoreSchema:
2154 if cls is source: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2155 # used directly as a type
2156 return core_schema.datetime_schema(now_op='future') 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2157 else:
2158 schema = handler(source) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2159 _check_annotated_type(schema['type'], 'datetime', cls.__name__) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2160 schema['now_op'] = 'future' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2161 return schema 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2163 def __repr__(self) -> str: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2164 return 'FutureDatetime' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
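# Editor's illustrative sketch (not part of the pydantic source): `AwareDatetime` rejects
# naive datetimes and `NaiveDatetime` rejects timezone-aware ones, while both otherwise
# behave as plain `datetime` fields. The model, field names and helper name are hypothetical.
def _example_tz_constrained_datetimes() -> None:
    from datetime import timezone

    from pydantic import BaseModel, ValidationError

    class Meeting(BaseModel):
        starts_at: AwareDatetime
        wall_clock: NaiveDatetime

    m = Meeting(
        starts_at=datetime(2024, 1, 1, 9, 0, tzinfo=timezone.utc),
        wall_clock=datetime(2024, 1, 1, 9, 0),
    )
    print(m.starts_at, '|', m.wall_clock)
    #> 2024-01-01 09:00:00+00:00 | 2024-01-01 09:00:00

    try:
        Meeting(starts_at=datetime(2024, 1, 1, 9, 0), wall_clock=datetime(2024, 1, 1, 9, 0))
    except ValidationError as exc:
        print(exc.errors()[0]['type'])
        #> timezone_aware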
2167# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Encoded TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
2170class EncoderProtocol(Protocol): 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2171 """Protocol for encoding and decoding data to and from bytes.""" 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2173 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2174 def decode(cls, data: bytes) -> bytes: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2175 """Decode the data using the encoder.
2177 Args:
2178 data: The data to decode.
2180 Returns:
2181 The decoded data.
2182 """
2183 ...
2185 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2186 def encode(cls, value: bytes) -> bytes: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2187 """Encode the data using the encoder.
2189 Args:
2190 value: The data to encode.
2192 Returns:
2193 The encoded data.
2194 """
2195 ...
2197 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2198 def get_json_format(cls) -> str: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2199 """Get the JSON format for the encoded data.
2201 Returns:
2202 The JSON format for the encoded data.
2203 """
2204 ...
2207class Base64Encoder(EncoderProtocol): 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2208 """Standard (non-URL-safe) Base64 encoder."""
2210 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2211 def decode(cls, data: bytes) -> bytes: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2212 """Decode the data from base64 encoded bytes to original bytes data.
2214 Args:
2215 data: The data to decode.
2217 Returns:
2218 The decoded data.
2219 """
2220 try: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2221 return base64.decodebytes(data) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2222 except ValueError as e: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2223 raise PydanticCustomError('base64_decode', "Base64 decoding error: '{error}'", {'error': str(e)}) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2225 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2226 def encode(cls, value: bytes) -> bytes: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2227 """Encode the data from bytes to a base64 encoded bytes.
2229 Args:
2230 value: The data to encode.
2232 Returns:
2233 The encoded data.
2234 """
2235 return base64.encodebytes(value) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2237 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2238 def get_json_format(cls) -> Literal['base64']: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2239 """Get the JSON format for the encoded data.
2241 Returns:
2242 The JSON format for the encoded data.
2243 """
2244 return 'base64' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2247class Base64UrlEncoder(EncoderProtocol): 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2248 """URL-safe Base64 encoder."""
2250 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2251 def decode(cls, data: bytes) -> bytes: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2252 """Decode the data from base64 encoded bytes to original bytes data.
2254 Args:
2255 data: The data to decode.
2257 Returns:
2258 The decoded data.
2259 """
2260 try: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2261 return base64.urlsafe_b64decode(data) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2262 except ValueError as e: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2263 raise PydanticCustomError('base64_decode', "Base64 decoding error: '{error}'", {'error': str(e)}) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2265 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2266 def encode(cls, value: bytes) -> bytes: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2267 """Encode the data from bytes to a base64 encoded bytes.
2269 Args:
2270 value: The data to encode.
2272 Returns:
2273 The encoded data.
2274 """
2275 return base64.urlsafe_b64encode(value) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2277 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2278 def get_json_format(cls) -> Literal['base64url']: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2279 """Get the JSON format for the encoded data.
2281 Returns:
2282 The JSON format for the encoded data.
2283 """
2284 return 'base64url' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2287@_dataclasses.dataclass(**_internal_dataclass.slots_true) 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2288class EncodedBytes: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2289 """A bytes type that is encoded and decoded using the specified encoder.
2291 `EncodedBytes` needs an encoder that implements `EncoderProtocol` to operate.
2293 ```py
2294 from typing_extensions import Annotated
2296 from pydantic import BaseModel, EncodedBytes, EncoderProtocol, ValidationError
2298 class MyEncoder(EncoderProtocol):
2299 @classmethod
2300 def decode(cls, data: bytes) -> bytes:
2301 if data == b'**undecodable**':
2302 raise ValueError('Cannot decode data')
2303 return data[13:]
2305 @classmethod
2306 def encode(cls, value: bytes) -> bytes:
2307 return b'**encoded**: ' + value
2309 @classmethod
2310 def get_json_format(cls) -> str:
2311 return 'my-encoder'
2313 MyEncodedBytes = Annotated[bytes, EncodedBytes(encoder=MyEncoder)]
2315 class Model(BaseModel):
2316 my_encoded_bytes: MyEncodedBytes
2318 # Initialize the model with encoded data
2319 m = Model(my_encoded_bytes=b'**encoded**: some bytes')
2321 # Access decoded value
2322 print(m.my_encoded_bytes)
2323 #> b'some bytes'
2325 # Serialize into the encoded form
2326 print(m.model_dump())
2327 #> {'my_encoded_bytes': b'**encoded**: some bytes'}
2329 # Validate encoded data
2330 try:
2331 Model(my_encoded_bytes=b'**undecodable**')
2332 except ValidationError as e:
2333 print(e)
2334 '''
2335 1 validation error for Model
2336 my_encoded_bytes
2337 Value error, Cannot decode data [type=value_error, input_value=b'**undecodable**', input_type=bytes]
2338 '''
2339 ```
2340 """
2342 encoder: type[EncoderProtocol] 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2344 def __get_pydantic_json_schema__( 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2345 self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
2346 ) -> JsonSchemaValue:
2347 field_schema = handler(core_schema) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2348 field_schema.update(type='string', format=self.encoder.get_json_format()) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2349 return field_schema 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2351 def __get_pydantic_core_schema__(self, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2352 return core_schema.with_info_after_validator_function( 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2353 function=self.decode,
2354 schema=core_schema.bytes_schema(),
2355 serialization=core_schema.plain_serializer_function_ser_schema(function=self.encode),
2356 )
2358 def decode(self, data: bytes, _: core_schema.ValidationInfo) -> bytes: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2359 """Decode the data using the specified encoder.
2361 Args:
2362 data: The data to decode.
2364 Returns:
2365 The decoded data.
2366 """
2367 return self.encoder.decode(data) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2369 def encode(self, value: bytes) -> bytes: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2370 """Encode the data using the specified encoder.
2372 Args:
2373 value: The data to encode.
2375 Returns:
2376 The encoded data.
2377 """
2378 return self.encoder.encode(value) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2380 def __hash__(self) -> int: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2381 return hash(self.encoder) 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2384@_dataclasses.dataclass(**_internal_dataclass.slots_true) 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2385class EncodedStr(EncodedBytes): 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2386 """A str type that is encoded and decoded using the specified encoder.
2388 `EncodedStr` needs an encoder that implements `EncoderProtocol` to operate.
2390 ```py
2391 from typing_extensions import Annotated
2393 from pydantic import BaseModel, EncodedStr, EncoderProtocol, ValidationError
2395 class MyEncoder(EncoderProtocol):
2396 @classmethod
2397 def decode(cls, data: bytes) -> bytes:
2398 if data == b'**undecodable**':
2399 raise ValueError('Cannot decode data')
2400 return data[13:]
2402 @classmethod
2403 def encode(cls, value: bytes) -> bytes:
2404 return b'**encoded**: ' + value
2406 @classmethod
2407 def get_json_format(cls) -> str:
2408 return 'my-encoder'
2410 MyEncodedStr = Annotated[str, EncodedStr(encoder=MyEncoder)]
2412 class Model(BaseModel):
2413 my_encoded_str: MyEncodedStr
2415 # Initialize the model with encoded data
2416 m = Model(my_encoded_str='**encoded**: some str')
2418 # Access decoded value
2419 print(m.my_encoded_str)
2420 #> some str
2422 # Serialize into the encoded form
2423 print(m.model_dump())
2424 #> {'my_encoded_str': '**encoded**: some str'}
2426 # Validate encoded data
2427 try:
2428 Model(my_encoded_str='**undecodable**')
2429 except ValidationError as e:
2430 print(e)
2431 '''
2432 1 validation error for Model
2433 my_encoded_str
2434 Value error, Cannot decode data [type=value_error, input_value='**undecodable**', input_type=str]
2435 '''
2436 ```
2437 """
2439 def __get_pydantic_core_schema__(self, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2440 return core_schema.with_info_after_validator_function( 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2441 function=self.decode_str,
2442 schema=super(EncodedStr, self).__get_pydantic_core_schema__(source=source, handler=handler), # noqa: UP008
2443 serialization=core_schema.plain_serializer_function_ser_schema(function=self.encode_str),
2444 )
2446 def decode_str(self, data: bytes, _: core_schema.ValidationInfo) -> str: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2447 """Decode the data using the specified encoder.
2449 Args:
2450 data: The data to decode.
2452 Returns:
2453 The decoded data.
2454 """
2455 return data.decode() 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2457 def encode_str(self, value: str) -> str: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2458 """Encode the data using the specified encoder.
2460 Args:
2461 value: The data to encode.
2463 Returns:
2464 The encoded data.
2465 """
2466 return super(EncodedStr, self).encode(value=value.encode()).decode() # noqa: UP008 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2468 def __hash__(self) -> int: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2469 return hash(self.encoder) 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2472Base64Bytes = Annotated[bytes, EncodedBytes(encoder=Base64Encoder)] 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2473"""A bytes type that is encoded and decoded using the standard (non-URL-safe) base64 encoder. 1bcdefgahijklmGHIJKLMnopqrs
2475Note:
2476 Under the hood, `Base64Bytes` uses the standard library `base64.encodebytes` and `base64.decodebytes` functions.
2478 As a result, attempting to decode url-safe base64 data using the `Base64Bytes` type may fail or produce an incorrect
2479 decoding.
2481```py
2482from pydantic import Base64Bytes, BaseModel, ValidationError
2484class Model(BaseModel):
2485 base64_bytes: Base64Bytes
2487# Initialize the model with base64 data
2488m = Model(base64_bytes=b'VGhpcyBpcyB0aGUgd2F5')
2490# Access decoded value
2491print(m.base64_bytes)
2492#> b'This is the way'
2494# Serialize into the base64 form
2495print(m.model_dump())
2496#> {'base64_bytes': b'VGhpcyBpcyB0aGUgd2F5\n'}
2498# Validate base64 data
2499try:
2500 print(Model(base64_bytes=b'undecodable').base64_bytes)
2501except ValidationError as e:
2502 print(e)
2503 '''
2504 1 validation error for Model
2505 base64_bytes
2506 Base64 decoding error: 'Incorrect padding' [type=base64_decode, input_value=b'undecodable', input_type=bytes]
2507 '''
2508```
2509"""
2510Base64Str = Annotated[str, EncodedStr(encoder=Base64Encoder)] 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2511"""A str type that is encoded and decoded using the standard (non-URL-safe) base64 encoder. 1bcdefgahijklmGHIJKLMnopqrs
2513Note:
2514 Under the hood, `Base64Str` uses the standard library `base64.encodebytes` and `base64.decodebytes` functions.
2516 As a result, attempting to decode url-safe base64 data using the `Base64Str` type may fail or produce an incorrect
2517 decoding.
2519```py
2520from pydantic import Base64Str, BaseModel, ValidationError
2522class Model(BaseModel):
2523 base64_str: Base64Str
2525# Initialize the model with base64 data
2526m = Model(base64_str='VGhlc2UgYXJlbid0IHRoZSBkcm9pZHMgeW91J3JlIGxvb2tpbmcgZm9y')
2528# Access decoded value
2529print(m.base64_str)
2530#> These aren't the droids you're looking for
2532# Serialize into the base64 form
2533print(m.model_dump())
2534#> {'base64_str': 'VGhlc2UgYXJlbid0IHRoZSBkcm9pZHMgeW91J3JlIGxvb2tpbmcgZm9y\n'}
2536# Validate base64 data
2537try:
2538 print(Model(base64_str='undecodable').base64_str)
2539except ValidationError as e:
2540 print(e)
2541 '''
2542 1 validation error for Model
2543 base64_str
2544 Base64 decoding error: 'Incorrect padding' [type=base64_decode, input_value='undecodable', input_type=str]
2545 '''
2546```
2547"""
2548Base64UrlBytes = Annotated[bytes, EncodedBytes(encoder=Base64UrlEncoder)] 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2549"""A bytes type that is encoded and decoded using the URL-safe base64 encoder. 1bcdefgahijklmGHIJKLMnopqrs
2551Note:
2552 Under the hood, `Base64UrlBytes` uses the standard library `base64.urlsafe_b64encode` and `base64.urlsafe_b64decode`
2553 functions.
2555 As a result, the `Base64UrlBytes` type can be used to faithfully decode "vanilla" base64 data
2556 (using `'+'` and `'/'`).
2558```py
2559from pydantic import Base64UrlBytes, BaseModel
2561class Model(BaseModel):
2562 base64url_bytes: Base64UrlBytes
2564# Initialize the model with base64 data
2565m = Model(base64url_bytes=b'SHc_dHc-TXc==')
2566print(m)
2567#> base64url_bytes=b'Hw?tw>Mw'
2568```
2569"""
2570Base64UrlStr = Annotated[str, EncodedStr(encoder=Base64UrlEncoder)] 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2571"""A str type that is encoded and decoded using the URL-safe base64 encoder. 1bcdefgahijklmGHIJKLMnopqrs
2573Note:
2574 Under the hood, `Base64UrlStr` uses the standard library `base64.urlsafe_b64encode` and `base64.urlsafe_b64decode`
2575 functions.
2577 As a result, the `Base64UrlStr` type can be used to faithfully decode "vanilla" base64 data (using `'+'` and `'/'`).
2579```py
2580from pydantic import Base64UrlStr, BaseModel
2582class Model(BaseModel):
2583 base64url_str: Base64UrlStr
2585# Initialize the model with base64 data
2586m = Model(base64url_str='SHc_dHc-TXc==')
2587print(m)
2588#> base64url_str='Hw?tw>Mw'
2589```
2590"""
2593__getattr__ = getattr_migration(__name__) 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2596@_dataclasses.dataclass(**_internal_dataclass.slots_true) 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2597class GetPydanticSchema: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2598 """Usage docs: https://docs.pydantic.dev/2.8/concepts/types/#using-getpydanticschema-to-reduce-boilerplate
2600 A convenience class for creating an annotation that provides pydantic custom type hooks.
2602 This class is intended to eliminate the need to create a custom "marker" which defines the
2603 `__get_pydantic_core_schema__` and `__get_pydantic_json_schema__` custom hook methods.
2605 For example, to have a field treated by type checkers as `int`, but by pydantic as `Any`, you can do:
2606 ```python
2607 from typing import Any
2609 from typing_extensions import Annotated
2611 from pydantic import BaseModel, GetPydanticSchema
2613 HandleAsAny = GetPydanticSchema(lambda _s, h: h(Any))
2615 class Model(BaseModel):
2616 x: Annotated[int, HandleAsAny] # pydantic sees `x: Any`
2618 print(repr(Model(x='abc').x))
2619 #> 'abc'
2620 ```
2621 """
2623 get_pydantic_core_schema: Callable[[Any, GetCoreSchemaHandler], CoreSchema] | None = None 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2624 get_pydantic_json_schema: Callable[[Any, GetJsonSchemaHandler], JsonSchemaValue] | None = None 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2626 # Note: we may want to consider adding a convenience staticmethod `def for_type(type_: Any) -> GetPydanticSchema:`
2627 # which returns `GetPydanticSchema(lambda _s, h: h(type_))`
2629 if not TYPE_CHECKING:  [2629 ↛ 2641: line 2629 didn't jump to line 2641, because the condition on line 2629 was always true]  1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2630 # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access
2632 def __getattr__(self, item: str) -> Any: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2633 """Use this rather than defining `__get_pydantic_core_schema__` etc. to reduce the number of nested calls."""
2634 if item == '__get_pydantic_core_schema__' and self.get_pydantic_core_schema: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2635 return self.get_pydantic_core_schema 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2636 elif item == '__get_pydantic_json_schema__' and self.get_pydantic_json_schema:  [2636 ↛ 2637: line 2636 didn't jump to line 2637, because the condition on line 2636 was never true]  1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2637 return self.get_pydantic_json_schema
2638 else:
2639 return object.__getattribute__(self, item) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2641 __hash__ = object.__hash__ 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2644@_dataclasses.dataclass(**_internal_dataclass.slots_true, frozen=True) 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2645class Tag: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2646 """Provides a way to specify the expected tag to use for a case of a (callable) discriminated union.
2648 Also provides a way to label a union case in error messages.
2650 When using a callable `Discriminator`, attach a `Tag` to each case in the `Union` to specify the tag that
2651 should be used to identify that case. For example, in the below example, the `Tag` is used to specify that
2652 if `get_discriminator_value` returns `'apple'`, the input should be validated as an `ApplePie`, and if it
2653 returns `'pumpkin'`, the input should be validated as a `PumpkinPie`.
2655 The primary role of the `Tag` here is to map the return value from the callable `Discriminator` function to
2656 the appropriate member of the `Union` in question.
2658 ```py
2659 from typing import Any, Union
2661 from typing_extensions import Annotated, Literal
2663 from pydantic import BaseModel, Discriminator, Tag
2665 class Pie(BaseModel):
2666 time_to_cook: int
2667 num_ingredients: int
2669 class ApplePie(Pie):
2670 fruit: Literal['apple'] = 'apple'
2672 class PumpkinPie(Pie):
2673 filling: Literal['pumpkin'] = 'pumpkin'
2675 def get_discriminator_value(v: Any) -> str:
2676 if isinstance(v, dict):
2677 return v.get('fruit', v.get('filling'))
2678 return getattr(v, 'fruit', getattr(v, 'filling', None))
2680 class ThanksgivingDinner(BaseModel):
2681 dessert: Annotated[
2682 Union[
2683 Annotated[ApplePie, Tag('apple')],
2684 Annotated[PumpkinPie, Tag('pumpkin')],
2685 ],
2686 Discriminator(get_discriminator_value),
2687 ]
2689 apple_variation = ThanksgivingDinner.model_validate(
2690 {'dessert': {'fruit': 'apple', 'time_to_cook': 60, 'num_ingredients': 8}}
2691 )
2692 print(repr(apple_variation))
2693 '''
2694 ThanksgivingDinner(dessert=ApplePie(time_to_cook=60, num_ingredients=8, fruit='apple'))
2695 '''
2697 pumpkin_variation = ThanksgivingDinner.model_validate(
2698 {
2699 'dessert': {
2700 'filling': 'pumpkin',
2701 'time_to_cook': 40,
2702 'num_ingredients': 6,
2703 }
2704 }
2705 )
2706 print(repr(pumpkin_variation))
2707 '''
2708 ThanksgivingDinner(dessert=PumpkinPie(time_to_cook=40, num_ingredients=6, filling='pumpkin'))
2709 '''
2710 ```
2712 !!! note
2713 You must specify a `Tag` for every case in a `Union` that is associated with a
2714 callable `Discriminator`. Failing to do so will result in a `PydanticUserError` with code
2715 [`callable-discriminator-no-tag`](../errors/usage_errors.md#callable-discriminator-no-tag).
2717 See the [Discriminated Unions] concepts docs for more details on how to use `Tag`s.
2719 [Discriminated Unions]: ../concepts/unions.md#discriminated-unions
2720 """
2722 tag: str 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2724 def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2725 schema = handler(source_type) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2726 metadata = schema.setdefault('metadata', {}) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2727 assert isinstance(metadata, dict) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2728 metadata[_core_utils.TAGGED_UNION_TAG_KEY] = self.tag 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2729 return schema 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2732@_dataclasses.dataclass(**_internal_dataclass.slots_true, frozen=True) 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2733class Discriminator: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2734 """Usage docs: https://docs.pydantic.dev/2.8/concepts/unions/#discriminated-unions-with-callable-discriminator
2736 Provides a way to use a custom callable as the way to extract the value of a union discriminator.
2738 This allows you to get validation behavior like you'd get from `Field(discriminator=<field_name>)`,
2739 but without needing to have a single shared field across all the union choices. This also makes it
2740 possible to handle unions of models and primitive types with discriminated-union-style validation errors.
2741 Finally, this allows you to use a custom callable as the way to identify which member of a union a value
2742 belongs to, while still seeing all the performance benefits of a discriminated union.
2744 Consider this example, which is much more performant with the use of `Discriminator` and thus a `TaggedUnion`
2745 than it would be as a normal `Union`.
2747 ```py
2748 from typing import Any, Union
2750 from typing_extensions import Annotated, Literal
2752 from pydantic import BaseModel, Discriminator, Tag
2754 class Pie(BaseModel):
2755 time_to_cook: int
2756 num_ingredients: int
2758 class ApplePie(Pie):
2759 fruit: Literal['apple'] = 'apple'
2761 class PumpkinPie(Pie):
2762 filling: Literal['pumpkin'] = 'pumpkin'
2764 def get_discriminator_value(v: Any) -> str:
2765 if isinstance(v, dict):
2766 return v.get('fruit', v.get('filling'))
2767 return getattr(v, 'fruit', getattr(v, 'filling', None))
2769 class ThanksgivingDinner(BaseModel):
2770 dessert: Annotated[
2771 Union[
2772 Annotated[ApplePie, Tag('apple')],
2773 Annotated[PumpkinPie, Tag('pumpkin')],
2774 ],
2775 Discriminator(get_discriminator_value),
2776 ]
2778 apple_variation = ThanksgivingDinner.model_validate(
2779 {'dessert': {'fruit': 'apple', 'time_to_cook': 60, 'num_ingredients': 8}}
2780 )
2781 print(repr(apple_variation))
2782 '''
2783 ThanksgivingDinner(dessert=ApplePie(time_to_cook=60, num_ingredients=8, fruit='apple'))
2784 '''
2786 pumpkin_variation = ThanksgivingDinner.model_validate(
2787 {
2788 'dessert': {
2789 'filling': 'pumpkin',
2790 'time_to_cook': 40,
2791 'num_ingredients': 6,
2792 }
2793 }
2794 )
2795 print(repr(pumpkin_variation))
2796 '''
2797 ThanksgivingDinner(dessert=PumpkinPie(time_to_cook=40, num_ingredients=6, filling='pumpkin'))
2798 '''
2799 ```
2801 See the [Discriminated Unions] concepts docs for more details on how to use `Discriminator`s.
2803 [Discriminated Unions]: ../concepts/unions.md#discriminated-unions
2804 """
2806 discriminator: str | Callable[[Any], Hashable] 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2807 """The callable or field name for discriminating the type in a tagged union. 1bcdefgahijklmGHIJKLMnopqrs
2809 A `Callable` discriminator must extract the value of the discriminator from the input.
2810 A `str` discriminator must be the name of a field to discriminate against.
2811 """
2812 custom_error_type: str | None = None 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2813 """Type to use in [custom errors](../errors/errors.md#custom-errors) replacing the standard discriminated union 1bcdefgahijklmGHIJKLMnopqrs
2814 validation errors.
2815 """
2816 custom_error_message: str | None = None 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2817 """Message to use in custom errors.""" 1bcdefgahijklmGHIJKLMnopqrs
2818 custom_error_context: dict[str, int | str | float] | None = None 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2819 """Context to use in custom errors.""" 1bcdefgahijklmGHIJKLMnopqrs
2821 def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2822 origin = _typing_extra.get_origin(source_type) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2823 if not origin or not _typing_extra.origin_is_union(origin):  [2823 ↛ 2824: line 2823 didn't jump to line 2824, because the condition on line 2823 was never true]  1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2824 raise TypeError(f'{type(self).__name__} must be used with a Union type, not {source_type}')
2826 if isinstance(self.discriminator, str): 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2827 from pydantic import Field 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2829 return handler(Annotated[source_type, Field(discriminator=self.discriminator)]) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2830 else:
2831 original_schema = handler(source_type) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2832 return self._convert_schema(original_schema) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2834 def _convert_schema(self, original_schema: core_schema.CoreSchema) -> core_schema.TaggedUnionSchema: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2835 if original_schema['type'] != 'union':  [2835 ↛ 2840: line 2835 didn't jump to line 2840, because the condition on line 2835 was never true]  1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2836 # This likely indicates that the schema was a single-item union that was simplified.
2837 # In this case, we do the same thing we do in
2838 # `pydantic._internal._discriminated_union._ApplyInferredDiscriminator._apply_to_root`, namely,
2839 # package the generated schema back into a single-item union.
2840 original_schema = core_schema.union_schema([original_schema])
2842 tagged_union_choices = {} 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2843 for i, choice in enumerate(original_schema['choices']): 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2844 tag = None 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2845 if isinstance(choice, tuple): 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2846 choice, tag = choice 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2847 metadata = choice.get('metadata') 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2848 if metadata is not None: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2849 metadata_tag = metadata.get(_core_utils.TAGGED_UNION_TAG_KEY) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2850 if metadata_tag is not None:  [2850 ↛ 2852: line 2850 didn't jump to line 2852, because the condition on line 2850 was always true]  1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2851 tag = metadata_tag 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2852 if tag is None: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2853 raise PydanticUserError( 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2854 f'`Tag` not provided for choice {choice} used with `Discriminator`',
2855 code='callable-discriminator-no-tag',
2856 )
2857 tagged_union_choices[tag] = choice 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2859 # Have to do these verbose checks to ensure falsy values ('' and {}) don't get ignored
2860 custom_error_type = self.custom_error_type 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2861 if custom_error_type is None: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2862 custom_error_type = original_schema.get('custom_error_type') 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2864 custom_error_message = self.custom_error_message 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2865 if custom_error_message is None: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2866 custom_error_message = original_schema.get('custom_error_message') 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2868 custom_error_context = self.custom_error_context 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2869 if custom_error_context is None: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2870 custom_error_context = original_schema.get('custom_error_context') 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2872 custom_error_type = original_schema.get('custom_error_type') if custom_error_type is None else custom_error_type 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2873 return core_schema.tagged_union_schema( 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2874 tagged_union_choices,
2875 self.discriminator,
2876 custom_error_type=custom_error_type,
2877 custom_error_message=custom_error_message,
2878 custom_error_context=custom_error_context,
2879 strict=original_schema.get('strict'),
2880 ref=original_schema.get('ref'),
2881 metadata=original_schema.get('metadata'),
2882 serialization=original_schema.get('serialization'),
2883 )
2886_JSON_TYPES = {int, float, str, bool, list, dict, type(None)} 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2889def _get_type_name(x: Any) -> str: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2890 type_ = type(x) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2891 if type_ in _JSON_TYPES: 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2892 return type_.__name__ 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2894 # Handle proper subclasses; note we don't need to handle None or bool here
2895 if isinstance(x, int): 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2896 return 'int' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2897 if isinstance(x, float): 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2898 return 'float' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2899 if isinstance(x, str): 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2900 return 'str' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2901 if isinstance(x, list): 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2902 return 'list' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2903 if isinstance(x, dict): 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2904 return 'dict' 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2906 # Fail by returning the type's actual name
2907 return getattr(type_, '__name__', '<no type name>') 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2910class _AllowAnyJson: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2911 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2912 def __get_pydantic_core_schema__(cls, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2913 python_schema = handler(source_type) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2914 return core_schema.json_or_python_schema(json_schema=core_schema.any_schema(), python_schema=python_schema) 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
2917if TYPE_CHECKING: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2918 # This seems to only be necessary for mypy
2919 JsonValue: TypeAlias = Union[
2920 List['JsonValue'],
2921 Dict[str, 'JsonValue'],
2922 str,
2923 bool,
2924 int,
2925 float,
2926 None,
2927 ]
2928 """A `JsonValue` is used to represent a value that can be serialized to JSON.
2930 It may be one of:
2932 * `List['JsonValue']`
2933 * `Dict[str, 'JsonValue']`
2934 * `str`
2935 * `bool`
2936 * `int`
2937 * `float`
2938 * `None`
2940 The following example demonstrates how to use `JsonValue` to validate JSON data,
2941 and what kind of errors to expect when input data is not json serializable.
2943 ```py
2944 import json
2946 from pydantic import BaseModel, JsonValue, ValidationError
2948 class Model(BaseModel):
2949 j: JsonValue
2951 valid_json_data = {'j': {'a': {'b': {'c': 1, 'd': [2, None]}}}}
2952 invalid_json_data = {'j': {'a': {'b': ...}}}
2954 print(repr(Model.model_validate(valid_json_data)))
2955 #> Model(j={'a': {'b': {'c': 1, 'd': [2, None]}}})
2956 print(repr(Model.model_validate_json(json.dumps(valid_json_data))))
2957 #> Model(j={'a': {'b': {'c': 1, 'd': [2, None]}}})
2959 try:
2960 Model.model_validate(invalid_json_data)
2961 except ValidationError as e:
2962 print(e)
2963 '''
2964 1 validation error for Model
2965 j.dict.a.dict.b
2966 input was not a valid JSON value [type=invalid-json-value, input_value=Ellipsis, input_type=ellipsis]
2967 '''
2968 ```
2969 """
2971else:
2972 JsonValue = TypeAliasType( 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2973 'JsonValue',
2974 Annotated[
2975 Union[
2976 Annotated[List['JsonValue'], Tag('list')],
2977 Annotated[Dict[str, 'JsonValue'], Tag('dict')],
2978 Annotated[str, Tag('str')],
2979 Annotated[bool, Tag('bool')],
2980 Annotated[int, Tag('int')],
2981 Annotated[float, Tag('float')],
2982 Annotated[None, Tag('NoneType')],
2983 ],
2984 Discriminator(
2985 _get_type_name,
2986 custom_error_type='invalid-json-value',
2987 custom_error_message='input was not a valid JSON value',
2988 ),
2989 _AllowAnyJson,
2990 ],
2991 )
2994class _OnErrorOmit: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2995 @classmethod 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2996 def __get_pydantic_core_schema__(cls, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema: 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
2997 # there is no actual default value here but we use with_default_schema since it already has the on_error
2998 # behavior implemented and it would be no more efficient to implement it on every other validator
2999 # or as a standalone validator
3000 return core_schema.with_default_schema(schema=handler(source_type), on_error='omit') 1tuvwbcdefgxaCDEFhijklmyzABnopqrs
3003OnErrorOmit = Annotated[T, _OnErrorOmit] 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
3004""" 1tuvwbcdefgxaCDEFhijklmNOGHIJKLMyzABnopqrs
3005 When used as an item in a list, as the key type in a dict, as an optional value of a TypedDict, etc.,
3006 this annotation omits the item from the iteration if there is any error validating it.
3007 That is, instead of a [`ValidationError`][pydantic_core.ValidationError] being propagated up and the entire iterable being discarded,
3008 any invalid items are discarded and the valid ones are returned.
3009"""