Coverage for pydantic/mypy.py: 91.04%
617 statements
« prev ^ index » next coverage.py v7.8.0, created at 2025-04-17 15:12 +0000
« prev ^ index » next coverage.py v7.8.0, created at 2025-04-17 15:12 +0000
1"""This module includes classes and functions designed specifically for use with the mypy plugin."""
3from __future__ import annotations 1hijklmnopqrstuvwxyzABCDEabcdefgFGHIJKLMNOP
5import sys 1hijklmnopqrstuvwxyzABCDEabcdefgFGHIJKLMNOP
6from collections.abc import Iterator 1hijklmnopqrstuvwxyzABCDEabcdefgFGHIJKLMNOP
7from configparser import ConfigParser 1hijklmnopqrstuvwxyzABCDEabcdefgFGHIJKLMNOP
8from typing import Any, Callable, cast 1hijklmnopqrstuvwxyzABCDEabcdefgFGHIJKLMNOP
10from mypy.errorcodes import ErrorCode 1hijklmnopqrstuvwxyzABCDEabcdefgFGHIJKLMNOP
11from mypy.expandtype import expand_type, expand_type_by_instance 1abcdefg
12from mypy.nodes import ( 1abcdefg
13 ARG_NAMED,
14 ARG_NAMED_OPT,
15 ARG_OPT,
16 ARG_POS,
17 ARG_STAR2,
18 INVARIANT,
19 MDEF,
20 Argument,
21 AssignmentStmt,
22 Block,
23 CallExpr,
24 ClassDef,
25 Context,
26 Decorator,
27 DictExpr,
28 EllipsisExpr,
29 Expression,
30 FuncDef,
31 IfStmt,
32 JsonDict,
33 MemberExpr,
34 NameExpr,
35 PassStmt,
36 PlaceholderNode,
37 RefExpr,
38 Statement,
39 StrExpr,
40 SymbolTableNode,
41 TempNode,
42 TypeAlias,
43 TypeInfo,
44 Var,
45)
46from mypy.options import Options 1abcdefg
47from mypy.plugin import ( 1abcdefg
48 CheckerPluginInterface,
49 ClassDefContext,
50 MethodContext,
51 Plugin,
52 ReportConfigContext,
53 SemanticAnalyzerPluginInterface,
54)
55from mypy.plugins.common import ( 1abcdefg
56 deserialize_and_fixup_type,
57)
58from mypy.semanal import set_callable_name 1abcdefg
59from mypy.server.trigger import make_wildcard_trigger 1abcdefg
60from mypy.state import state 1abcdefg
61from mypy.type_visitor import TypeTranslator 1abcdefg
62from mypy.typeops import map_type_from_supertype 1abcdefg
63from mypy.types import ( 1abcdefg
64 AnyType,
65 CallableType,
66 Instance,
67 NoneType,
68 Type,
69 TypeOfAny,
70 TypeType,
71 TypeVarType,
72 UnionType,
73 get_proper_type,
74)
75from mypy.typevars import fill_typevars 1abcdefg
76from mypy.util import get_unique_redefinition_name 1abcdefg
77from mypy.version import __version__ as mypy_version 1abcdefg
79from pydantic._internal import _fields 1abcdefg
80from pydantic.version import parse_mypy_version 1abcdefg
# Section name used for this plugin in mypy config files ([pydantic-mypy] / [tool.pydantic-mypy]).
CONFIGFILE_KEY = 'pydantic-mypy'
# Key under which the plugin stores per-class data in mypy's `TypeInfo.metadata`.
METADATA_KEY = 'pydantic-mypy-metadata'

# Fully qualified names of the pydantic (and pydantic-settings) symbols the plugin special-cases:
BASEMODEL_FULLNAME = 'pydantic.main.BaseModel'
BASESETTINGS_FULLNAME = 'pydantic_settings.main.BaseSettings'
ROOT_MODEL_FULLNAME = 'pydantic.root_model.RootModel'
MODEL_METACLASS_FULLNAME = 'pydantic._internal._model_construction.ModelMetaclass'
FIELD_FULLNAME = 'pydantic.fields.Field'
DATACLASS_FULLNAME = 'pydantic.dataclasses.dataclass'
MODEL_VALIDATOR_FULLNAME = 'pydantic.functional_validators.model_validator'
# Validator/serializer decorators whose decorated functions need signature adjustments.
DECORATOR_FULLNAMES = {
    'pydantic.functional_validators.field_validator',
    'pydantic.functional_validators.model_validator',
    'pydantic.functional_serializers.serializer',
    'pydantic.functional_serializers.model_serializer',
    'pydantic.deprecated.class_validators.validator',
    'pydantic.deprecated.class_validators.root_validator',
}
# Decorators that implicitly make the decorated function a classmethod
# (all of the above except `model_serializer`).
IMPLICIT_CLASSMETHOD_DECORATOR_FULLNAMES = DECORATOR_FULLNAMES - {'pydantic.functional_serializers.model_serializer'}


MYPY_VERSION_TUPLE = parse_mypy_version(mypy_version)
BUILTINS_NAME = 'builtins'

# Increment version if plugin changes and mypy caches should be invalidated
__version__ = 2
def plugin(version: str) -> type[Plugin]:
    """Plugin entry point, called by mypy with the running mypy version string.

    We might want to use this to print a warning if the mypy version being used is
    newer, or especially older, than we expect (or need).

    Args:
        version: The mypy version string.

    Return:
        The Pydantic mypy plugin type.
    """
    return PydanticPlugin
class PydanticPlugin(Plugin):
    """The Pydantic mypy plugin."""

    def __init__(self, options: Options) -> None:
        self.plugin_config = PydanticPluginConfig(options)
        self._plugin_data = self.plugin_config.to_data()
        super().__init__(options)

    def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
        """Update Pydantic model class."""
        sym = self.lookup_fully_qualified(fullname)
        # No branching may occur if the mypy cache has not been cleared
        if not sym or not isinstance(sym.node, TypeInfo):  # pragma: no branch
            return None
        if not sym.node.has_base(BASEMODEL_FULLNAME):
            return None
        return self._pydantic_model_class_maker_callback

    def get_metaclass_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
        """Update Pydantic `ModelMetaclass` definition."""
        return self._pydantic_model_metaclass_marker_callback if fullname == MODEL_METACLASS_FULLNAME else None

    def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None:
        """Adjust return type of `from_orm` method call."""
        return from_attributes_callback if fullname.endswith('.from_orm') else None

    def report_config_data(self, ctx: ReportConfigContext) -> dict[str, Any]:
        """Return all plugin config data.

        Used by mypy to determine if cache needs to be discarded.
        """
        return self._plugin_data

    def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> None:
        # Delegate the actual class transformation to the transformer object.
        PydanticModelTransformer(ctx.cls, ctx.reason, ctx.api, self.plugin_config).transform()

    def _pydantic_model_metaclass_marker_callback(self, ctx: ClassDefContext) -> None:
        """Reset dataclass_transform_spec attribute of ModelMetaclass.

        Let the plugin handle it. This behavior can be disabled
        if 'debug_dataclass_transform' is set to True', for testing purposes.
        """
        if self.plugin_config.debug_dataclass_transform:
            return
        declared_metaclass = ctx.cls.info.declared_metaclass
        assert declared_metaclass, "callback not passed from 'get_metaclass_hook'"
        if getattr(declared_metaclass.type, 'dataclass_transform_spec', None):
            declared_metaclass.type.dataclass_transform_spec = None
class PydanticPluginConfig:
    """A Pydantic mypy plugin config holder.

    Attributes:
        init_forbid_extra: Whether to add a `**kwargs` at the end of the generated `__init__` signature.
        init_typed: Whether to annotate fields in the generated `__init__`.
        warn_required_dynamic_aliases: Whether to raise required dynamic aliases error.
        debug_dataclass_transform: Whether to not reset `dataclass_transform_spec` attribute
            of `ModelMetaclass` for testing purposes.
    """

    __slots__ = (
        'init_forbid_extra',
        'init_typed',
        'warn_required_dynamic_aliases',
        'debug_dataclass_transform',
    )
    init_forbid_extra: bool
    init_typed: bool
    warn_required_dynamic_aliases: bool
    debug_dataclass_transform: bool  # undocumented

    def __init__(self, options: Options) -> None:
        if options.config_file is None:  # pragma: no cover
            return

        toml_config = parse_toml(options.config_file)
        if toml_config is None:
            # Not a TOML file — fall back to an ini-style config read via ConfigParser.
            parser = ConfigParser()
            parser.read(options.config_file)
            for key in self.__slots__:
                setattr(self, key, parser.getboolean(CONFIGFILE_KEY, key, fallback=False))
        else:
            section = toml_config.get('tool', {}).get('pydantic-mypy', {})
            for key in self.__slots__:
                value = section.get(key, False)
                if not isinstance(value, bool):
                    raise ValueError(f'Configuration value must be a boolean for key: {key}')
                setattr(self, key, value)

    def to_data(self) -> dict[str, Any]:
        """Returns a dict of config names to their values."""
        return {key: getattr(self, key) for key in self.__slots__}
def from_attributes_callback(ctx: MethodContext) -> Type:
    """Raise an error if from_attributes is not enabled."""
    model_type: Instance
    ctx_type = ctx.type
    if isinstance(ctx_type, TypeType):
        ctx_type = ctx_type.item
    if isinstance(ctx_type, CallableType) and isinstance(ctx_type.ret_type, Instance):
        # Called on the class itself.
        model_type = ctx_type.ret_type
    elif isinstance(ctx_type, Instance):
        # Called on an instance (unusual, but still valid).
        model_type = ctx_type
    else:  # pragma: no cover
        detail = f'ctx.type: {ctx_type} (of type {ctx_type.__class__.__name__})'
        error_unexpected_behavior(detail, ctx.api, ctx.context)
        return ctx.default_return_type

    pydantic_metadata = model_type.type.metadata.get(METADATA_KEY)
    if pydantic_metadata is None:
        # Not a class this plugin processed; leave the call alone.
        return ctx.default_return_type

    if not model_type.type.has_base(BASEMODEL_FULLNAME):
        # not a Pydantic v2 model
        return ctx.default_return_type

    if pydantic_metadata.get('config', {}).get('from_attributes') is not True:
        error_from_attributes(model_type.type.name, ctx.api, ctx.context)
    return ctx.default_return_type
class PydanticModelField:
    """Based on mypy.plugins.dataclasses.DataclassAttribute."""

    def __init__(
        self,
        name: str,
        alias: str | None,
        is_frozen: bool,
        has_dynamic_alias: bool,
        has_default: bool,
        strict: bool | None,
        line: int,
        column: int,
        type: Type | None,
        info: TypeInfo,
    ):
        self.name = name  # the field's attribute name on the model
        self.alias = alias  # static alias, if any
        self.is_frozen = is_frozen  # whether this individual field is frozen
        self.has_dynamic_alias = has_dynamic_alias  # alias is an expression not statically known
        self.has_default = has_default  # fields with a default become optional in generated signatures
        self.strict = strict  # per-field strict setting; `None` means "use the model-level setting"
        self.line = line
        self.column = column
        self.type = type  # may be `None` while the type is not yet ready
        self.info = info  # the `TypeInfo` the field was (de)serialized against

    def to_argument(
        self,
        current_info: TypeInfo,
        typed: bool,
        model_strict: bool,
        force_optional: bool,
        use_alias: bool,
        api: SemanticAnalyzerPluginInterface,
        force_typevars_invariant: bool,
        is_root_model_root: bool,
    ) -> Argument:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.to_argument.

        Build the `Argument` representing this field in a generated signature.
        When neither `typed` nor the effective strict setting is enabled, the
        annotation is `Any` so the argument type is not checked.
        """
        variable = self.to_var(current_info, api, use_alias, force_typevars_invariant)

        # A field-level `strict` overrides the model-level one.
        strict = model_strict if self.strict is None else self.strict
        if typed or strict:
            type_annotation = self.expand_type(current_info, api, include_root_type=True)
        else:
            type_annotation = AnyType(TypeOfAny.explicit)

        return Argument(
            variable=variable,
            type_annotation=type_annotation,
            initializer=None,
            # The `root` argument of a `RootModel` is an optional positional argument;
            # other fields are keyword arguments, required unless they have a default
            # (or `force_optional` is set).
            kind=ARG_OPT
            if is_root_model_root
            else (ARG_NAMED_OPT if force_optional or self.has_default else ARG_NAMED),
        )

    def expand_type(
        self,
        current_info: TypeInfo,
        api: SemanticAnalyzerPluginInterface,
        force_typevars_invariant: bool = False,
        include_root_type: bool = False,
    ) -> Type | None:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.expand_type."""
        if force_typevars_invariant:
            # In some cases, mypy will emit an error "Cannot use a covariant type variable as a parameter"
            # To prevent that, we add an option to replace typevars with invariant ones while building certain
            # method signatures (in particular, `__init__`). There may be a better way to do this, if this causes
            # us problems in the future, we should look into why the dataclasses plugin doesn't have this issue.
            if isinstance(self.type, TypeVarType):
                modified_type = self.type.copy_modified()
                modified_type.variance = INVARIANT
                self.type = modified_type

        if self.type is not None and self.info.self_type is not None:
            # In general, it is not safe to call `expand_type()` during semantic analysis,
            # however this plugin is called very late, so all types should be fully ready.
            # Also, it is tricky to avoid eager expansion of Self types here (e.g. because
            # we serialize attributes).
            with state.strict_optional_set(api.options.strict_optional):
                filled_with_typevars = fill_typevars(current_info)
                # Cannot be TupleType as current_info represents a Pydantic model:
                assert isinstance(filled_with_typevars, Instance)
                if force_typevars_invariant:
                    for arg in filled_with_typevars.args:
                        if isinstance(arg, TypeVarType):
                            arg.variance = INVARIANT

                expanded_type = expand_type(self.type, {self.info.self_type.id: filled_with_typevars})
                if include_root_type and isinstance(expanded_type, Instance) and is_root_model(expanded_type.type):
                    # When a root model is used as a field, Pydantic allows both an instance of the root model
                    # as well as instances of the `root` field type:
                    root_type = cast(Type, expanded_type.type['root'].type)
                    expanded_root_type = expand_type_by_instance(root_type, expanded_type)
                    expanded_type = UnionType([expanded_type, expanded_root_type])
                return expanded_type
        return self.type

    def to_var(
        self,
        current_info: TypeInfo,
        api: SemanticAnalyzerPluginInterface,
        use_alias: bool,
        force_typevars_invariant: bool = False,
    ) -> Var:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.to_var."""
        # The generated argument is named after the alias when one exists and aliases are requested.
        if use_alias and self.alias is not None:
            name = self.alias
        else:
            name = self.name

        return Var(name, self.expand_type(current_info, api, force_typevars_invariant))

    def serialize(self) -> JsonDict:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.serialize."""
        assert self.type
        return {
            'name': self.name,
            'alias': self.alias,
            'is_frozen': self.is_frozen,
            'has_dynamic_alias': self.has_dynamic_alias,
            'has_default': self.has_default,
            'strict': self.strict,
            'line': self.line,
            'column': self.column,
            'type': self.type.serialize(),
        }

    @classmethod
    def deserialize(cls, info: TypeInfo, data: JsonDict, api: SemanticAnalyzerPluginInterface) -> PydanticModelField:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize."""
        data = data.copy()
        typ = deserialize_and_fixup_type(data.pop('type'), api)
        return cls(type=typ, info=info, **data)

    def expand_typevar_from_subtype(self, sub_type: TypeInfo, api: SemanticAnalyzerPluginInterface) -> None:
        """Expands type vars in the context of a subtype when an attribute is inherited
        from a generic super type.
        """
        if self.type is not None:
            with state.strict_optional_set(api.options.strict_optional):
                self.type = map_type_from_supertype(self.type, sub_type, self.info)
class PydanticModelClassVar:
    """Based on mypy.plugins.dataclasses.DataclassAttribute.

    ClassVars are ignored by subclasses.

    Attributes:
        name: the ClassVar name
    """

    def __init__(self, name):
        self.name = name

    @classmethod
    def deserialize(cls, data: JsonDict) -> PydanticModelClassVar:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize."""
        return cls(**dict(data))

    def serialize(self) -> JsonDict:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.serialize."""
        return {'name': self.name}
class PydanticModelTransformer:
    """Transform the BaseModel subclass according to the plugin settings.

    Attributes:
        tracked_config_fields: A set of field configs that the plugin has to track their value.
    """

    # Model-config keys whose values the plugin reads; other config keys are ignored.
    tracked_config_fields: set[str] = {
        'extra',
        'frozen',
        'from_attributes',
        'populate_by_name',
        'validate_by_alias',
        'validate_by_name',
        'alias_generator',
        'strict',
    }
437 def __init__( 1abcdefg
438 self,
439 cls: ClassDef,
440 reason: Expression | Statement,
441 api: SemanticAnalyzerPluginInterface,
442 plugin_config: PydanticPluginConfig,
443 ) -> None:
444 self._cls = cls 1abcdefg
445 self._reason = reason 1abcdefg
446 self._api = api 1abcdefg
448 self.plugin_config = plugin_config 1abcdefg
    def transform(self) -> bool:
        """Configures the BaseModel subclass according to the plugin settings.

        In particular:

        * determines the model config and fields,
        * adds a fields-aware signature for the initializer and construct methods
        * freezes the class if frozen = True
        * stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses

        Returns `False` when some definitions are not ready yet (requesting
        another semantic-analysis pass), `True` otherwise.
        """
        info = self._cls.info
        is_a_root_model = is_root_model(info)
        config = self.collect_config()
        fields, class_vars = self.collect_fields_and_class_vars(config, is_a_root_model)
        if fields is None or class_vars is None:
            # Some definitions are not ready. We need another pass.
            return False
        for field in fields:
            if field.type is None:
                # A field's type is not ready yet; defer as well.
                return False

        is_settings = info.has_base(BASESETTINGS_FULLNAME)
        self.add_initializer(fields, config, is_settings, is_a_root_model)
        self.add_model_construct_method(fields, config, is_settings, is_a_root_model)
        self.set_frozen(fields, self._api, frozen=config.frozen is True)

        self.adjust_decorator_signatures()

        # Expose the collected fields/class vars/config to subclasses via mypy metadata.
        info.metadata[METADATA_KEY] = {
            'fields': {field.name: field.serialize() for field in fields},
            'class_vars': {class_var.name: class_var.serialize() for class_var in class_vars},
            'config': config.get_values_dict(),
        }

        return True
    def adjust_decorator_signatures(self) -> None:
        """When we decorate a function `f` with `pydantic.validator(...)`, `pydantic.field_validator`
        or `pydantic.serializer(...)`, mypy sees `f` as a regular method taking a `self` instance,
        even though pydantic internally wraps `f` with `classmethod` if necessary.

        Teach mypy this by marking any function whose outermost decorator is a `validator()`,
        `field_validator()` or `serializer()` call as a `classmethod`.
        """
        for sym in self._cls.info.names.values():
            if isinstance(sym.node, Decorator):
                # Only the outermost (first-listed) decorator determines the implicit-classmethod behavior.
                first_dec = sym.node.original_decorators[0]
                if (
                    isinstance(first_dec, CallExpr)
                    and isinstance(first_dec.callee, NameExpr)
                    and first_dec.callee.fullname in IMPLICIT_CLASSMETHOD_DECORATOR_FULLNAMES
                    # @model_validator(mode="after") is an exception, it expects a regular method
                    and not (
                        first_dec.callee.fullname == MODEL_VALIDATOR_FULLNAME
                        and any(
                            first_dec.arg_names[i] == 'mode' and isinstance(arg, StrExpr) and arg.value == 'after'
                            for i, arg in enumerate(first_dec.args)
                        )
                    )
                ):
                    # TODO: Only do this if the first argument of the decorated function is `cls`
                    sym.node.func.is_class = True
    def collect_config(self) -> ModelConfigData:  # noqa: C901 (ignore complexity)
        """Collects the values of the config attributes that are used by the plugin, accounting for parent classes.

        Config may come from class keywords (`class M(BaseModel, frozen=True)`), a
        `model_config` assignment (a `ConfigDict`/`dict` call or a dict literal), or a
        deprecated `Config` inner class; specifying it in both places is an error.
        """
        cls = self._cls
        config = ModelConfigData()

        has_config_kwargs = False
        has_config_from_namespace = False

        # Handle `class MyModel(BaseModel, <name>=<expr>, ...):`
        for name, expr in cls.keywords.items():
            config_data = self.get_config_update(name, expr)
            if config_data:
                has_config_kwargs = True
                config.update(config_data)

        # Handle `model_config`
        stmt: Statement | None = None
        for stmt in cls.defs.body:
            if not isinstance(stmt, (AssignmentStmt, ClassDef)):
                continue

            if isinstance(stmt, AssignmentStmt):
                lhs = stmt.lvalues[0]
                if not isinstance(lhs, NameExpr) or lhs.name != 'model_config':
                    continue

                if isinstance(stmt.rvalue, CallExpr):  # calls to `dict` or `ConfigDict`
                    for arg_name, arg in zip(stmt.rvalue.arg_names, stmt.rvalue.args):
                        if arg_name is None:
                            continue
                        config.update(self.get_config_update(arg_name, arg, lax_extra=True))
                elif isinstance(stmt.rvalue, DictExpr):  # dict literals
                    for key_expr, value_expr in stmt.rvalue.items:
                        if not isinstance(key_expr, StrExpr):
                            continue
                        config.update(self.get_config_update(key_expr.value, value_expr))

            elif isinstance(stmt, ClassDef):
                if stmt.name != 'Config':  # 'deprecated' Config-class
                    continue
                for substmt in stmt.defs.body:
                    if not isinstance(substmt, AssignmentStmt):
                        continue
                    lhs = substmt.lvalues[0]
                    if not isinstance(lhs, NameExpr):
                        continue
                    config.update(self.get_config_update(lhs.name, substmt.rvalue))

            # Reaching this point means the statement provided namespace-based config;
            # combining that with class-keyword config is ambiguous, so report and stop.
            if has_config_kwargs:
                self._api.fail(
                    'Specifying config in two places is ambiguous, use either Config attribute or class kwargs',
                    cls,
                )
                break

            has_config_from_namespace = True

        if has_config_kwargs or has_config_from_namespace:
            if (
                stmt
                and config.has_alias_generator
                and not (config.validate_by_name or config.populate_by_name)
                and self.plugin_config.warn_required_dynamic_aliases
            ):
                error_required_dynamic_aliases(self._api, stmt)

        # Inherit config values not set on this class from its ancestors.
        for info in cls.info.mro[1:]:  # 0 is the current class
            if METADATA_KEY not in info.metadata:
                continue

            # Each class depends on the set of fields in its ancestors
            self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname))
            for name, value in info.metadata[METADATA_KEY]['config'].items():
                config.setdefault(name, value)
        return config
    def collect_fields_and_class_vars(
        self, model_config: ModelConfigData, is_root_model: bool
    ) -> tuple[list[PydanticModelField] | None, list[PydanticModelClassVar] | None]:
        """Collects the fields for the model, accounting for parent classes.

        The `| None` in the return type allows signalling that another semantic-analysis
        pass is needed (see the commented-out check below); the code currently visible
        always returns concrete lists.
        """
        cls = self._cls

        # First, collect fields and ClassVars belonging to any class in the MRO, ignoring duplicates.
        #
        # We iterate through the MRO in reverse because attrs defined in the parent must appear
        # earlier in the attributes list than attrs defined in the child. See:
        # https://docs.python.org/3/library/dataclasses.html#inheritance
        #
        # However, we also want fields defined in the subtype to override ones defined
        # in the parent. We can implement this via a dict without disrupting the attr order
        # because dicts preserve insertion order in Python 3.7+.
        found_fields: dict[str, PydanticModelField] = {}
        found_class_vars: dict[str, PydanticModelClassVar] = {}
        for info in reversed(cls.info.mro[1:-1]):  # 0 is the current class, -2 is BaseModel, -1 is object
            # if BASEMODEL_METADATA_TAG_KEY in info.metadata and BASEMODEL_METADATA_KEY not in info.metadata:
            #     # We haven't processed the base class yet. Need another pass.
            #     return None, None
            if METADATA_KEY not in info.metadata:
                continue

            # Each class depends on the set of attributes in its dataclass ancestors.
            self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname))

            for name, data in info.metadata[METADATA_KEY]['fields'].items():
                field = PydanticModelField.deserialize(info, data, self._api)
                # (The following comment comes directly from the dataclasses plugin)
                # TODO: We shouldn't be performing type operations during the main
                # semantic analysis pass, since some TypeInfo attributes might
                # still be in flux. This should be performed in a later phase.
                field.expand_typevar_from_subtype(cls.info, self._api)
                found_fields[name] = field

                sym_node = cls.info.names.get(name)
                if sym_node and sym_node.node and not isinstance(sym_node.node, Var):
                    self._api.fail(
                        'BaseModel field may only be overridden by another field',
                        sym_node.node,
                    )
            # Collect ClassVars
            for name, data in info.metadata[METADATA_KEY]['class_vars'].items():
                found_class_vars[name] = PydanticModelClassVar.deserialize(data)

        # Second, collect fields and ClassVars belonging to the current class.
        current_field_names: set[str] = set()
        current_class_vars_names: set[str] = set()
        for stmt in self._get_assignment_statements_from_block(cls.defs):
            maybe_field = self.collect_field_or_class_var_from_stmt(stmt, model_config, found_class_vars)
            if maybe_field is None:
                continue

            lhs = stmt.lvalues[0]
            assert isinstance(lhs, NameExpr)  # collect_field_or_class_var_from_stmt guarantees this
            if isinstance(maybe_field, PydanticModelField):
                if is_root_model and lhs.name != 'root':
                    # A root model may only define the single `root` field.
                    error_extra_fields_on_root_model(self._api, stmt)
                else:
                    current_field_names.add(lhs.name)
                    found_fields[lhs.name] = maybe_field
            elif isinstance(maybe_field, PydanticModelClassVar):
                current_class_vars_names.add(lhs.name)
                found_class_vars[lhs.name] = maybe_field

        return list(found_fields.values()), list(found_class_vars.values())
657 def _get_assignment_statements_from_if_statement(self, stmt: IfStmt) -> Iterator[AssignmentStmt]: 1abcdefg
658 for body in stmt.body: 1abcdefg
659 if not body.is_unreachable: 659 ↛ 658line 659 didn't jump to line 658 because the condition on line 659 was always true1abcdefg
660 yield from self._get_assignment_statements_from_block(body) 1abcdefg
661 if stmt.else_body is not None and not stmt.else_body.is_unreachable: 661 ↛ 662line 661 didn't jump to line 662 because the condition on line 661 was never true1abcdefg
662 yield from self._get_assignment_statements_from_block(stmt.else_body)
664 def _get_assignment_statements_from_block(self, block: Block) -> Iterator[AssignmentStmt]: 1abcdefg
665 for stmt in block.body: 1abcdefg
666 if isinstance(stmt, AssignmentStmt): 1abcdefg
667 yield stmt 1abcdefg
668 elif isinstance(stmt, IfStmt): 1abcdefg
669 yield from self._get_assignment_statements_from_if_statement(stmt) 1abcdefg
671 def collect_field_or_class_var_from_stmt( # noqa C901 1abcdefg
672 self, stmt: AssignmentStmt, model_config: ModelConfigData, class_vars: dict[str, PydanticModelClassVar]
673 ) -> PydanticModelField | PydanticModelClassVar | None:
674 """Get pydantic model field from statement.
676 Args:
677 stmt: The statement.
678 model_config: Configuration settings for the model.
679 class_vars: ClassVars already known to be defined on the model.
681 Returns:
682 A pydantic model field if it could find the field in statement. Otherwise, `None`.
683 """
684 cls = self._cls 1abcdefg
686 lhs = stmt.lvalues[0] 1abcdefg
687 if not isinstance(lhs, NameExpr) or not _fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config': 1abcdefg
688 return None 1abcdefg
690 if not stmt.new_syntax: 1abcdefg
691 if ( 1abc
692 isinstance(stmt.rvalue, CallExpr)
693 and isinstance(stmt.rvalue.callee, CallExpr)
694 and isinstance(stmt.rvalue.callee.callee, NameExpr)
695 and stmt.rvalue.callee.callee.fullname in DECORATOR_FULLNAMES
696 ):
697 # This is a (possibly-reused) validator or serializer, not a field
698 # In particular, it looks something like: my_validator = validator('my_field')(f)
699 # Eventually, we may want to attempt to respect model_config['ignored_types']
700 return None 1abcdefg
702 if lhs.name in class_vars: 702 ↛ 704line 702 didn't jump to line 704 because the condition on line 702 was never true1abcdefg
703 # Class vars are not fields and are not required to be annotated
704 return None
706 # The assignment does not have an annotation, and it's not anything else we recognize
707 error_untyped_fields(self._api, stmt) 1abcdefg
708 return None 1abcdefg
710 lhs = stmt.lvalues[0] 1abcdefg
711 if not isinstance(lhs, NameExpr): 711 ↛ 712line 711 didn't jump to line 712 because the condition on line 711 was never true1abcdefg
712 return None
714 if not _fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config': 714 ↛ 715line 714 didn't jump to line 715 because the condition on line 714 was never true1abcdefg
715 return None
717 sym = cls.info.names.get(lhs.name) 1abcdefg
718 if sym is None: # pragma: no cover 1abcdefg
719 # This is likely due to a star import (see the dataclasses plugin for a more detailed explanation)
720 # This is the same logic used in the dataclasses plugin
721 return None
723 node = sym.node 1abcdefg
724 if isinstance(node, PlaceholderNode): # pragma: no cover 1abcdefg
725 # See the PlaceholderNode docstring for more detail about how this can occur
726 # Basically, it is an edge case when dealing with complex import logic
728 # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does..
729 return None
731 if isinstance(node, TypeAlias): 1abcdefg
732 self._api.fail(
733 'Type aliases inside BaseModel definitions are not supported at runtime',
734 node,
735 )
736 # Skip processing this node. This doesn't match the runtime behaviour,
737 # but the only alternative would be to modify the SymbolTable,
738 # and it's a little hairy to do that in a plugin.
739 return None
741 if not isinstance(node, Var): # pragma: no cover 1abcdefg
742 # Don't know if this edge case still happens with the `is_valid_field` check above
743 # but better safe than sorry
745 # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does..
746 return None
748 # x: ClassVar[int] is not a field
749 if node.is_classvar: 1abcdefg
750 return PydanticModelClassVar(lhs.name) 1abcdefg
752 # x: InitVar[int] is not supported in BaseModel
753 node_type = get_proper_type(node.type) 1abcdefg
754 if isinstance(node_type, Instance) and node_type.type.fullname == 'dataclasses.InitVar': 754 ↛ 755line 754 didn't jump to line 755 because the condition on line 754 was never true1abcdefg
755 self._api.fail(
756 'InitVar is not supported in BaseModel',
757 node,
758 )
760 has_default = self.get_has_default(stmt) 1abcdefg
761 strict = self.get_strict(stmt) 1abcdefg
763 if sym.type is None and node.is_final and node.is_inferred: 763 ↛ 771line 763 didn't jump to line 771 because the condition on line 763 was never true1abcdefg
764 # This follows the logic from the dataclasses plugin. The following comment is taken verbatim:
765 #
766 # This is a special case, assignment like x: Final = 42 is classified
767 # annotated above, but mypy strips the `Final` turning it into x = 42.
768 # We do not support inferred types in dataclasses, so we can try inferring
769 # type for simple literals, and otherwise require an explicit type
770 # argument for Final[...].
771 typ = self._api.analyze_simple_literal_type(stmt.rvalue, is_final=True)
772 if typ:
773 node.type = typ
774 else:
775 self._api.fail(
776 'Need type argument for Final[...] with non-literal default in BaseModel',
777 stmt,
778 )
779 node.type = AnyType(TypeOfAny.from_error)
781 if node.is_final and has_default: 1abcdefg
782 # TODO this path should be removed (see https://github.com/pydantic/pydantic/issues/11119)
783 return PydanticModelClassVar(lhs.name) 1abcdefg
785 alias, has_dynamic_alias = self.get_alias_info(stmt) 1abcdefg
786 if ( 1abc
787 has_dynamic_alias
788 and not (model_config.validate_by_name or model_config.populate_by_name)
789 and self.plugin_config.warn_required_dynamic_aliases
790 ):
791 error_required_dynamic_aliases(self._api, stmt) 1abcdefg
792 is_frozen = self.is_field_frozen(stmt) 1abcdefg
794 init_type = self._infer_dataclass_attr_init_type(sym, lhs.name, stmt) 1abcdefg
795 return PydanticModelField( 1abcdefg
796 name=lhs.name,
797 has_dynamic_alias=has_dynamic_alias,
798 has_default=has_default,
799 strict=strict,
800 alias=alias,
801 is_frozen=is_frozen,
802 line=stmt.line,
803 column=stmt.column,
804 type=init_type,
805 info=cls.info,
806 )
    def _infer_dataclass_attr_init_type(self, sym: SymbolTableNode, name: str, context: Context) -> Type | None:
        """Infer __init__ argument type for an attribute.

        In particular, possibly use the signature of __set__ when the attribute's
        declared type is a descriptor: the value accepted by the generated `__init__`
        is then the type of the descriptor's third `__set__` parameter, not the
        descriptor type itself.

        Returns `sym.type` unchanged when no usable `__set__` is found, and emits a
        plugin error (while still returning the declared type) for `__set__`
        definitions it cannot interpret.
        """
        default = sym.type
        if sym.implicit:
            # Implicit symbols have no reliable declared type to refine; use as-is.
            return default
        t = get_proper_type(sym.type)

        # Perform a simple-minded inference from the signature of __set__, if present.
        # We can't use mypy.checkmember here, since this plugin runs before type checking.
        # We only support some basic scenarios here, which is hopefully sufficient for
        # the vast majority of use cases.
        if not isinstance(t, Instance):
            return default
        setter = t.type.get('__set__')
        if setter:
            if isinstance(setter.node, FuncDef):
                super_info = t.type.get_containing_type_info('__set__')
                assert super_info
                if setter.type:
                    # Map the setter signature onto the subclass so type variables resolve correctly.
                    setter_type = get_proper_type(map_type_from_supertype(setter.type, t.type, super_info))
                else:
                    # Unannotated __set__: accept anything.
                    return AnyType(TypeOfAny.unannotated)
                # Only plain (self, obj, value) signatures are supported; arg_types[2] is `value`.
                if isinstance(setter_type, CallableType) and setter_type.arg_kinds == [
                    ARG_POS,
                    ARG_POS,
                    ARG_POS,
                ]:
                    return expand_type_by_instance(setter_type.arg_types[2], t)
                else:
                    self._api.fail(f'Unsupported signature for "__set__" in "{t.type.name}"', context)
            else:
                # __set__ exists but is not a plain function definition (e.g. a decorated or overloaded one).
                self._api.fail(f'Unsupported "__set__" in "{t.type.name}"', context)

        return default
    def add_initializer(
        self, fields: list[PydanticModelField], config: ModelConfigData, is_settings: bool, is_root_model: bool
    ) -> None:
        """Adds a fields-aware `__init__` method to the class.

        The added `__init__` will be annotated with types vs. all `Any` depending on the plugin settings.

        Args:
            fields: The collected pydantic model fields.
            config: The merged model config, which controls aliasing and extra-kwargs behavior.
            is_settings: Whether the class subclasses `BaseSettings` (adds the settings-only kwargs).
            is_root_model: Whether the class is a `RootModel` subclass.
        """
        if '__init__' in self._cls.info.names and not self._cls.info.names['__init__'].plugin_generated:
            return  # Don't generate an __init__ if one already exists

        typed = self.plugin_config.init_typed
        model_strict = bool(config.strict)
        # Aliases are used in the signature unless field names are accepted at validation
        # time (validate_by_name/populate_by_name) or alias validation is explicitly off.
        use_alias = not (config.validate_by_name or config.populate_by_name) and config.validate_by_alias is not False
        requires_dynamic_aliases = bool(config.has_alias_generator and not config.validate_by_name)
        args = self.get_field_arguments(
            fields,
            typed=typed,
            model_strict=model_strict,
            requires_dynamic_aliases=requires_dynamic_aliases,
            use_alias=use_alias,
            is_settings=is_settings,
            is_root_model=is_root_model,
            force_typevars_invariant=True,
        )

        if is_settings:
            # Mirror the private keyword-only arguments (`_env_file`, `_cli_settings_source`, ...)
            # from the `BaseSettings.__init__` signature into the generated one.
            base_settings_node = self._api.lookup_fully_qualified(BASESETTINGS_FULLNAME).node
            assert isinstance(base_settings_node, TypeInfo)
            if '__init__' in base_settings_node.names:
                base_settings_init_node = base_settings_node.names['__init__'].node
                assert isinstance(base_settings_init_node, FuncDef)
                # NOTE(review): the `is not None` check is redundant after the assert above —
                # presumably kept for safety when asserts are stripped; confirm before removing.
                if base_settings_init_node is not None and base_settings_init_node.type is not None:
                    func_type = base_settings_init_node.type
                    assert isinstance(func_type, CallableType)
                    for arg_idx, arg_name in enumerate(func_type.arg_names):
                        # Only copy single-underscore-prefixed named args (the settings kwargs).
                        if arg_name is None or arg_name.startswith('__') or not arg_name.startswith('_'):
                            continue
                        analyzed_variable_type = self._api.anal_type(func_type.arg_types[arg_idx])
                        if analyzed_variable_type is not None and arg_name == '_cli_settings_source':
                            # _cli_settings_source is defined as CliSettingsSource[Any], and as such
                            # the Any causes issues with --disallow-any-explicit. As a workaround, change
                            # the Any type (as if CliSettingsSource was left unparameterized):
                            analyzed_variable_type = analyzed_variable_type.accept(
                                ChangeExplicitTypeOfAny(TypeOfAny.from_omitted_generics)
                            )
                        variable = Var(arg_name, analyzed_variable_type)
                        args.append(Argument(variable, analyzed_variable_type, None, ARG_OPT))

        if not self.should_init_forbid_extra(fields, config):
            # Accept arbitrary extra keyword arguments via **kwargs.
            var = Var('kwargs')
            args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2))

        add_method(self._api, self._cls, '__init__', args=args, return_type=NoneType())
    def add_model_construct_method(
        self,
        fields: list[PydanticModelField],
        config: ModelConfigData,
        is_settings: bool,
        is_root_model: bool,
    ) -> None:
        """Adds a fully typed `model_construct` classmethod to the class.

        Similar to the fields-aware __init__ method, but always uses the field names (not aliases),
        and does not treat settings fields as optional.
        """
        # Build the `_fields_set: set[str] | None = ...` optional argument.
        set_str = self._api.named_type(f'{BUILTINS_NAME}.set', [self._api.named_type(f'{BUILTINS_NAME}.str')])
        optional_set_str = UnionType([set_str, NoneType()])
        fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT)
        # Analyze field types under the same strict-optional setting as the rest of the run.
        with state.strict_optional_set(self._api.options.strict_optional):
            args = self.get_field_arguments(
                fields,
                typed=True,
                model_strict=bool(config.strict),
                requires_dynamic_aliases=False,
                use_alias=False,
                is_settings=is_settings,
                is_root_model=is_root_model,
            )
        if not self.should_init_forbid_extra(fields, config):
            var = Var('kwargs')
            args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2))

        # For RootModel, `root` must stay the first positional argument, so `_fields_set` goes last.
        args = args + [fields_set_argument] if is_root_model else [fields_set_argument] + args

        add_method(
            self._api,
            self._cls,
            'model_construct',
            args=args,
            return_type=fill_typevars(self._cls.info),
            is_classmethod=True,
        )
    def set_frozen(self, fields: list[PydanticModelField], api: SemanticAnalyzerPluginInterface, frozen: bool) -> None:
        """Marks all fields as properties so that attempts to set them trigger mypy errors.

        This is the same approach used by the attrs and dataclasses plugins.

        Args:
            fields: The collected model fields (including inherited ones).
            api: The semantic analyzer interface, used to synthesize `Var` nodes.
            frozen: Whether the whole model is frozen; individual fields may also be
                frozen on their own via `Field(frozen=True)`.
        """
        info = self._cls.info
        for field in fields:
            sym_node = info.names.get(field.name)
            if sym_node is not None:
                # The field has a symbol in this class's own namespace.
                var = sym_node.node
                if isinstance(var, Var):
                    var.is_property = frozen or field.is_frozen
                elif isinstance(var, PlaceholderNode) and not self._api.final_iteration:
                    # See https://github.com/pydantic/pydantic/issues/5191 to hit this branch for test coverage
                    self._api.defer()
                else:  # pragma: no cover
                    # I don't know whether it's possible to hit this branch, but I've added it for safety
                    try:
                        var_str = str(var)
                    except TypeError:
                        # This happens for PlaceholderNode; perhaps it will happen for other types in the future..
                        var_str = repr(var)
                    detail = f'sym_node.node: {var_str} (of type {var.__class__})'
                    error_unexpected_behavior(detail, self._api, self._cls)
            else:
                # Inherited field with no local symbol: synthesize a Var in this class's
                # symbol table so the frozen-ness applies here too.
                var = field.to_var(info, api, use_alias=False)
                var.info = info
                var.is_property = frozen
                var._fullname = info.fullname + '.' + var.name
                info.names[var.name] = SymbolTableNode(MDEF, var)
971 def get_config_update(self, name: str, arg: Expression, lax_extra: bool = False) -> ModelConfigData | None: 1abcdefg
972 """Determines the config update due to a single kwarg in the ConfigDict definition.
974 Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int)
975 """
976 if name not in self.tracked_config_fields: 1abcdefg
977 return None 1abcdefg
978 if name == 'extra': 1abcdefg
979 if isinstance(arg, StrExpr): 1abcdefg
980 forbid_extra = arg.value == 'forbid' 1abcdefg
981 elif isinstance(arg, MemberExpr): 1abcdefg
982 forbid_extra = arg.name == 'forbid' 1abcdefg
983 else:
984 if not lax_extra: 1abcdefg
985 # Only emit an error for other types of `arg` (e.g., `NameExpr`, `ConditionalExpr`, etc.) when
986 # reading from a config class, etc. If a ConfigDict is used, then we don't want to emit an error
987 # because you'll get type checking from the ConfigDict itself.
988 #
989 # It would be nice if we could introspect the types better otherwise, but I don't know what the API
990 # is to evaluate an expr into its type and then check if that type is compatible with the expected
991 # type. Note that you can still get proper type checking via: `model_config = ConfigDict(...)`, just
992 # if you don't use an explicit string, the plugin won't be able to infer whether extra is forbidden.
993 error_invalid_config_value(name, self._api, arg) 1abcdefg
994 return None 1abcdefg
995 return ModelConfigData(forbid_extra=forbid_extra) 1abcdefg
996 if name == 'alias_generator': 1abcdefg
997 has_alias_generator = True 1abcdefg
998 if isinstance(arg, NameExpr) and arg.fullname == 'builtins.None': 1abcdefg
999 has_alias_generator = False 1abcdefg
1000 return ModelConfigData(has_alias_generator=has_alias_generator) 1abcdefg
1001 if isinstance(arg, NameExpr) and arg.fullname in ('builtins.True', 'builtins.False'): 1abcdefg
1002 return ModelConfigData(**{name: arg.fullname == 'builtins.True'}) 1abcdefg
1003 error_invalid_config_value(name, self._api, arg) 1abcdefg
1004 return None 1abcdefg
1006 @staticmethod 1abcdefg
1007 def get_has_default(stmt: AssignmentStmt) -> bool: 1abcdefg
1008 """Returns a boolean indicating whether the field defined in `stmt` is a required field."""
1009 expr = stmt.rvalue 1abcdefg
1010 if isinstance(expr, TempNode): 1abcdefg
1011 # TempNode means annotation-only, so has no default
1012 return False 1abcdefg
1013 if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME: 1abcdefg
1014 # The "default value" is a call to `Field`; at this point, the field has a default if and only if:
1015 # * there is a positional argument that is not `...`
1016 # * there is a keyword argument named "default" that is not `...`
1017 # * there is a "default_factory" that is not `None`
1018 for arg, name in zip(expr.args, expr.arg_names): 1abcdefg
1019 # If name is None, then this arg is the default because it is the only positional argument.
1020 if name is None or name == 'default': 1abcdefg
1021 return arg.__class__ is not EllipsisExpr 1abcdefg
1022 if name == 'default_factory': 1abcdefg
1023 return not (isinstance(arg, NameExpr) and arg.fullname == 'builtins.None') 1abcdefg
1024 return False 1abcdefg
1025 # Has no default if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`)
1026 return not isinstance(expr, EllipsisExpr) 1abcdefg
1028 @staticmethod 1abcdefg
1029 def get_strict(stmt: AssignmentStmt) -> bool | None: 1abcdefg
1030 """Returns a the `strict` value of a field if defined, otherwise `None`."""
1031 expr = stmt.rvalue 1abcdefg
1032 if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME: 1abcdefg
1033 for arg, name in zip(expr.args, expr.arg_names): 1abcdefg
1034 if name != 'strict': 1abcdefg
1035 continue 1abcdefg
1036 if isinstance(arg, NameExpr): 1036 ↛ 1041line 1036 didn't jump to line 1041 because the condition on line 1036 was always true1abcdefg
1037 if arg.fullname == 'builtins.True': 1abcdefg
1038 return True 1abcdefg
1039 elif arg.fullname == 'builtins.False': 1039 ↛ 1041line 1039 didn't jump to line 1041 because the condition on line 1039 was always true1abcdefg
1040 return False 1abcdefg
1041 return None
1042 return None 1abcdefg
1044 @staticmethod 1abcdefg
1045 def get_alias_info(stmt: AssignmentStmt) -> tuple[str | None, bool]: 1abcdefg
1046 """Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`.
1048 `has_dynamic_alias` is True if and only if an alias is provided, but not as a string literal.
1049 If `has_dynamic_alias` is True, `alias` will be None.
1050 """
1051 expr = stmt.rvalue 1abcdefg
1052 if isinstance(expr, TempNode): 1abcdefg
1053 # TempNode means annotation-only
1054 return None, False 1abcdefg
1056 if not ( 1abc
1057 isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME
1058 ):
1059 # Assigned value is not a call to pydantic.fields.Field
1060 return None, False 1abcdefg
1062 if 'validation_alias' in expr.arg_names: 1abcdefg
1063 arg = expr.args[expr.arg_names.index('validation_alias')] 1abcdefg
1064 elif 'alias' in expr.arg_names: 1abcdefg
1065 arg = expr.args[expr.arg_names.index('alias')] 1abcdefg
1066 else:
1067 return None, False 1abcdefg
1069 if isinstance(arg, StrExpr): 1abcdefg
1070 return arg.value, False 1abcdefg
1071 else:
1072 return None, True 1abcdefg
1074 @staticmethod 1abcdefg
1075 def is_field_frozen(stmt: AssignmentStmt) -> bool: 1abcdefg
1076 """Returns whether the field is frozen, extracted from the declaration of the field defined in `stmt`.
1078 Note that this is only whether the field was declared to be frozen in a `<field_name> = Field(frozen=True)`
1079 sense; this does not determine whether the field is frozen because the entire model is frozen; that is
1080 handled separately.
1081 """
1082 expr = stmt.rvalue 1abcdefg
1083 if isinstance(expr, TempNode): 1abcdefg
1084 # TempNode means annotation-only
1085 return False 1abcdefg
1087 if not ( 1abc
1088 isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME
1089 ):
1090 # Assigned value is not a call to pydantic.fields.Field
1091 return False 1abcdefg
1093 for i, arg_name in enumerate(expr.arg_names): 1abcdefg
1094 if arg_name == 'frozen': 1abcdefg
1095 arg = expr.args[i] 1abcdefg
1096 return isinstance(arg, NameExpr) and arg.fullname == 'builtins.True' 1abcdefg
1097 return False 1abcdefg
1099 def get_field_arguments( 1abcdefg
1100 self,
1101 fields: list[PydanticModelField],
1102 typed: bool,
1103 model_strict: bool,
1104 use_alias: bool,
1105 requires_dynamic_aliases: bool,
1106 is_settings: bool,
1107 is_root_model: bool,
1108 force_typevars_invariant: bool = False,
1109 ) -> list[Argument]:
1110 """Helper function used during the construction of the `__init__` and `model_construct` method signatures.
1112 Returns a list of mypy Argument instances for use in the generated signatures.
1113 """
1114 info = self._cls.info 1abcdefg
1115 arguments = [ 1abcdefg
1116 field.to_argument(
1117 info,
1118 typed=typed,
1119 model_strict=model_strict,
1120 force_optional=requires_dynamic_aliases or is_settings,
1121 use_alias=use_alias,
1122 api=self._api,
1123 force_typevars_invariant=force_typevars_invariant,
1124 is_root_model_root=is_root_model and field.name == 'root',
1125 )
1126 for field in fields
1127 if not (use_alias and field.has_dynamic_alias)
1128 ]
1129 return arguments 1abcdefg
1131 def should_init_forbid_extra(self, fields: list[PydanticModelField], config: ModelConfigData) -> bool: 1abcdefg
1132 """Indicates whether the generated `__init__` should get a `**kwargs` at the end of its signature.
1134 We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to,
1135 *unless* a required dynamic alias is present (since then we can't determine a valid signature).
1136 """
1137 if not (config.validate_by_name or config.populate_by_name): 1abcdefg
1138 if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)): 1abcdefg
1139 return False 1abcdefg
1140 if config.forbid_extra: 1abcdefg
1141 return True 1abcdefg
1142 return self.plugin_config.init_forbid_extra 1abcdefg
1144 @staticmethod 1abcdefg
1145 def is_dynamic_alias_present(fields: list[PydanticModelField], has_alias_generator: bool) -> bool: 1abcdefg
1146 """Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be
1147 determined during static analysis.
1148 """
1149 for field in fields: 1abcdefg
1150 if field.has_dynamic_alias: 1abcdefg
1151 return True 1abcdefg
1152 if has_alias_generator: 1abcdefg
1153 for field in fields: 1abcdefg
1154 if field.alias is None: 1abcdefg
1155 return True 1abcdefg
1156 return False 1abcdefg
class ChangeExplicitTypeOfAny(TypeTranslator):
    """A type translator used to change type of Any's, if explicit."""

    def __init__(self, type_of_any: int) -> None:
        super().__init__()
        # The `TypeOfAny` value that explicit Any's will be rewritten to.
        self._type_of_any = type_of_any

    def visit_any(self, t: AnyType) -> Type:  # noqa: D102
        # Leave non-explicit Any's (e.g. from errors or omitted generics) untouched.
        if t.type_of_any != TypeOfAny.explicit:
            return t
        return t.copy_modified(type_of_any=self._type_of_any)
class ModelConfigData:
    """Pydantic mypy plugin model config class.

    Holds the subset of model config values the plugin tracks; `None` means
    "not specified", so merged configs only override explicitly-set values.
    """

    def __init__(
        self,
        forbid_extra: bool | None = None,
        frozen: bool | None = None,
        from_attributes: bool | None = None,
        populate_by_name: bool | None = None,
        validate_by_alias: bool | None = None,
        validate_by_name: bool | None = None,
        has_alias_generator: bool | None = None,
        strict: bool | None = None,
    ):
        self.forbid_extra = forbid_extra
        self.frozen = frozen
        self.from_attributes = from_attributes
        self.populate_by_name = populate_by_name
        self.validate_by_alias = validate_by_alias
        self.validate_by_name = validate_by_name
        self.has_alias_generator = has_alias_generator
        self.strict = strict

    def get_values_dict(self) -> dict[str, Any]:
        """Returns a dict of Pydantic model config names to their values.

        It includes the config if config value is not `None`.
        """
        explicitly_set: dict[str, Any] = {}
        for config_name, config_value in self.__dict__.items():
            if config_value is not None:
                explicitly_set[config_name] = config_value
        return explicitly_set

    def update(self, config: ModelConfigData | None) -> None:
        """Update Pydantic model config values."""
        if config is None:
            return
        # Only explicitly-set values from `config` override ours.
        for config_name, config_value in config.get_values_dict().items():
            setattr(self, config_name, config_value)

    def setdefault(self, key: str, value: Any) -> None:
        """Set default value for Pydantic model config if config value is `None`."""
        if getattr(self, key) is None:
            setattr(self, key, value)
def is_root_model(info: TypeInfo) -> bool:
    """Return whether the type info is a root model subclass (or the `RootModel` class itself)."""
    # `has_base` covers both `RootModel` itself and anything that inherits from it.
    return info.has_base(ROOT_MODEL_FULLNAME)
# Error codes attached to the messages this plugin emits via `api.fail`.
# NOTE: several distinct errors share the 'pydantic-field' slug.
ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_attributes call', 'Pydantic')
ERROR_CONFIG = ErrorCode('pydantic-config', 'Invalid config value', 'Pydantic')
ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed', 'Pydantic')
ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior', 'Pydantic')
ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed', 'Pydantic')
ERROR_FIELD_DEFAULTS = ErrorCode('pydantic-field', 'Invalid Field defaults', 'Pydantic')
ERROR_EXTRA_FIELD_ROOT_MODEL = ErrorCode('pydantic-field', 'Extra field on RootModel subclass', 'Pydantic')
def error_from_attributes(model_name: str, api: CheckerPluginInterface, context: Context) -> None:
    """Emits an error when the model does not have `from_attributes=True`."""
    message = f'"{model_name}" does not have from_attributes=True'
    api.fail(message, context, code=ERROR_ORM)
def error_invalid_config_value(name: str, api: SemanticAnalyzerPluginInterface, context: Context) -> None:
    """Emits an error when the config value is invalid."""
    message = f'Invalid value for "Config.{name}"'
    api.fail(message, context, code=ERROR_CONFIG)
def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
    """Emits required dynamic aliases error.

    This will be called when `warn_required_dynamic_aliases=True`.
    """
    message = 'Required dynamic aliases disallowed'
    api.fail(message, context, code=ERROR_ALIAS)
def error_unexpected_behavior(
    detail: str, api: CheckerPluginInterface | SemanticAnalyzerPluginInterface, context: Context
) -> None:  # pragma: no cover
    """Emits unexpected behavior error."""
    # Can't think of a good way to test this, but I confirmed it renders as desired by adding to a non-error path
    report_link = 'https://github.com/pydantic/pydantic/issues/new/choose'
    message = (
        f'The pydantic mypy plugin ran into unexpected behavior: {detail}\n'
        f'Please consider reporting this bug at {report_link} so we can try to fix it!'
    )
    api.fail(message, context, code=ERROR_UNEXPECTED)
def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
    """Emits an error when there is an untyped field in the model."""
    message = 'Untyped fields disallowed'
    api.fail(message, context, code=ERROR_UNTYPED)
def error_extra_fields_on_root_model(api: CheckerPluginInterface, context: Context) -> None:
    """Emits an error when there is more than just a root field defined for a subclass of RootModel."""
    message = 'Only `root` is allowed as a field of a `RootModel`'
    api.fail(message, context, code=ERROR_EXTRA_FIELD_ROOT_MODEL)
def add_method(
    api: SemanticAnalyzerPluginInterface | CheckerPluginInterface,
    cls: ClassDef,
    name: str,
    args: list[Argument],
    return_type: Type,
    self_type: Type | None = None,
    tvar_def: TypeVarType | None = None,
    is_classmethod: bool = False,
) -> None:
    """Very closely related to `mypy.plugins.common.add_method_to_class`, with a few pydantic-specific changes.

    Synthesizes a method named `name` on `cls` with the given arguments and return
    type, replaces any previously plugin-generated method of the same name, and
    registers the new symbol (wrapped in a `classmethod` decorator when requested)
    in the class's symbol table.
    """
    info = cls.info

    # First remove any previously generated methods with the same name
    # to avoid clashes and problems in the semantic analyzer.
    if name in info.names:
        sym = info.names[name]
        if sym.plugin_generated and isinstance(sym.node, FuncDef):
            cls.defs.body.remove(sym.node)  # pragma: no cover

    # The two plugin interfaces expose different APIs for resolving `builtins.function`.
    if isinstance(api, SemanticAnalyzerPluginInterface):
        function_type = api.named_type('builtins.function')
    else:
        function_type = api.named_generic_type('builtins.function', [])

    if is_classmethod:
        self_type = self_type or TypeType(fill_typevars(info))
        first = [Argument(Var('_cls'), self_type, None, ARG_POS, True)]
    else:
        self_type = self_type or fill_typevars(info)
        # `self` is positional *ONLY* here, but this can't be expressed
        # fully in the mypy internal API. ARG_POS is the closest we can get.
        # Using ARG_POS will, however, give mypy errors if a `self` field
        # is present on a model:
        #
        #     Name "self" already defined (possibly by an import) [no-redef]
        #
        # As a workaround, we give this argument a name that will
        # never conflict. By its positional nature, this name will not
        # be used or exposed to users.
        first = [Argument(Var('__pydantic_self__'), self_type, None, ARG_POS)]
    args = first + args

    # Split the Argument objects into the parallel lists CallableType expects.
    arg_types, arg_names, arg_kinds = [], [], []
    for arg in args:
        assert arg.type_annotation, 'All arguments must be fully typed.'
        arg_types.append(arg.type_annotation)
        arg_names.append(arg.variable.name)
        arg_kinds.append(arg.kind)

    signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type)
    if tvar_def:
        signature.variables = [tvar_def]

    # The synthesized body is just `pass`; only the signature matters for type checking.
    func = FuncDef(name, args, Block([PassStmt()]))
    func.info = info
    func.type = set_callable_name(signature, func)
    func.is_class = is_classmethod
    func._fullname = info.fullname + '.' + name
    func.line = info.line

    # NOTE: we would like the plugin generated node to dominate, but we still
    # need to keep any existing definitions so they get semantically analyzed.
    if name in info.names:
        # Get a nice unique name instead.
        r_name = get_unique_redefinition_name(name, info.names)
        info.names[r_name] = info.names[name]

    # Add decorator for is_classmethod
    # The dataclasses plugin claims this is unnecessary for classmethods, but not including it results in a
    # signature incompatible with the superclass, which causes mypy errors to occur for every subclass of BaseModel.
    if is_classmethod:
        func.is_decorated = True
        v = Var(name, func.type)
        v.info = info
        v._fullname = func._fullname
        v.is_classmethod = True
        dec = Decorator(func, [NameExpr('classmethod')], v)
        dec.line = info.line
        sym = SymbolTableNode(MDEF, dec)
    else:
        sym = SymbolTableNode(MDEF, func)
    sym.plugin_generated = True
    info.names[name] = sym

    info.defn.defs.body.append(func)
def parse_toml(config_file: str) -> dict[str, Any] | None:
    """Returns a dict of config keys to values.

    It reads configs from toml file and returns `None` if the file is not a toml file.
    """
    if not config_file.endswith('.toml'):
        # Non-TOML config files (e.g. setup.cfg) are handled elsewhere.
        return None

    if sys.version_info >= (3, 11):
        # `tomllib` is part of the standard library from Python 3.11 onwards.
        import tomllib as toml_
    else:
        try:
            import tomli as toml_
        except ImportError:  # pragma: no cover
            import warnings

            warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.')
            return None

    # TOML parsers require the file to be opened in binary mode.
    with open(config_file, 'rb') as config_stream:
        return toml_.load(config_stream)