Coverage for pydantic/mypy.py: 90.86%
619 statements
« prev ^ index » next coverage.py v7.8.0, created at 2025-04-26 07:51 +0000
1"""This module includes classes and functions designed specifically for use with the mypy plugin."""
3from __future__ import annotations 1hijklmnopqrstuvwxyzABCDEabcdefgFGHIJKLMNOP
5import sys 1hijklmnopqrstuvwxyzABCDEabcdefgFGHIJKLMNOP
6from collections.abc import Iterator 1hijklmnopqrstuvwxyzABCDEabcdefgFGHIJKLMNOP
7from configparser import ConfigParser 1hijklmnopqrstuvwxyzABCDEabcdefgFGHIJKLMNOP
8from typing import Any, Callable 1hijklmnopqrstuvwxyzABCDEabcdefgFGHIJKLMNOP
10from mypy.errorcodes import ErrorCode 1hijklmnopqrstuvwxyzABCDEabcdefgFGHIJKLMNOP
11from mypy.expandtype import expand_type, expand_type_by_instance 1abcdefg
12from mypy.nodes import ( 1abcdefg
13 ARG_NAMED,
14 ARG_NAMED_OPT,
15 ARG_OPT,
16 ARG_POS,
17 ARG_STAR2,
18 INVARIANT,
19 MDEF,
20 Argument,
21 AssignmentStmt,
22 Block,
23 CallExpr,
24 ClassDef,
25 Context,
26 Decorator,
27 DictExpr,
28 EllipsisExpr,
29 Expression,
30 FuncDef,
31 IfStmt,
32 JsonDict,
33 MemberExpr,
34 NameExpr,
35 PassStmt,
36 PlaceholderNode,
37 RefExpr,
38 Statement,
39 StrExpr,
40 SymbolTableNode,
41 TempNode,
42 TypeAlias,
43 TypeInfo,
44 Var,
45)
46from mypy.options import Options 1abcdefg
47from mypy.plugin import ( 1abcdefg
48 CheckerPluginInterface,
49 ClassDefContext,
50 MethodContext,
51 Plugin,
52 ReportConfigContext,
53 SemanticAnalyzerPluginInterface,
54)
55from mypy.plugins.common import ( 1abcdefg
56 deserialize_and_fixup_type,
57)
58from mypy.semanal import set_callable_name 1abcdefg
59from mypy.server.trigger import make_wildcard_trigger 1abcdefg
60from mypy.state import state 1abcdefg
61from mypy.type_visitor import TypeTranslator 1abcdefg
62from mypy.typeops import map_type_from_supertype 1abcdefg
63from mypy.types import ( 1abcdefg
64 AnyType,
65 CallableType,
66 Instance,
67 NoneType,
68 Type,
69 TypeOfAny,
70 TypeType,
71 TypeVarType,
72 UnionType,
73 get_proper_type,
74)
75from mypy.typevars import fill_typevars 1abcdefg
76from mypy.util import get_unique_redefinition_name 1abcdefg
77from mypy.version import __version__ as mypy_version 1abcdefg
79from pydantic._internal import _fields 1abcdefg
80from pydantic.version import parse_mypy_version 1abcdefg
82CONFIGFILE_KEY = 'pydantic-mypy' 1abcdefg
83METADATA_KEY = 'pydantic-mypy-metadata' 1abcdefg
84BASEMODEL_FULLNAME = 'pydantic.main.BaseModel' 1abcdefg
85BASESETTINGS_FULLNAME = 'pydantic_settings.main.BaseSettings' 1abcdefg
86ROOT_MODEL_FULLNAME = 'pydantic.root_model.RootModel' 1abcdefg
87MODEL_METACLASS_FULLNAME = 'pydantic._internal._model_construction.ModelMetaclass' 1abcdefg
88FIELD_FULLNAME = 'pydantic.fields.Field' 1abcdefg
89DATACLASS_FULLNAME = 'pydantic.dataclasses.dataclass' 1abcdefg
90MODEL_VALIDATOR_FULLNAME = 'pydantic.functional_validators.model_validator' 1abcdefg
91DECORATOR_FULLNAMES = { 1abcdefg
92 'pydantic.functional_validators.field_validator',
93 'pydantic.functional_validators.model_validator',
94 'pydantic.functional_serializers.serializer',
95 'pydantic.functional_serializers.model_serializer',
96 'pydantic.deprecated.class_validators.validator',
97 'pydantic.deprecated.class_validators.root_validator',
98}
99IMPLICIT_CLASSMETHOD_DECORATOR_FULLNAMES = DECORATOR_FULLNAMES - {'pydantic.functional_serializers.model_serializer'} 1abcdefg
102MYPY_VERSION_TUPLE = parse_mypy_version(mypy_version) 1abcdefg
103BUILTINS_NAME = 'builtins' 1abcdefg
105# Increment version if plugin changes and mypy caches should be invalidated
106__version__ = 2 1abcdefg
def plugin(version: str) -> type[Plugin]:
    """Entry point called by mypy to load this plugin.

    Args:
        version: The mypy version string. We might want to use this to warn
            when the running mypy is newer, or especially older, than expected.

    Returns:
        The Pydantic mypy plugin type.
    """
    return PydanticPlugin
class PydanticPlugin(Plugin):
    """The Pydantic mypy plugin."""

    def __init__(self, options: Options) -> None:
        self.plugin_config = PydanticPluginConfig(options)
        self._plugin_data = self.plugin_config.to_data()
        super().__init__(options)

    def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
        """Update Pydantic model class."""
        symbol = self.lookup_fully_qualified(fullname)
        if symbol and isinstance(symbol.node, TypeInfo):  # pragma: no branch
            # No branching may occur if the mypy cache has not been cleared.
            if symbol.node.has_base(BASEMODEL_FULLNAME):
                return self._pydantic_model_class_maker_callback
        return None

    def get_metaclass_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
        """Update Pydantic `ModelMetaclass` definition."""
        return self._pydantic_model_metaclass_marker_callback if fullname == MODEL_METACLASS_FULLNAME else None

    def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None:
        """Adjust return type of `from_orm` method call."""
        return from_attributes_callback if fullname.endswith('.from_orm') else None

    def report_config_data(self, ctx: ReportConfigContext) -> dict[str, Any]:
        """Return all plugin config data.

        Used by mypy to determine if cache needs to be discarded.
        """
        return self._plugin_data

    def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> None:
        # Run the full model transformation for a BaseModel subclass.
        PydanticModelTransformer(ctx.cls, ctx.reason, ctx.api, self.plugin_config).transform()

    def _pydantic_model_metaclass_marker_callback(self, ctx: ClassDefContext) -> None:
        """Reset the `dataclass_transform_spec` attribute of `ModelMetaclass`.

        Let the plugin handle it instead. This behavior can be disabled by setting
        `debug_dataclass_transform` to `True`, for testing purposes.
        """
        if self.plugin_config.debug_dataclass_transform:
            return
        declared = ctx.cls.info.declared_metaclass
        assert declared, "callback not passed from 'get_metaclass_hook'"
        if getattr(declared.type, 'dataclass_transform_spec', None):
            declared.type.dataclass_transform_spec = None
class PydanticPluginConfig:
    """A Pydantic mypy plugin config holder.

    Attributes:
        init_forbid_extra: Whether to add a `**kwargs` at the end of the generated `__init__` signature.
        init_typed: Whether to annotate fields in the generated `__init__`.
        warn_required_dynamic_aliases: Whether to raise required dynamic aliases error.
        debug_dataclass_transform: Whether to not reset `dataclass_transform_spec` attribute
            of `ModelMetaclass` for testing purposes.
    """

    __slots__ = (
        'init_forbid_extra',
        'init_typed',
        'warn_required_dynamic_aliases',
        'debug_dataclass_transform',
    )
    init_forbid_extra: bool
    init_typed: bool
    warn_required_dynamic_aliases: bool
    debug_dataclass_transform: bool  # undocumented

    def __init__(self, options: Options) -> None:
        if options.config_file is None:  # pragma: no cover
            return

        toml_config = parse_toml(options.config_file)
        if toml_config is None:
            # Fall back to an INI-style config file; missing keys default to False.
            ini_config = ConfigParser()
            ini_config.read(options.config_file)
            for key in self.__slots__:
                setattr(self, key, ini_config.getboolean(CONFIGFILE_KEY, key, fallback=False))
        else:
            section = toml_config.get('tool', {}).get('pydantic-mypy', {})
            for key in self.__slots__:
                value = section.get(key, False)
                if not isinstance(value, bool):
                    raise ValueError(f'Configuration value must be a boolean for key: {key}')
                setattr(self, key, value)

    def to_data(self) -> dict[str, Any]:
        """Returns a dict of config names to their values."""
        return {key: getattr(self, key) for key in self.__slots__}
def from_attributes_callback(ctx: MethodContext) -> Type:
    """Raise an error if from_attributes is not enabled."""
    model_type: Instance
    ctx_type = ctx.type
    if isinstance(ctx_type, TypeType):
        ctx_type = ctx_type.item
    if isinstance(ctx_type, CallableType) and isinstance(ctx_type.ret_type, Instance):
        model_type = ctx_type.ret_type  # called on the class
    elif isinstance(ctx_type, Instance):
        model_type = ctx_type  # called on an instance (unusual, but still valid)
    else:  # pragma: no cover
        detail = f'ctx.type: {ctx_type} (of type {ctx_type.__class__.__name__})'
        error_unexpected_behavior(detail, ctx.api, ctx.context)
        return ctx.default_return_type

    pydantic_metadata = model_type.type.metadata.get(METADATA_KEY)
    if pydantic_metadata is None:
        # Not a class this plugin has processed.
        return ctx.default_return_type
    if not model_type.type.has_base(BASEMODEL_FULLNAME):
        # Not a Pydantic v2 model.
        return ctx.default_return_type
    if pydantic_metadata.get('config', {}).get('from_attributes') is not True:
        error_from_attributes(model_type.type.name, ctx.api, ctx.context)
    return ctx.default_return_type
class PydanticModelField:
    """Represents one Pydantic model field during plugin analysis.

    Based on mypy.plugins.dataclasses.DataclassAttribute.
    """

    def __init__(
        self,
        name: str,
        alias: str | None,
        is_frozen: bool,
        has_dynamic_alias: bool,
        has_default: bool,
        strict: bool | None,
        line: int,
        column: int,
        type: Type | None,
        info: TypeInfo,
    ):
        self.name = name
        self.alias = alias
        self.is_frozen = is_frozen
        self.has_dynamic_alias = has_dynamic_alias
        self.has_default = has_default
        self.strict = strict
        self.line = line
        self.column = column
        self.type = type
        self.info = info

    def to_argument(
        self,
        current_info: TypeInfo,
        typed: bool,
        model_strict: bool,
        force_optional: bool,
        use_alias: bool,
        api: SemanticAnalyzerPluginInterface,
        force_typevars_invariant: bool,
        is_root_model_root: bool,
    ) -> Argument:
        """Build a synthesized-method argument for this field.

        Based on mypy.plugins.dataclasses.DataclassAttribute.to_argument.
        """
        var = self.to_var(current_info, api, use_alias, force_typevars_invariant)

        # A per-field `strict` setting overrides the model-level one.
        strict = model_strict if self.strict is None else self.strict
        if typed or strict:
            annotation = self.expand_type(current_info, api, include_root_type=True)
        else:
            annotation = AnyType(TypeOfAny.explicit)

        if is_root_model_root:
            kind = ARG_OPT
        elif force_optional or self.has_default:
            kind = ARG_NAMED_OPT
        else:
            kind = ARG_NAMED
        return Argument(variable=var, type_annotation=annotation, initializer=None, kind=kind)

    def expand_type(
        self,
        current_info: TypeInfo,
        api: SemanticAnalyzerPluginInterface,
        force_typevars_invariant: bool = False,
        include_root_type: bool = False,
    ) -> Type | None:
        """Expand `Self` and type variables in this field's type for `current_info`.

        Based on mypy.plugins.dataclasses.DataclassAttribute.expand_type.
        """
        if force_typevars_invariant:
            # In some cases, mypy will emit an error "Cannot use a covariant type variable as a parameter".
            # To prevent that, we add an option to replace typevars with invariant ones while building certain
            # method signatures (in particular, `__init__`). There may be a better way to do this; if this causes
            # us problems in the future, we should look into why the dataclasses plugin doesn't have this issue.
            if isinstance(self.type, TypeVarType):
                modified = self.type.copy_modified()
                modified.variance = INVARIANT
                self.type = modified

        if self.type is None or self.info.self_type is None:
            return self.type

        # In general, it is not safe to call `expand_type()` during semantic analysis,
        # however this plugin is called very late, so all types should be fully ready.
        # Also, it is tricky to avoid eager expansion of Self types here (e.g. because
        # we serialize attributes).
        with state.strict_optional_set(api.options.strict_optional):
            filled = fill_typevars(current_info)
            # Cannot be TupleType as current_info represents a Pydantic model:
            assert isinstance(filled, Instance)
            if force_typevars_invariant:
                for arg in filled.args:
                    if isinstance(arg, TypeVarType):
                        arg.variance = INVARIANT

            expanded = expand_type(self.type, {self.info.self_type.id: filled})
            if include_root_type and isinstance(expanded, Instance) and is_root_model(expanded.type):
                # When a root model is used as a field, Pydantic accepts both an instance of
                # the root model as well as instances of the `root` field type:
                root_type = expanded.type['root'].type
                if root_type is None:
                    # Happens if the hint for 'root' has unsolved forward references.
                    return expanded
                expanded_root = expand_type_by_instance(root_type, expanded)
                expanded = UnionType([expanded, expanded_root])
            return expanded

    def to_var(
        self,
        current_info: TypeInfo,
        api: SemanticAnalyzerPluginInterface,
        use_alias: bool,
        force_typevars_invariant: bool = False,
    ) -> Var:
        """Create a `Var` node for this field, named by its alias when requested.

        Based on mypy.plugins.dataclasses.DataclassAttribute.to_var.
        """
        name = self.alias if use_alias and self.alias is not None else self.name
        return Var(name, self.expand_type(current_info, api, force_typevars_invariant))

    def serialize(self) -> JsonDict:
        """Serialize this field to a JSON-compatible dict for the mypy metadata.

        Based on mypy.plugins.dataclasses.DataclassAttribute.serialize.
        """
        assert self.type
        return {
            'name': self.name,
            'alias': self.alias,
            'is_frozen': self.is_frozen,
            'has_dynamic_alias': self.has_dynamic_alias,
            'has_default': self.has_default,
            'strict': self.strict,
            'line': self.line,
            'column': self.column,
            'type': self.type.serialize(),
        }

    @classmethod
    def deserialize(cls, info: TypeInfo, data: JsonDict, api: SemanticAnalyzerPluginInterface) -> PydanticModelField:
        """Rebuild a field from serialized metadata.

        Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize.
        """
        data = data.copy()
        typ = deserialize_and_fixup_type(data.pop('type'), api)
        return cls(type=typ, info=info, **data)

    def expand_typevar_from_subtype(self, sub_type: TypeInfo, api: SemanticAnalyzerPluginInterface) -> None:
        """Expands type vars in the context of a subtype when an attribute is inherited
        from a generic super type.
        """
        if self.type is not None:
            with state.strict_optional_set(api.options.strict_optional):
                self.type = map_type_from_supertype(self.type, sub_type, self.info)
class PydanticModelClassVar:
    """Represents a `ClassVar` declared on a Pydantic model.

    Based on mypy.plugins.dataclasses.DataclassAttribute. ClassVars are ignored by subclasses.

    Attributes:
        name: the ClassVar name
    """

    def __init__(self, name):
        self.name = name

    @classmethod
    def deserialize(cls, data: JsonDict) -> PydanticModelClassVar:
        """Rebuild a ClassVar from serialized metadata."""
        return cls(**data.copy())

    def serialize(self) -> JsonDict:
        """Serialize this ClassVar to a JSON-compatible dict."""
        return {'name': self.name}
422class PydanticModelTransformer: 1abcdefg
423 """Transform the BaseModel subclass according to the plugin settings.
425 Attributes:
426 tracked_config_fields: A set of field configs that the plugin has to track their value.
427 """
429 tracked_config_fields: set[str] = { 1abcdefg
430 'extra',
431 'frozen',
432 'from_attributes',
433 'populate_by_name',
434 'validate_by_alias',
435 'validate_by_name',
436 'alias_generator',
437 'strict',
438 }
440 def __init__( 1abcdefg
441 self,
442 cls: ClassDef,
443 reason: Expression | Statement,
444 api: SemanticAnalyzerPluginInterface,
445 plugin_config: PydanticPluginConfig,
446 ) -> None:
447 self._cls = cls 1abcdefg
448 self._reason = reason 1abcdefg
449 self._api = api 1abcdefg
451 self.plugin_config = plugin_config 1abcdefg
453 def transform(self) -> bool: 1abcdefg
454 """Configures the BaseModel subclass according to the plugin settings.
456 In particular:
458 * determines the model config and fields,
459 * adds a fields-aware signature for the initializer and construct methods
460 * freezes the class if frozen = True
461 * stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses
462 """
463 info = self._cls.info 1abcdefg
464 is_a_root_model = is_root_model(info) 1abcdefg
465 config = self.collect_config() 1abcdefg
466 fields, class_vars = self.collect_fields_and_class_vars(config, is_a_root_model) 1abcdefg
467 if fields is None or class_vars is None: 467 ↛ 469line 467 didn't jump to line 469 because the condition on line 467 was never true1abcdefg
468 # Some definitions are not ready. We need another pass.
469 return False
470 for field in fields: 1abcdefg
471 if field.type is None: 1abcdefg
472 return False 1abcdefg
474 is_settings = info.has_base(BASESETTINGS_FULLNAME) 1abcdefg
475 self.add_initializer(fields, config, is_settings, is_a_root_model) 1abcdefg
476 self.add_model_construct_method(fields, config, is_settings, is_a_root_model) 1abcdefg
477 self.set_frozen(fields, self._api, frozen=config.frozen is True) 1abcdefg
479 self.adjust_decorator_signatures() 1abcdefg
481 info.metadata[METADATA_KEY] = { 1abcdefg
482 'fields': {field.name: field.serialize() for field in fields},
483 'class_vars': {class_var.name: class_var.serialize() for class_var in class_vars},
484 'config': config.get_values_dict(),
485 }
487 return True 1abcdefg
489 def adjust_decorator_signatures(self) -> None: 1abcdefg
490 """When we decorate a function `f` with `pydantic.validator(...)`, `pydantic.field_validator`
491 or `pydantic.serializer(...)`, mypy sees `f` as a regular method taking a `self` instance,
492 even though pydantic internally wraps `f` with `classmethod` if necessary.
494 Teach mypy this by marking any function whose outermost decorator is a `validator()`,
495 `field_validator()` or `serializer()` call as a `classmethod`.
496 """
497 for sym in self._cls.info.names.values(): 1abcdefg
498 if isinstance(sym.node, Decorator): 1abcdefg
499 first_dec = sym.node.original_decorators[0] 1abcdefg
500 if ( 1abc
501 isinstance(first_dec, CallExpr)
502 and isinstance(first_dec.callee, NameExpr)
503 and first_dec.callee.fullname in IMPLICIT_CLASSMETHOD_DECORATOR_FULLNAMES
504 # @model_validator(mode="after") is an exception, it expects a regular method
505 and not (
506 first_dec.callee.fullname == MODEL_VALIDATOR_FULLNAME
507 and any(
508 first_dec.arg_names[i] == 'mode' and isinstance(arg, StrExpr) and arg.value == 'after'
509 for i, arg in enumerate(first_dec.args)
510 )
511 )
512 ):
513 # TODO: Only do this if the first argument of the decorated function is `cls`
514 sym.node.func.is_class = True 1abcdefg
516 def collect_config(self) -> ModelConfigData: # noqa: C901 (ignore complexity) 1abcdefg
517 """Collects the values of the config attributes that are used by the plugin, accounting for parent classes."""
518 cls = self._cls 1abcdefg
519 config = ModelConfigData() 1abcdefg
521 has_config_kwargs = False 1abcdefg
522 has_config_from_namespace = False 1abcdefg
524 # Handle `class MyModel(BaseModel, <name>=<expr>, ...):`
525 for name, expr in cls.keywords.items(): 1abcdefg
526 config_data = self.get_config_update(name, expr) 1abcdefg
527 if config_data: 1abcdefg
528 has_config_kwargs = True 1abcdefg
529 config.update(config_data) 1abcdefg
531 # Handle `model_config`
532 stmt: Statement | None = None 1abcdefg
533 for stmt in cls.defs.body: 1abcdefg
534 if not isinstance(stmt, (AssignmentStmt, ClassDef)): 1abcdefg
535 continue 1abcdefg
537 if isinstance(stmt, AssignmentStmt): 1abcdefg
538 lhs = stmt.lvalues[0] 1abcdefg
539 if not isinstance(lhs, NameExpr) or lhs.name != 'model_config': 1abcdefg
540 continue 1abcdefg
542 if isinstance(stmt.rvalue, CallExpr): # calls to `dict` or `ConfigDict` 1abcdefg
543 for arg_name, arg in zip(stmt.rvalue.arg_names, stmt.rvalue.args): 1abcdefg
544 if arg_name is None: 544 ↛ 545line 544 didn't jump to line 545 because the condition on line 544 was never true1abcdefg
545 continue
546 config.update(self.get_config_update(arg_name, arg, lax_extra=True)) 1abcdefg
547 elif isinstance(stmt.rvalue, DictExpr): # dict literals 547 ↛ 564line 547 didn't jump to line 564 because the condition on line 547 was always true1abcdefg
548 for key_expr, value_expr in stmt.rvalue.items: 1abcdefg
549 if not isinstance(key_expr, StrExpr): 549 ↛ 550line 549 didn't jump to line 550 because the condition on line 549 was never true1abcdefg
550 continue
551 config.update(self.get_config_update(key_expr.value, value_expr)) 1abcdefg
553 elif isinstance(stmt, ClassDef): 553 ↛ 564line 553 didn't jump to line 564 because the condition on line 553 was always true1abcdefg
554 if stmt.name != 'Config': # 'deprecated' Config-class 1abcdefg
555 continue 1abcdefg
556 for substmt in stmt.defs.body: 1abcdefg
557 if not isinstance(substmt, AssignmentStmt): 1abcdefg
558 continue 1abcdefg
559 lhs = substmt.lvalues[0] 1abcdefg
560 if not isinstance(lhs, NameExpr): 560 ↛ 561line 560 didn't jump to line 561 because the condition on line 560 was never true1abcdefg
561 continue
562 config.update(self.get_config_update(lhs.name, substmt.rvalue)) 1abcdefg
564 if has_config_kwargs: 564 ↛ 565line 564 didn't jump to line 565 because the condition on line 564 was never true1abcdefg
565 self._api.fail(
566 'Specifying config in two places is ambiguous, use either Config attribute or class kwargs',
567 cls,
568 )
569 break
571 has_config_from_namespace = True 1abcdefg
573 if has_config_kwargs or has_config_from_namespace: 1abcdefg
574 if ( 1abc
575 stmt
576 and config.has_alias_generator
577 and not (config.validate_by_name or config.populate_by_name)
578 and self.plugin_config.warn_required_dynamic_aliases
579 ):
580 error_required_dynamic_aliases(self._api, stmt) 1abcdefg
582 for info in cls.info.mro[1:]: # 0 is the current class 1abcdefg
583 if METADATA_KEY not in info.metadata: 1abcdefg
584 continue 1abcdefg
586 # Each class depends on the set of fields in its ancestors
587 self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname)) 1abcdefg
588 for name, value in info.metadata[METADATA_KEY]['config'].items(): 1abcdefg
589 config.setdefault(name, value) 1abcdefg
590 return config 1abcdefg
592 def collect_fields_and_class_vars( 1abcdefg
593 self, model_config: ModelConfigData, is_root_model: bool
594 ) -> tuple[list[PydanticModelField] | None, list[PydanticModelClassVar] | None]:
595 """Collects the fields for the model, accounting for parent classes."""
596 cls = self._cls 1abcdefg
598 # First, collect fields and ClassVars belonging to any class in the MRO, ignoring duplicates.
599 #
600 # We iterate through the MRO in reverse because attrs defined in the parent must appear
601 # earlier in the attributes list than attrs defined in the child. See:
602 # https://docs.python.org/3/library/dataclasses.html#inheritance
603 #
604 # However, we also want fields defined in the subtype to override ones defined
605 # in the parent. We can implement this via a dict without disrupting the attr order
606 # because dicts preserve insertion order in Python 3.7+.
607 found_fields: dict[str, PydanticModelField] = {} 1abcdefg
608 found_class_vars: dict[str, PydanticModelClassVar] = {} 1abcdefg
609 for info in reversed(cls.info.mro[1:-1]): # 0 is the current class, -2 is BaseModel, -1 is object 1abcdefg
610 # if BASEMODEL_METADATA_TAG_KEY in info.metadata and BASEMODEL_METADATA_KEY not in info.metadata:
611 # # We haven't processed the base class yet. Need another pass.
612 # return None, None
613 if METADATA_KEY not in info.metadata: 1abcdefg
614 continue 1abcdefg
616 # Each class depends on the set of attributes in its dataclass ancestors.
617 self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname)) 1abcdefg
619 for name, data in info.metadata[METADATA_KEY]['fields'].items(): 1abcdefg
620 field = PydanticModelField.deserialize(info, data, self._api) 1abcdefg
621 # (The following comment comes directly from the dataclasses plugin)
622 # TODO: We shouldn't be performing type operations during the main
623 # semantic analysis pass, since some TypeInfo attributes might
624 # still be in flux. This should be performed in a later phase.
625 field.expand_typevar_from_subtype(cls.info, self._api) 1abcdefg
626 found_fields[name] = field 1abcdefg
628 sym_node = cls.info.names.get(name) 1abcdefg
629 if sym_node and sym_node.node and not isinstance(sym_node.node, Var): 629 ↛ 630line 629 didn't jump to line 630 because the condition on line 629 was never true1abcdefg
630 self._api.fail(
631 'BaseModel field may only be overridden by another field',
632 sym_node.node,
633 )
634 # Collect ClassVars
635 for name, data in info.metadata[METADATA_KEY]['class_vars'].items(): 635 ↛ 636line 635 didn't jump to line 636 because the loop on line 635 never started1abcdefg
636 found_class_vars[name] = PydanticModelClassVar.deserialize(data)
638 # Second, collect fields and ClassVars belonging to the current class.
639 current_field_names: set[str] = set() 1abcdefg
640 current_class_vars_names: set[str] = set() 1abcdefg
641 for stmt in self._get_assignment_statements_from_block(cls.defs): 1abcdefg
642 maybe_field = self.collect_field_or_class_var_from_stmt(stmt, model_config, found_class_vars) 1abcdefg
643 if maybe_field is None: 1abcdefg
644 continue 1abcdefg
646 lhs = stmt.lvalues[0] 1abcdefg
647 assert isinstance(lhs, NameExpr) # collect_field_or_class_var_from_stmt guarantees this 1abcdefg
648 if isinstance(maybe_field, PydanticModelField): 1abcdefg
649 if is_root_model and lhs.name != 'root': 1abcdefg
650 error_extra_fields_on_root_model(self._api, stmt) 1abcdefg
651 else:
652 current_field_names.add(lhs.name) 1abcdefg
653 found_fields[lhs.name] = maybe_field 1abcdefg
654 elif isinstance(maybe_field, PydanticModelClassVar): 654 ↛ 641line 654 didn't jump to line 641 because the condition on line 654 was always true1abcdefg
655 current_class_vars_names.add(lhs.name) 1abcdefg
656 found_class_vars[lhs.name] = maybe_field 1abcdefg
658 return list(found_fields.values()), list(found_class_vars.values()) 1abcdefg
660 def _get_assignment_statements_from_if_statement(self, stmt: IfStmt) -> Iterator[AssignmentStmt]: 1abcdefg
661 for body in stmt.body: 1abcdefg
662 if not body.is_unreachable: 662 ↛ 661line 662 didn't jump to line 661 because the condition on line 662 was always true1abcdefg
663 yield from self._get_assignment_statements_from_block(body) 1abcdefg
664 if stmt.else_body is not None and not stmt.else_body.is_unreachable: 664 ↛ 665line 664 didn't jump to line 665 because the condition on line 664 was never true1abcdefg
665 yield from self._get_assignment_statements_from_block(stmt.else_body)
667 def _get_assignment_statements_from_block(self, block: Block) -> Iterator[AssignmentStmt]: 1abcdefg
668 for stmt in block.body: 1abcdefg
669 if isinstance(stmt, AssignmentStmt): 1abcdefg
670 yield stmt 1abcdefg
671 elif isinstance(stmt, IfStmt): 1abcdefg
672 yield from self._get_assignment_statements_from_if_statement(stmt) 1abcdefg
674 def collect_field_or_class_var_from_stmt( # noqa C901 1abcdefg
675 self, stmt: AssignmentStmt, model_config: ModelConfigData, class_vars: dict[str, PydanticModelClassVar]
676 ) -> PydanticModelField | PydanticModelClassVar | None:
677 """Get pydantic model field from statement.
679 Args:
680 stmt: The statement.
681 model_config: Configuration settings for the model.
682 class_vars: ClassVars already known to be defined on the model.
684 Returns:
685 A pydantic model field if it could find the field in statement. Otherwise, `None`.
686 """
687 cls = self._cls 1abcdefg
689 lhs = stmt.lvalues[0] 1abcdefg
690 if not isinstance(lhs, NameExpr) or not _fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config': 1abcdefg
691 return None 1abcdefg
693 if not stmt.new_syntax: 1abcdefg
694 if ( 1abc
695 isinstance(stmt.rvalue, CallExpr)
696 and isinstance(stmt.rvalue.callee, CallExpr)
697 and isinstance(stmt.rvalue.callee.callee, NameExpr)
698 and stmt.rvalue.callee.callee.fullname in DECORATOR_FULLNAMES
699 ):
700 # This is a (possibly-reused) validator or serializer, not a field
701 # In particular, it looks something like: my_validator = validator('my_field')(f)
702 # Eventually, we may want to attempt to respect model_config['ignored_types']
703 return None 1abcdefg
705 if lhs.name in class_vars: 705 ↛ 707line 705 didn't jump to line 707 because the condition on line 705 was never true1abcdefg
706 # Class vars are not fields and are not required to be annotated
707 return None
709 # The assignment does not have an annotation, and it's not anything else we recognize
710 error_untyped_fields(self._api, stmt) 1abcdefg
711 return None 1abcdefg
713 lhs = stmt.lvalues[0] 1abcdefg
714 if not isinstance(lhs, NameExpr): 714 ↛ 715line 714 didn't jump to line 715 because the condition on line 714 was never true1abcdefg
715 return None
717 if not _fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config': 717 ↛ 718line 717 didn't jump to line 718 because the condition on line 717 was never true1abcdefg
718 return None
720 sym = cls.info.names.get(lhs.name) 1abcdefg
721 if sym is None: # pragma: no cover 1abcdefg
722 # This is likely due to a star import (see the dataclasses plugin for a more detailed explanation)
723 # This is the same logic used in the dataclasses plugin
724 return None
726 node = sym.node 1abcdefg
727 if isinstance(node, PlaceholderNode): # pragma: no cover 1abcdefg
728 # See the PlaceholderNode docstring for more detail about how this can occur
729 # Basically, it is an edge case when dealing with complex import logic
731 # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does..
732 return None
734 if isinstance(node, TypeAlias): 1abcdefg
735 self._api.fail(
736 'Type aliases inside BaseModel definitions are not supported at runtime',
737 node,
738 )
739 # Skip processing this node. This doesn't match the runtime behaviour,
740 # but the only alternative would be to modify the SymbolTable,
741 # and it's a little hairy to do that in a plugin.
742 return None
744 if not isinstance(node, Var): # pragma: no cover 1abcdefg
745 # Don't know if this edge case still happens with the `is_valid_field` check above
746 # but better safe than sorry
748 # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does..
749 return None
751 # x: ClassVar[int] is not a field
752 if node.is_classvar: 1abcdefg
753 return PydanticModelClassVar(lhs.name) 1abcdefg
755 # x: InitVar[int] is not supported in BaseModel
756 node_type = get_proper_type(node.type) 1abcdefg
757 if isinstance(node_type, Instance) and node_type.type.fullname == 'dataclasses.InitVar': 757 ↛ 758line 757 didn't jump to line 758 because the condition on line 757 was never true1abcdefg
758 self._api.fail(
759 'InitVar is not supported in BaseModel',
760 node,
761 )
763 has_default = self.get_has_default(stmt) 1abcdefg
764 strict = self.get_strict(stmt) 1abcdefg
766 if sym.type is None and node.is_final and node.is_inferred: 766 ↛ 774line 766 didn't jump to line 774 because the condition on line 766 was never true1abcdefg
767 # This follows the logic from the dataclasses plugin. The following comment is taken verbatim:
768 #
769 # This is a special case, assignment like x: Final = 42 is classified
770 # annotated above, but mypy strips the `Final` turning it into x = 42.
771 # We do not support inferred types in dataclasses, so we can try inferring
772 # type for simple literals, and otherwise require an explicit type
773 # argument for Final[...].
774 typ = self._api.analyze_simple_literal_type(stmt.rvalue, is_final=True)
775 if typ:
776 node.type = typ
777 else:
778 self._api.fail(
779 'Need type argument for Final[...] with non-literal default in BaseModel',
780 stmt,
781 )
782 node.type = AnyType(TypeOfAny.from_error)
784 if node.is_final and has_default: 1abcdefg
785 # TODO this path should be removed (see https://github.com/pydantic/pydantic/issues/11119)
786 return PydanticModelClassVar(lhs.name) 1abcdefg
788 alias, has_dynamic_alias = self.get_alias_info(stmt) 1abcdefg
789 if ( 1abc
790 has_dynamic_alias
791 and not (model_config.validate_by_name or model_config.populate_by_name)
792 and self.plugin_config.warn_required_dynamic_aliases
793 ):
794 error_required_dynamic_aliases(self._api, stmt) 1abcdefg
795 is_frozen = self.is_field_frozen(stmt) 1abcdefg
797 init_type = self._infer_dataclass_attr_init_type(sym, lhs.name, stmt) 1abcdefg
798 return PydanticModelField( 1abcdefg
799 name=lhs.name,
800 has_dynamic_alias=has_dynamic_alias,
801 has_default=has_default,
802 strict=strict,
803 alias=alias,
804 is_frozen=is_frozen,
805 line=stmt.line,
806 column=stmt.column,
807 type=init_type,
808 info=cls.info,
809 )
    def _infer_dataclass_attr_init_type(self, sym: SymbolTableNode, name: str, context: Context) -> Type | None:
        """Infer __init__ argument type for an attribute.

        In particular, possibly use the signature of __set__.

        Args:
            sym: Symbol table node of the attribute whose `__init__` argument type is wanted.
            name: The attribute name (not referenced by the current implementation).
            context: Context used when reporting unsupported `__set__` signatures.

        Returns:
            The inferred type for the generated `__init__` argument, falling back to `sym.type`.
        """
        default = sym.type
        if sym.implicit:
            # Implicitly-added symbols carry no user-declared type worth refining.
            return default
        t = get_proper_type(sym.type)

        # Perform a simple-minded inference from the signature of __set__, if present.
        # We can't use mypy.checkmember here, since this plugin runs before type checking.
        # We only support some basic scenarios here, which is hopefully sufficient for
        # the vast majority of use cases.
        if not isinstance(t, Instance):
            return default
        setter = t.type.get('__set__')
        if setter:
            if isinstance(setter.node, FuncDef):
                super_info = t.type.get_containing_type_info('__set__')
                assert super_info
                if setter.type:
                    # Map the `__set__` signature from the supertype that defines it onto `t`.
                    setter_type = get_proper_type(map_type_from_supertype(setter.type, t.type, super_info))
                else:
                    # `__set__` exists but is unannotated: nothing to infer from.
                    return AnyType(TypeOfAny.unannotated)
                if isinstance(setter_type, CallableType) and setter_type.arg_kinds == [
                    ARG_POS,
                    ARG_POS,
                    ARG_POS,
                ]:
                    # The third positional parameter of `__set__` is the assigned value;
                    # expand any type variables using the concrete instance `t`.
                    return expand_type_by_instance(setter_type.arg_types[2], t)
                else:
                    self._api.fail(f'Unsupported signature for "__set__" in "{t.type.name}"', context)
            else:
                self._api.fail(f'Unsupported "__set__" in "{t.type.name}"', context)

        return default
    def add_initializer(
        self, fields: list[PydanticModelField], config: ModelConfigData, is_settings: bool, is_root_model: bool
    ) -> None:
        """Adds a fields-aware `__init__` method to the class.

        The added `__init__` will be annotated with types vs. all `Any` depending on the plugin settings.

        Args:
            fields: The collected pydantic fields of the model.
            config: The resolved model config values.
            is_settings: Whether the class is a pydantic-settings `BaseSettings` subclass.
            is_root_model: Whether the class is a `RootModel` subclass.
        """
        if '__init__' in self._cls.info.names and not self._cls.info.names['__init__'].plugin_generated:
            return  # Don't generate an __init__ if one already exists

        typed = self.plugin_config.init_typed
        model_strict = bool(config.strict)
        # Argument names use aliases only when population by field name is not enabled
        # and validation by alias has not been explicitly disabled.
        use_alias = not (config.validate_by_name or config.populate_by_name) and config.validate_by_alias is not False
        requires_dynamic_aliases = bool(config.has_alias_generator and not config.validate_by_name)
        args = self.get_field_arguments(
            fields,
            typed=typed,
            model_strict=model_strict,
            requires_dynamic_aliases=requires_dynamic_aliases,
            use_alias=use_alias,
            is_settings=is_settings,
            is_root_model=is_root_model,
            force_typevars_invariant=True,
        )

        if is_settings:
            # Copy the single-underscore-prefixed keyword arguments from the
            # `BaseSettings.__init__` signature into the generated signature.
            base_settings_node = self._api.lookup_fully_qualified(BASESETTINGS_FULLNAME).node
            assert isinstance(base_settings_node, TypeInfo)
            if '__init__' in base_settings_node.names:
                base_settings_init_node = base_settings_node.names['__init__'].node
                assert isinstance(base_settings_init_node, FuncDef)
                if base_settings_init_node is not None and base_settings_init_node.type is not None:
                    func_type = base_settings_init_node.type
                    assert isinstance(func_type, CallableType)
                    for arg_idx, arg_name in enumerate(func_type.arg_names):
                        # Keep only names with exactly one leading underscore (settings kwargs).
                        if arg_name is None or arg_name.startswith('__') or not arg_name.startswith('_'):
                            continue
                        analyzed_variable_type = self._api.anal_type(func_type.arg_types[arg_idx])
                        if analyzed_variable_type is not None and arg_name == '_cli_settings_source':
                            # _cli_settings_source is defined as CliSettingsSource[Any], and as such
                            # the Any causes issues with --disallow-any-explicit. As a workaround, change
                            # the Any type (as if CliSettingsSource was left unparameterized):
                            analyzed_variable_type = analyzed_variable_type.accept(
                                ChangeExplicitTypeOfAny(TypeOfAny.from_omitted_generics)
                            )
                        variable = Var(arg_name, analyzed_variable_type)
                        args.append(Argument(variable, analyzed_variable_type, None, ARG_OPT))

        if not self.should_init_forbid_extra(fields, config):
            # Accept arbitrary extra keyword arguments via a trailing `**kwargs: Any`.
            var = Var('kwargs')
            args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2))

        add_method(self._api, self._cls, '__init__', args=args, return_type=NoneType())
    def add_model_construct_method(
        self,
        fields: list[PydanticModelField],
        config: ModelConfigData,
        is_settings: bool,
        is_root_model: bool,
    ) -> None:
        """Adds a fully typed `model_construct` classmethod to the class.

        Similar to the fields-aware __init__ method, but always uses the field names (not aliases),
        and does not treat settings fields as optional.
        """
        # Build the `_fields_set: set[str] | None` optional argument accepted by `model_construct`.
        set_str = self._api.named_type(f'{BUILTINS_NAME}.set', [self._api.named_type(f'{BUILTINS_NAME}.str')])
        optional_set_str = UnionType([set_str, NoneType()])
        fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT)
        # Compute argument types under the same strict-optional setting as the current type-check run.
        with state.strict_optional_set(self._api.options.strict_optional):
            args = self.get_field_arguments(
                fields,
                typed=True,
                model_strict=bool(config.strict),
                requires_dynamic_aliases=False,
                use_alias=False,
                is_settings=is_settings,
                is_root_model=is_root_model,
            )
            if not self.should_init_forbid_extra(fields, config):
                # Accept arbitrary extra keyword arguments via a trailing `**kwargs: Any`.
                var = Var('kwargs')
                args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2))

            # For root models `_fields_set` is appended, otherwise prepended — presumably so the
            # `root` field stays first for root models; confirm against runtime `model_construct`.
            args = args + [fields_set_argument] if is_root_model else [fields_set_argument] + args

            add_method(
                self._api,
                self._cls,
                'model_construct',
                args=args,
                return_type=fill_typevars(self._cls.info),
                is_classmethod=True,
            )
    def set_frozen(self, fields: list[PydanticModelField], api: SemanticAnalyzerPluginInterface, frozen: bool) -> None:
        """Marks all fields as properties so that attempts to set them trigger mypy errors.

        This is the same approach used by the attrs and dataclasses plugins.

        Args:
            fields: The collected pydantic fields of the model.
            api: The semantic analyzer plugin interface, used when synthesizing missing `Var`s.
            frozen: Whether the whole model is frozen (individual fields may also be frozen).
        """
        info = self._cls.info
        for field in fields:
            sym_node = info.names.get(field.name)
            if sym_node is not None:
                var = sym_node.node
                if isinstance(var, Var):
                    # Read-only if the model is frozen or this particular field is frozen.
                    var.is_property = frozen or field.is_frozen
                elif isinstance(var, PlaceholderNode) and not self._api.final_iteration:
                    # See https://github.com/pydantic/pydantic/issues/5191 to hit this branch for test coverage
                    self._api.defer()
                else:  # pragma: no cover
                    # I don't know whether it's possible to hit this branch, but I've added it for safety
                    try:
                        var_str = str(var)
                    except TypeError:
                        # This happens for PlaceholderNode; perhaps it will happen for other types in the future..
                        var_str = repr(var)
                    detail = f'sym_node.node: {var_str} (of type {var.__class__})'
                    error_unexpected_behavior(detail, self._api, self._cls)
            else:
                # Field has no entry in this class's symbol table (presumably declared on a base
                # class — TODO confirm); synthesize a `Var` and register it here.
                var = field.to_var(info, api, use_alias=False)
                var.info = info
                var.is_property = frozen
                var._fullname = info.fullname + '.' + var.name
                info.names[var.name] = SymbolTableNode(MDEF, var)
974 def get_config_update(self, name: str, arg: Expression, lax_extra: bool = False) -> ModelConfigData | None: 1abcdefg
975 """Determines the config update due to a single kwarg in the ConfigDict definition.
977 Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int)
978 """
979 if name not in self.tracked_config_fields: 1abcdefg
980 return None 1abcdefg
981 if name == 'extra': 1abcdefg
982 if isinstance(arg, StrExpr): 1abcdefg
983 forbid_extra = arg.value == 'forbid' 1abcdefg
984 elif isinstance(arg, MemberExpr): 1abcdefg
985 forbid_extra = arg.name == 'forbid' 1abcdefg
986 else:
987 if not lax_extra: 1abcdefg
988 # Only emit an error for other types of `arg` (e.g., `NameExpr`, `ConditionalExpr`, etc.) when
989 # reading from a config class, etc. If a ConfigDict is used, then we don't want to emit an error
990 # because you'll get type checking from the ConfigDict itself.
991 #
992 # It would be nice if we could introspect the types better otherwise, but I don't know what the API
993 # is to evaluate an expr into its type and then check if that type is compatible with the expected
994 # type. Note that you can still get proper type checking via: `model_config = ConfigDict(...)`, just
995 # if you don't use an explicit string, the plugin won't be able to infer whether extra is forbidden.
996 error_invalid_config_value(name, self._api, arg) 1abcdefg
997 return None 1abcdefg
998 return ModelConfigData(forbid_extra=forbid_extra) 1abcdefg
999 if name == 'alias_generator': 1abcdefg
1000 has_alias_generator = True 1abcdefg
1001 if isinstance(arg, NameExpr) and arg.fullname == 'builtins.None': 1abcdefg
1002 has_alias_generator = False 1abcdefg
1003 return ModelConfigData(has_alias_generator=has_alias_generator) 1abcdefg
1004 if isinstance(arg, NameExpr) and arg.fullname in ('builtins.True', 'builtins.False'): 1abcdefg
1005 return ModelConfigData(**{name: arg.fullname == 'builtins.True'}) 1abcdefg
1006 error_invalid_config_value(name, self._api, arg) 1abcdefg
1007 return None 1abcdefg
1009 @staticmethod 1abcdefg
1010 def get_has_default(stmt: AssignmentStmt) -> bool: 1abcdefg
1011 """Returns a boolean indicating whether the field defined in `stmt` is a required field."""
1012 expr = stmt.rvalue 1abcdefg
1013 if isinstance(expr, TempNode): 1abcdefg
1014 # TempNode means annotation-only, so has no default
1015 return False 1abcdefg
1016 if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME: 1abcdefg
1017 # The "default value" is a call to `Field`; at this point, the field has a default if and only if:
1018 # * there is a positional argument that is not `...`
1019 # * there is a keyword argument named "default" that is not `...`
1020 # * there is a "default_factory" that is not `None`
1021 for arg, name in zip(expr.args, expr.arg_names): 1abcdefg
1022 # If name is None, then this arg is the default because it is the only positional argument.
1023 if name is None or name == 'default': 1abcdefg
1024 return arg.__class__ is not EllipsisExpr 1abcdefg
1025 if name == 'default_factory': 1abcdefg
1026 return not (isinstance(arg, NameExpr) and arg.fullname == 'builtins.None') 1abcdefg
1027 return False 1abcdefg
1028 # Has no default if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`)
1029 return not isinstance(expr, EllipsisExpr) 1abcdefg
1031 @staticmethod 1abcdefg
1032 def get_strict(stmt: AssignmentStmt) -> bool | None: 1abcdefg
1033 """Returns a the `strict` value of a field if defined, otherwise `None`."""
1034 expr = stmt.rvalue 1abcdefg
1035 if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME: 1abcdefg
1036 for arg, name in zip(expr.args, expr.arg_names): 1abcdefg
1037 if name != 'strict': 1abcdefg
1038 continue 1abcdefg
1039 if isinstance(arg, NameExpr): 1039 ↛ 1044line 1039 didn't jump to line 1044 because the condition on line 1039 was always true1abcdefg
1040 if arg.fullname == 'builtins.True': 1abcdefg
1041 return True 1abcdefg
1042 elif arg.fullname == 'builtins.False': 1042 ↛ 1044line 1042 didn't jump to line 1044 because the condition on line 1042 was always true1abcdefg
1043 return False 1abcdefg
1044 return None
1045 return None 1abcdefg
1047 @staticmethod 1abcdefg
1048 def get_alias_info(stmt: AssignmentStmt) -> tuple[str | None, bool]: 1abcdefg
1049 """Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`.
1051 `has_dynamic_alias` is True if and only if an alias is provided, but not as a string literal.
1052 If `has_dynamic_alias` is True, `alias` will be None.
1053 """
1054 expr = stmt.rvalue 1abcdefg
1055 if isinstance(expr, TempNode): 1abcdefg
1056 # TempNode means annotation-only
1057 return None, False 1abcdefg
1059 if not ( 1abc
1060 isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME
1061 ):
1062 # Assigned value is not a call to pydantic.fields.Field
1063 return None, False 1abcdefg
1065 if 'validation_alias' in expr.arg_names: 1abcdefg
1066 arg = expr.args[expr.arg_names.index('validation_alias')] 1abcdefg
1067 elif 'alias' in expr.arg_names: 1abcdefg
1068 arg = expr.args[expr.arg_names.index('alias')] 1abcdefg
1069 else:
1070 return None, False 1abcdefg
1072 if isinstance(arg, StrExpr): 1abcdefg
1073 return arg.value, False 1abcdefg
1074 else:
1075 return None, True 1abcdefg
1077 @staticmethod 1abcdefg
1078 def is_field_frozen(stmt: AssignmentStmt) -> bool: 1abcdefg
1079 """Returns whether the field is frozen, extracted from the declaration of the field defined in `stmt`.
1081 Note that this is only whether the field was declared to be frozen in a `<field_name> = Field(frozen=True)`
1082 sense; this does not determine whether the field is frozen because the entire model is frozen; that is
1083 handled separately.
1084 """
1085 expr = stmt.rvalue 1abcdefg
1086 if isinstance(expr, TempNode): 1abcdefg
1087 # TempNode means annotation-only
1088 return False 1abcdefg
1090 if not ( 1abc
1091 isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME
1092 ):
1093 # Assigned value is not a call to pydantic.fields.Field
1094 return False 1abcdefg
1096 for i, arg_name in enumerate(expr.arg_names): 1abcdefg
1097 if arg_name == 'frozen': 1abcdefg
1098 arg = expr.args[i] 1abcdefg
1099 return isinstance(arg, NameExpr) and arg.fullname == 'builtins.True' 1abcdefg
1100 return False 1abcdefg
1102 def get_field_arguments( 1abcdefg
1103 self,
1104 fields: list[PydanticModelField],
1105 typed: bool,
1106 model_strict: bool,
1107 use_alias: bool,
1108 requires_dynamic_aliases: bool,
1109 is_settings: bool,
1110 is_root_model: bool,
1111 force_typevars_invariant: bool = False,
1112 ) -> list[Argument]:
1113 """Helper function used during the construction of the `__init__` and `model_construct` method signatures.
1115 Returns a list of mypy Argument instances for use in the generated signatures.
1116 """
1117 info = self._cls.info 1abcdefg
1118 arguments = [ 1abcdefg
1119 field.to_argument(
1120 info,
1121 typed=typed,
1122 model_strict=model_strict,
1123 force_optional=requires_dynamic_aliases or is_settings,
1124 use_alias=use_alias,
1125 api=self._api,
1126 force_typevars_invariant=force_typevars_invariant,
1127 is_root_model_root=is_root_model and field.name == 'root',
1128 )
1129 for field in fields
1130 if not (use_alias and field.has_dynamic_alias)
1131 ]
1132 return arguments 1abcdefg
1134 def should_init_forbid_extra(self, fields: list[PydanticModelField], config: ModelConfigData) -> bool: 1abcdefg
1135 """Indicates whether the generated `__init__` should get a `**kwargs` at the end of its signature.
1137 We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to,
1138 *unless* a required dynamic alias is present (since then we can't determine a valid signature).
1139 """
1140 if not (config.validate_by_name or config.populate_by_name): 1abcdefg
1141 if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)): 1abcdefg
1142 return False 1abcdefg
1143 if config.forbid_extra: 1abcdefg
1144 return True 1abcdefg
1145 return self.plugin_config.init_forbid_extra 1abcdefg
1147 @staticmethod 1abcdefg
1148 def is_dynamic_alias_present(fields: list[PydanticModelField], has_alias_generator: bool) -> bool: 1abcdefg
1149 """Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be
1150 determined during static analysis.
1151 """
1152 for field in fields: 1abcdefg
1153 if field.has_dynamic_alias: 1abcdefg
1154 return True 1abcdefg
1155 if has_alias_generator: 1abcdefg
1156 for field in fields: 1abcdefg
1157 if field.alias is None: 1abcdefg
1158 return True 1abcdefg
1159 return False 1abcdefg
class ChangeExplicitTypeOfAny(TypeTranslator):
    """A type translator used to change type of Any's, if explicit."""

    def __init__(self, type_of_any: int) -> None:
        super().__init__()
        # Replacement `TypeOfAny` value used for every explicit `Any` encountered.
        self._type_of_any = type_of_any

    def visit_any(self, t: AnyType) -> Type:  # noqa: D102
        if t.type_of_any != TypeOfAny.explicit:
            return t
        return t.copy_modified(type_of_any=self._type_of_any)
class ModelConfigData:
    """Pydantic mypy plugin model config class.

    Each attribute is tri-state: `True`/`False` when explicitly configured, `None` when unset.
    """

    def __init__(
        self,
        forbid_extra: bool | None = None,
        frozen: bool | None = None,
        from_attributes: bool | None = None,
        populate_by_name: bool | None = None,
        validate_by_alias: bool | None = None,
        validate_by_name: bool | None = None,
        has_alias_generator: bool | None = None,
        strict: bool | None = None,
    ):
        self.forbid_extra = forbid_extra
        self.frozen = frozen
        self.from_attributes = from_attributes
        self.populate_by_name = populate_by_name
        self.validate_by_alias = validate_by_alias
        self.validate_by_name = validate_by_name
        self.has_alias_generator = has_alias_generator
        self.strict = strict

    def get_values_dict(self) -> dict[str, Any]:
        """Returns a dict of Pydantic model config names to their values.

        It includes the config if config value is not `None`.
        """
        return {key: value for key, value in vars(self).items() if value is not None}

    def update(self, config: ModelConfigData | None) -> None:
        """Update Pydantic model config values."""
        if config is None:
            return
        # Only explicitly-set (non-None) values from `config` override this instance.
        for key, value in config.get_values_dict().items():
            setattr(self, key, value)

    def setdefault(self, key: str, value: Any) -> None:
        """Set default value for Pydantic model config if config value is `None`."""
        if getattr(self, key) is None:
            setattr(self, key, value)
def is_root_model(info: TypeInfo) -> bool:
    """Return whether the type info is a root model subclass (or the `RootModel` class itself)."""
    # NOTE(review): relies on `TypeInfo.has_base` matching the class itself as well as its
    # ancestors, consistent with the docstring's parenthetical — confirm against mypy's API.
    return info.has_base(ROOT_MODEL_FULLNAME)
# Error codes for all pydantic-specific errors emitted by this plugin, grouped under the
# 'Pydantic' category. Note that several field-related errors share the 'pydantic-field' code.
ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_attributes call', 'Pydantic')
ERROR_CONFIG = ErrorCode('pydantic-config', 'Invalid config value', 'Pydantic')
ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed', 'Pydantic')
ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior', 'Pydantic')
ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed', 'Pydantic')
ERROR_FIELD_DEFAULTS = ErrorCode('pydantic-field', 'Invalid Field defaults', 'Pydantic')
ERROR_EXTRA_FIELD_ROOT_MODEL = ErrorCode('pydantic-field', 'Extra field on RootModel subclass', 'Pydantic')
def error_from_attributes(model_name: str, api: CheckerPluginInterface, context: Context) -> None:
    """Emits an error when the model does not have `from_attributes=True`."""
    message = f'"{model_name}" does not have from_attributes=True'
    api.fail(message, context, code=ERROR_ORM)
def error_invalid_config_value(name: str, api: SemanticAnalyzerPluginInterface, context: Context) -> None:
    """Emits an error when the config value is invalid."""
    message = f'Invalid value for "Config.{name}"'
    api.fail(message, context, code=ERROR_CONFIG)
def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
    """Emits required dynamic aliases error.

    This will be called when `warn_required_dynamic_aliases=True`.
    """
    message = 'Required dynamic aliases disallowed'
    api.fail(message, context, code=ERROR_ALIAS)
def error_unexpected_behavior(
    detail: str, api: CheckerPluginInterface | SemanticAnalyzerPluginInterface, context: Context
) -> None:  # pragma: no cover
    """Emits unexpected behavior error."""
    # Can't think of a good way to test this, but I confirmed it renders as desired by adding to a non-error path
    link = 'https://github.com/pydantic/pydantic/issues/new/choose'
    full_message = (
        f'The pydantic mypy plugin ran into unexpected behavior: {detail}\n'
        f'Please consider reporting this bug at {link} so we can try to fix it!'
    )
    api.fail(full_message, context, code=ERROR_UNEXPECTED)
def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
    """Emits an error when there is an untyped field in the model."""
    message = 'Untyped fields disallowed'
    api.fail(message, context, code=ERROR_UNTYPED)
def error_extra_fields_on_root_model(api: CheckerPluginInterface, context: Context) -> None:
    """Emits an error when there is more than just a root field defined for a subclass of RootModel."""
    message = 'Only `root` is allowed as a field of a `RootModel`'
    api.fail(message, context, code=ERROR_EXTRA_FIELD_ROOT_MODEL)
def add_method(
    api: SemanticAnalyzerPluginInterface | CheckerPluginInterface,
    cls: ClassDef,
    name: str,
    args: list[Argument],
    return_type: Type,
    self_type: Type | None = None,
    tvar_def: TypeVarType | None = None,
    is_classmethod: bool = False,
) -> None:
    """Very closely related to `mypy.plugins.common.add_method_to_class`, with a few pydantic-specific changes.

    Args:
        api: The plugin interface (semantic-analyzer or checker) used to resolve `builtins.function`.
        cls: The class definition to which the method is added.
        name: Name of the generated method.
        args: Arguments of the generated method, excluding the `self`/`cls` argument (added here).
        return_type: Return type of the generated method.
        self_type: Explicit type for the first argument; defaults to the (type of the) class itself.
        tvar_def: Optional type variable to attach to the generated signature.
        is_classmethod: Whether to generate a classmethod (adds a `classmethod` decorator).
    """
    info = cls.info

    # First remove any previously generated methods with the same name
    # to avoid clashes and problems in the semantic analyzer.
    if name in info.names:
        sym = info.names[name]
        if sym.plugin_generated and isinstance(sym.node, FuncDef):
            cls.defs.body.remove(sym.node)  # pragma: no cover

    # The two plugin interfaces expose different APIs for resolving `builtins.function`.
    if isinstance(api, SemanticAnalyzerPluginInterface):
        function_type = api.named_type('builtins.function')
    else:
        function_type = api.named_generic_type('builtins.function', [])

    if is_classmethod:
        self_type = self_type or TypeType(fill_typevars(info))
        first = [Argument(Var('_cls'), self_type, None, ARG_POS, True)]
    else:
        self_type = self_type or fill_typevars(info)
        # `self` is positional *ONLY* here, but this can't be expressed
        # fully in the mypy internal API. ARG_POS is the closest we can get.
        # Using ARG_POS will, however, give mypy errors if a `self` field
        # is present on a model:
        #
        #     Name "self" already defined (possibly by an import) [no-redef]
        #
        # As a workaround, we give this argument a name that will
        # never conflict. By its positional nature, this name will not
        # be used or exposed to users.
        first = [Argument(Var('__pydantic_self__'), self_type, None, ARG_POS)]
    args = first + args

    # Split the Argument objects into the parallel lists CallableType expects.
    arg_types, arg_names, arg_kinds = [], [], []
    for arg in args:
        assert arg.type_annotation, 'All arguments must be fully typed.'
        arg_types.append(arg.type_annotation)
        arg_names.append(arg.variable.name)
        arg_kinds.append(arg.kind)

    signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type)
    if tvar_def:
        signature.variables = [tvar_def]

    # The generated method has an empty (`pass`) body; only its signature matters to the checker.
    func = FuncDef(name, args, Block([PassStmt()]))
    func.info = info
    func.type = set_callable_name(signature, func)
    func.is_class = is_classmethod
    func._fullname = info.fullname + '.' + name
    func.line = info.line

    # NOTE: we would like the plugin generated node to dominate, but we still
    # need to keep any existing definitions so they get semantically analyzed.
    if name in info.names:
        # Get a nice unique name instead.
        r_name = get_unique_redefinition_name(name, info.names)
        info.names[r_name] = info.names[name]

    # Add decorator for is_classmethod
    # The dataclasses plugin claims this is unnecessary for classmethods, but not including it results in a
    # signature incompatible with the superclass, which causes mypy errors to occur for every subclass of BaseModel.
    if is_classmethod:
        func.is_decorated = True
        v = Var(name, func.type)
        v.info = info
        v._fullname = func._fullname
        v.is_classmethod = True
        dec = Decorator(func, [NameExpr('classmethod')], v)
        dec.line = info.line
        sym = SymbolTableNode(MDEF, dec)
    else:
        sym = SymbolTableNode(MDEF, func)
    sym.plugin_generated = True
    info.names[name] = sym

    info.defn.defs.body.append(func)
def parse_toml(config_file: str) -> dict[str, Any] | None:
    """Returns a dict of config keys to values.

    It reads configs from toml file and returns `None` if the file is not a toml file.
    """
    if not config_file.endswith('.toml'):
        return None

    # `tomllib` is stdlib from 3.11; fall back to the third-party `tomli` backport otherwise.
    if sys.version_info >= (3, 11):
        import tomllib as toml_
    else:
        try:
            import tomli as toml_
        except ImportError:  # pragma: no cover
            import warnings

            warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.')
            return None

    with open(config_file, mode='rb') as config_stream:
        return toml_.load(config_stream)