Coverage for pydantic/mypy.py: 90.97%
593 statements
« prev ^ index » next coverage.py v7.5.3, created at 2024-06-21 17:00 +0000
1"""This module includes classes and functions designed specifically for use with the mypy plugin."""
3from __future__ import annotations 1jklmnopqrstuvwxyzABCDfgebcahidEFGHIJKLMN
5import sys 1jklmnopqrstuvwxyzABCDfgebcahidEFGHIJKLMN
6from configparser import ConfigParser 1jklmnopqrstuvwxyzABCDfgebcahidEFGHIJKLMN
7from typing import Any, Callable, Iterator 1jklmnopqrstuvwxyzABCDfgebcahidEFGHIJKLMN
9from mypy.errorcodes import ErrorCode 1jklmnopqrstuvwxyzABCDfgebcahidEFGHIJKLMN
10from mypy.expandtype import expand_type, expand_type_by_instance 1fgebcahid
11from mypy.nodes import ( 1fgebcahid
12 ARG_NAMED,
13 ARG_NAMED_OPT,
14 ARG_OPT,
15 ARG_POS,
16 ARG_STAR2,
17 INVARIANT,
18 MDEF,
19 Argument,
20 AssignmentStmt,
21 Block,
22 CallExpr,
23 ClassDef,
24 Context,
25 Decorator,
26 DictExpr,
27 EllipsisExpr,
28 Expression,
29 FuncDef,
30 IfStmt,
31 JsonDict,
32 MemberExpr,
33 NameExpr,
34 PassStmt,
35 PlaceholderNode,
36 RefExpr,
37 Statement,
38 StrExpr,
39 SymbolTableNode,
40 TempNode,
41 TypeAlias,
42 TypeInfo,
43 Var,
44)
45from mypy.options import Options 1fgebcahid
46from mypy.plugin import ( 1fgebcahid
47 CheckerPluginInterface,
48 ClassDefContext,
49 FunctionContext,
50 MethodContext,
51 Plugin,
52 ReportConfigContext,
53 SemanticAnalyzerPluginInterface,
54)
55from mypy.plugins import dataclasses 1fgebcahid
56from mypy.plugins.common import ( 1fgebcahid
57 deserialize_and_fixup_type,
58)
59from mypy.semanal import set_callable_name 1fgebcahid
60from mypy.server.trigger import make_wildcard_trigger 1fgebcahid
61from mypy.state import state 1fgebcahid
62from mypy.typeops import map_type_from_supertype 1fgebcahid
63from mypy.types import ( 1fgebcahid
64 AnyType,
65 CallableType,
66 Instance,
67 NoneType,
68 Overloaded,
69 Type,
70 TypeOfAny,
71 TypeType,
72 TypeVarType,
73 UnionType,
74 get_proper_type,
75)
76from mypy.typevars import fill_typevars 1fgebcahid
77from mypy.util import get_unique_redefinition_name 1fgebcahid
78from mypy.version import __version__ as mypy_version 1fgebcahid
80from pydantic._internal import _fields 1fgebcahid
81from pydantic.version import parse_mypy_version 1fgebcahid
83try: 1fgebcahid
84 from mypy.types import TypeVarDef # type: ignore[attr-defined] 1fgebcahid
85except ImportError: # pragma: no cover 1fgebcahid
86 # Backward-compatible with TypeVarDef from Mypy 0.930.
87 from mypy.types import TypeVarType as TypeVarDef 1fgebcahid
# Section name for this plugin's settings in INI-style mypy config files.
CONFIGFILE_KEY = 'pydantic-mypy'
# Key under which the plugin stores per-class data in mypy's `TypeInfo.metadata`.
METADATA_KEY = 'pydantic-mypy-metadata'

# Fully qualified names of the pydantic objects the plugin hooks into.
BASEMODEL_FULLNAME = 'pydantic.main.BaseModel'
BASESETTINGS_FULLNAME = 'pydantic_settings.main.BaseSettings'
ROOT_MODEL_FULLNAME = 'pydantic.root_model.RootModel'
MODEL_METACLASS_FULLNAME = 'pydantic._internal._model_construction.ModelMetaclass'
FIELD_FULLNAME = 'pydantic.fields.Field'
DATACLASS_FULLNAME = 'pydantic.dataclasses.dataclass'
MODEL_VALIDATOR_FULLNAME = 'pydantic.functional_validators.model_validator'
# Decorators that wrap the decorated function in a classmethod at runtime
# (see PydanticModelTransformer.adjust_decorator_signatures).
DECORATOR_FULLNAMES = {
    'pydantic.functional_validators.field_validator',
    'pydantic.functional_validators.model_validator',
    'pydantic.functional_serializers.serializer',
    'pydantic.functional_serializers.model_serializer',
    'pydantic.deprecated.class_validators.validator',
    'pydantic.deprecated.class_validators.root_validator',
}


MYPY_VERSION_TUPLE = parse_mypy_version(mypy_version)
# mypy renamed its builtins module from '__builtins__' to 'builtins' in 0.930.
BUILTINS_NAME = 'builtins' if MYPY_VERSION_TUPLE >= (0, 930) else '__builtins__'

# Increment version if plugin changes and mypy caches should be invalidated
__version__ = 2
def plugin(version: str) -> type[Plugin]:
    """`version` is the mypy version string.

    We might want to use this to print a warning if the mypy version being used is
    newer, or especially older, than we expect (or need).

    Args:
        version: The mypy version string.

    Returns:
        The Pydantic mypy plugin type.
    """
    return PydanticPlugin
class PydanticPlugin(Plugin):
    """The Pydantic mypy plugin."""

    def __init__(self, options: Options) -> None:
        # Parse the plugin's config section up front; the resulting data is reported
        # back to mypy so stale caches can be invalidated when settings change.
        self.plugin_config = PydanticPluginConfig(options)
        self._plugin_data = self.plugin_config.to_data()
        super().__init__(options)

    def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], bool] | None:
        """Update Pydantic model class."""
        symbol = self.lookup_fully_qualified(fullname)
        if symbol and isinstance(symbol.node, TypeInfo):  # pragma: no branch
            # No branching may occur if the mypy cache has not been cleared
            is_model = any(base.fullname == BASEMODEL_FULLNAME for base in symbol.node.mro)
            if is_model:
                return self._pydantic_model_class_maker_callback
        return None

    def get_metaclass_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
        """Update Pydantic `ModelMetaclass` definition."""
        if fullname != MODEL_METACLASS_FULLNAME:
            return None
        return self._pydantic_model_metaclass_marker_callback

    def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None:
        """Adjust the return type of the `Field` function."""
        symbol = self.lookup_fully_qualified(fullname)
        if symbol and symbol.fullname == FIELD_FULLNAME:
            return self._pydantic_field_callback
        return None

    def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None:
        """Adjust return type of `from_orm` method call."""
        return from_attributes_callback if fullname.endswith('.from_orm') else None

    def get_class_decorator_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
        """Mark pydantic.dataclasses as dataclass.

        Mypy version 1.1.1 added support for `@dataclass_transform` decorator.
        """
        if MYPY_VERSION_TUPLE < (1, 1) and fullname == DATACLASS_FULLNAME:
            return dataclasses.dataclass_class_maker_callback  # type: ignore[return-value]
        return None

    def report_config_data(self, ctx: ReportConfigContext) -> dict[str, Any]:
        """Return all plugin config data.

        Used by mypy to determine if cache needs to be discarded.
        """
        return self._plugin_data

    def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> bool:
        # Delegate the actual class rewriting to the transformer.
        return PydanticModelTransformer(ctx.cls, ctx.reason, ctx.api, self.plugin_config).transform()

    def _pydantic_model_metaclass_marker_callback(self, ctx: ClassDefContext) -> None:
        """Reset dataclass_transform_spec attribute of ModelMetaclass.

        Let the plugin handle it. This behavior can be disabled
        if 'debug_dataclass_transform' is set to True', for testing purposes.
        """
        if self.plugin_config.debug_dataclass_transform:
            return
        metaclass = ctx.cls.info.declared_metaclass
        assert metaclass, "callback not passed from 'get_metaclass_hook'"
        if getattr(metaclass.type, 'dataclass_transform_spec', None):
            metaclass.type.dataclass_transform_spec = None

    def _pydantic_field_callback(self, ctx: FunctionContext) -> Type:
        """Extract the type of the `default` argument from the Field function, and use it as the return type.

        In particular:
        * Check whether the default and default_factory argument is specified.
        * Output an error if both are specified.
        * Retrieve the type of the argument which is specified, and use it as return type for the function.
        """
        any_type = ctx.default_return_type

        assert ctx.callee_arg_names[0] == 'default', '"default" is no longer first argument in Field()'
        assert ctx.callee_arg_names[1] == 'default_factory', '"default_factory" is no longer second argument in Field()'
        default_args, default_factory_args = ctx.args[0], ctx.args[1]

        if default_args and default_factory_args:
            error_default_and_default_factory_specified(ctx.api, ctx.context)
            return any_type

        if default_args:
            # Fall back to the default Any type when the field is required (`Field(...)`).
            if not isinstance(default_args[0], EllipsisExpr):
                return ctx.arg_types[0][0]
        elif default_factory_args:
            factory_type = ctx.arg_types[1][0]

            # Functions which use `ParamSpec` can be overloaded, exposing the callable's types as a parameter
            # Pydantic calls the default factory without any argument, so we retrieve the first item
            if isinstance(factory_type, Overloaded):
                factory_type = factory_type.items[0]

            if isinstance(factory_type, CallableType):
                ret_type = factory_type.ret_type
                # mypy doesn't think `ret_type` has `args`, you'd think mypy should know,
                # add this check in case it varies by version
                type_args = getattr(ret_type, 'args', None)
                if type_args and all(isinstance(arg, TypeVarType) for arg in type_args):
                    # Looks like the default factory is a type like `list` or `dict`, replace all args with `Any`
                    ret_type.args = tuple(any_type for _ in type_args)  # type: ignore[attr-defined]
                return ret_type

        return any_type
class PydanticPluginConfig:
    """A Pydantic mypy plugin config holder.

    Attributes:
        init_forbid_extra: Whether to add a `**kwargs` at the end of the generated `__init__` signature.
        init_typed: Whether to annotate fields in the generated `__init__`.
        warn_required_dynamic_aliases: Whether to raise required dynamic aliases error.
        debug_dataclass_transform: Whether to not reset `dataclass_transform_spec` attribute
            of `ModelMetaclass` for testing purposes.
    """

    __slots__ = (
        'init_forbid_extra',
        'init_typed',
        'warn_required_dynamic_aliases',
        'debug_dataclass_transform',
    )
    init_forbid_extra: bool
    init_typed: bool
    warn_required_dynamic_aliases: bool
    debug_dataclass_transform: bool  # undocumented

    def __init__(self, options: Options) -> None:
        if options.config_file is None:  # pragma: no cover
            return

        toml_config = parse_toml(options.config_file)
        if toml_config is None:
            # Not a TOML file: fall back to INI-style parsing (mypy.ini / setup.cfg).
            ini_parser = ConfigParser()
            ini_parser.read(options.config_file)
            for key in self.__slots__:
                setattr(self, key, ini_parser.getboolean(CONFIGFILE_KEY, key, fallback=False))
        else:
            # pyproject.toml: settings live under [tool.pydantic-mypy].
            section = toml_config.get('tool', {}).get('pydantic-mypy', {})
            for key in self.__slots__:
                value = section.get(key, False)
                if not isinstance(value, bool):
                    raise ValueError(f'Configuration value must be a boolean for key: {key}')
                setattr(self, key, value)

    def to_data(self) -> dict[str, Any]:
        """Returns a dict of config names to their values."""
        return {key: getattr(self, key) for key in self.__slots__}
def from_attributes_callback(ctx: MethodContext) -> Type:
    """Raise an error if from_attributes is not enabled."""
    model_type: Instance
    receiver = ctx.type
    if isinstance(receiver, TypeType):
        receiver = receiver.item
    if isinstance(receiver, CallableType) and isinstance(receiver.ret_type, Instance):
        # `from_orm` called on the class itself.
        model_type = receiver.ret_type
    elif isinstance(receiver, Instance):
        # Called on an instance (unusual, but still valid).
        model_type = receiver
    else:  # pragma: no cover
        detail = f'ctx.type: {receiver} (of type {receiver.__class__.__name__})'
        error_unexpected_behavior(detail, ctx.api, ctx.context)
        return ctx.default_return_type

    metadata = model_type.type.metadata.get(METADATA_KEY)
    if metadata is None:
        # Not a class this plugin processed; leave the call alone.
        return ctx.default_return_type

    if metadata.get('config', {}).get('from_attributes') is not True:
        error_from_attributes(model_type.type.name, ctx.api, ctx.context)
    return ctx.default_return_type
class PydanticModelField:
    """Based on mypy.plugins.dataclasses.DataclassAttribute.

    Holds everything the plugin knows about a single model field, and is
    serialized into `TypeInfo.metadata` so subclasses can rebuild it.
    """

    def __init__(
        self,
        name: str,
        alias: str | None,
        has_dynamic_alias: bool,
        has_default: bool,
        line: int,
        column: int,
        type: Type | None,
        info: TypeInfo,
    ):
        self.name = name
        self.alias = alias
        self.has_dynamic_alias = has_dynamic_alias
        self.has_default = has_default
        self.line = line
        self.column = column
        # mypy type of the field; may still be None mid-analysis (see transform()).
        self.type = type
        # TypeInfo of the class this field was collected from (used for Self/typevar expansion).
        self.info = info

    def to_argument(
        self,
        current_info: TypeInfo,
        typed: bool,
        force_optional: bool,
        use_alias: bool,
        api: SemanticAnalyzerPluginInterface,
        force_typevars_invariant: bool,
    ) -> Argument:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.to_argument.

        Builds the `__init__`/`model_construct` argument for this field: a keyword
        argument, optional when the field has a default or `force_optional` is set.
        """
        variable = self.to_var(current_info, api, use_alias, force_typevars_invariant)
        # NOTE(review): `force_typevars_invariant` is forwarded to `to_var` above but not
        # to this `expand_type` call. It appears to rely on `expand_type` having already
        # mutated `self.type` during the `to_var` call — confirm this is intentional.
        type_annotation = self.expand_type(current_info, api) if typed else AnyType(TypeOfAny.explicit)
        return Argument(
            variable=variable,
            type_annotation=type_annotation,
            initializer=None,
            kind=ARG_NAMED_OPT if force_optional or self.has_default else ARG_NAMED,
        )

    def expand_type(
        self, current_info: TypeInfo, api: SemanticAnalyzerPluginInterface, force_typevars_invariant: bool = False
    ) -> Type | None:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.expand_type.

        Returns this field's type expanded in the context of `current_info`,
        replacing `Self` with the (typevar-filled) current class. Note: mutates
        `self.type` when `force_typevars_invariant` is set.
        """
        if force_typevars_invariant:
            # In some cases, mypy will emit an error "Cannot use a covariant type variable as a parameter"
            # To prevent that, we add an option to replace typevars with invariant ones while building certain
            # method signatures (in particular, `__init__`). There may be a better way to do this, if this causes
            # us problems in the future, we should look into why the dataclasses plugin doesn't have this issue.
            if isinstance(self.type, TypeVarType):
                modified_type = self.type.copy_modified()
                modified_type.variance = INVARIANT
                self.type = modified_type

        # The getattr is used to prevent errors in legacy versions of mypy without the `self_type` attribute.
        if self.type is not None and getattr(self.info, 'self_type', None) is not None:
            # In general, it is not safe to call `expand_type()` during semantic analyzis,
            # however this plugin is called very late, so all types should be fully ready.
            # Also, it is tricky to avoid eager expansion of Self types here (e.g. because
            # we serialize attributes).
            with state.strict_optional_set(api.options.strict_optional):
                filled_with_typevars = fill_typevars(current_info)
                if force_typevars_invariant:
                    for arg in filled_with_typevars.args:
                        if isinstance(arg, TypeVarType):
                            arg.variance = INVARIANT
                return expand_type(self.type, {self.info.self_type.id: filled_with_typevars})
        return self.type

    def to_var(
        self,
        current_info: TypeInfo,
        api: SemanticAnalyzerPluginInterface,
        use_alias: bool,
        force_typevars_invariant: bool = False,
    ) -> Var:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.to_var.

        Creates a `Var` named after the field (or its alias when `use_alias` is set
        and a static alias exists) with the expanded field type.
        """
        if use_alias and self.alias is not None:
            name = self.alias
        else:
            name = self.name

        return Var(name, self.expand_type(current_info, api, force_typevars_invariant))

    def serialize(self) -> JsonDict:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.serialize."""
        assert self.type
        return {
            'name': self.name,
            'alias': self.alias,
            'has_dynamic_alias': self.has_dynamic_alias,
            'has_default': self.has_default,
            'line': self.line,
            'column': self.column,
            'type': self.type.serialize(),
        }

    @classmethod
    def deserialize(cls, info: TypeInfo, data: JsonDict, api: SemanticAnalyzerPluginInterface) -> PydanticModelField:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize."""
        data = data.copy()
        typ = deserialize_and_fixup_type(data.pop('type'), api)
        return cls(type=typ, info=info, **data)

    def expand_typevar_from_subtype(self, sub_type: TypeInfo, api: SemanticAnalyzerPluginInterface) -> None:
        """Expands type vars in the context of a subtype when an attribute is inherited
        from a generic super type.
        """
        if self.type is not None:
            with state.strict_optional_set(api.options.strict_optional):
                self.type = map_type_from_supertype(self.type, sub_type, self.info)
class PydanticModelClassVar:
    """Based on mypy.plugins.dataclasses.DataclassAttribute.

    ClassVars are ignored by subclasses.

    Attributes:
        name: the ClassVar name
    """

    def __init__(self, name):
        self.name = name

    @classmethod
    def deserialize(cls, data: JsonDict) -> PydanticModelClassVar:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize."""
        # Copy so the caller's dict is never consumed/mutated.
        return cls(**dict(data))

    def serialize(self) -> JsonDict:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.serialize."""
        return {'name': self.name}
class PydanticModelTransformer:
    """Transform the BaseModel subclass according to the plugin settings.

    Attributes:
        tracked_config_fields: A set of field configs that the plugin has to track their value.
    """

    # Only these `model_config` / `class Config` keys affect what the plugin generates.
    tracked_config_fields: set[str] = {
        'extra',
        'frozen',
        'from_attributes',
        'populate_by_name',
        'alias_generator',
    }
    def __init__(
        self,
        cls: ClassDef,
        reason: Expression | Statement,
        api: SemanticAnalyzerPluginInterface,
        plugin_config: PydanticPluginConfig,
    ) -> None:
        # The class being transformed, the hook trigger that caused the callback,
        # and the semantic analyzer interface used to modify the class.
        self._cls = cls
        self._reason = reason
        self._api = api

        self.plugin_config = plugin_config
    def transform(self) -> bool:
        """Configures the BaseModel subclass according to the plugin settings.

        In particular:

        * determines the model config and fields,
        * adds a fields-aware signature for the initializer and construct methods
        * freezes the class if frozen = True
        * stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses

        Returns `False` (requesting another semantic-analysis pass) when fields or
        class vars are not fully resolved yet; `True` once the class is processed.
        """
        info = self._cls.info
        # Substring (not equality) match on the fullname — presumably to also catch
        # RootModel-derived fullnames; confirm against upstream intent.
        is_root_model = any(ROOT_MODEL_FULLNAME in base.fullname for base in info.mro[:-1])
        config = self.collect_config()
        fields, class_vars = self.collect_fields_and_class_vars(config, is_root_model)
        if fields is None or class_vars is None:
            # Some definitions are not ready. We need another pass.
            return False
        for field in fields:
            if field.type is None:
                # A field's type is still unresolved: defer to a later pass.
                return False

        is_settings = any(base.fullname == BASESETTINGS_FULLNAME for base in info.mro[:-1])
        self.add_initializer(fields, config, is_settings, is_root_model)
        if not is_root_model:
            self.add_model_construct_method(fields, config, is_settings)
        self.set_frozen(fields, self._api, frozen=config.frozen is True)

        self.adjust_decorator_signatures()

        # Persist what we learned so subclasses (processed later) can inherit it.
        info.metadata[METADATA_KEY] = {
            'fields': {field.name: field.serialize() for field in fields},
            'class_vars': {class_var.name: class_var.serialize() for class_var in class_vars},
            'config': config.get_values_dict(),
        }

        return True
522 def adjust_decorator_signatures(self) -> None: 1fgebcahid
523 """When we decorate a function `f` with `pydantic.validator(...)`, `pydantic.field_validator`
524 or `pydantic.serializer(...)`, mypy sees `f` as a regular method taking a `self` instance,
525 even though pydantic internally wraps `f` with `classmethod` if necessary.
527 Teach mypy this by marking any function whose outermost decorator is a `validator()`,
528 `field_validator()` or `serializer()` call as a `classmethod`.
529 """
530 for name, sym in self._cls.info.names.items(): 1fgebcahid
531 if isinstance(sym.node, Decorator): 1fgebcahid
532 first_dec = sym.node.original_decorators[0] 1fgebcahid
533 if ( 1fgebcahi
534 isinstance(first_dec, CallExpr)
535 and isinstance(first_dec.callee, NameExpr)
536 and first_dec.callee.fullname in DECORATOR_FULLNAMES
537 # @model_validator(mode="after") is an exception, it expects a regular method
538 and not (
539 first_dec.callee.fullname == MODEL_VALIDATOR_FULLNAME
540 and any(
541 first_dec.arg_names[i] == 'mode' and isinstance(arg, StrExpr) and arg.value == 'after'
542 for i, arg in enumerate(first_dec.args)
543 )
544 )
545 ):
546 # TODO: Only do this if the first argument of the decorated function is `cls`
547 sym.node.func.is_class = True 1fgebcahid
    def collect_config(self) -> ModelConfigData:  # noqa: C901 (ignore complexity)
        """Collects the values of the config attributes that are used by the plugin, accounting for parent classes."""
        cls = self._cls
        config = ModelConfigData()

        has_config_kwargs = False
        has_config_from_namespace = False

        # Handle `class MyModel(BaseModel, <name>=<expr>, ...):`
        for name, expr in cls.keywords.items():
            config_data = self.get_config_update(name, expr)
            if config_data:
                has_config_kwargs = True
                config.update(config_data)

        # Handle `model_config`
        # NOTE: `stmt` is intentionally left bound after the loop; the post-loop
        # alias-generator warning below reuses it as the error location.
        stmt: Statement | None = None
        for stmt in cls.defs.body:
            if not isinstance(stmt, (AssignmentStmt, ClassDef)):
                continue

            if isinstance(stmt, AssignmentStmt):
                lhs = stmt.lvalues[0]
                if not isinstance(lhs, NameExpr) or lhs.name != 'model_config':
                    continue

                if isinstance(stmt.rvalue, CallExpr):  # calls to `dict` or `ConfigDict`
                    for arg_name, arg in zip(stmt.rvalue.arg_names, stmt.rvalue.args):
                        if arg_name is None:
                            # Positional / star args carry no config key; skip them.
                            continue
                        config.update(self.get_config_update(arg_name, arg, lax_extra=True))
                elif isinstance(stmt.rvalue, DictExpr):  # dict literals
                    for key_expr, value_expr in stmt.rvalue.items:
                        if not isinstance(key_expr, StrExpr):
                            continue
                        config.update(self.get_config_update(key_expr.value, value_expr))

            elif isinstance(stmt, ClassDef):
                if stmt.name != 'Config':  # 'deprecated' Config-class
                    continue
                for substmt in stmt.defs.body:
                    if not isinstance(substmt, AssignmentStmt):
                        continue
                    lhs = substmt.lvalues[0]
                    if not isinstance(lhs, NameExpr):
                        continue
                    config.update(self.get_config_update(lhs.name, substmt.rvalue))

            # At this point `stmt` is a recognized config source in the class body;
            # mixing it with class-keyword config is ambiguous, so report and stop.
            if has_config_kwargs:
                self._api.fail(
                    'Specifying config in two places is ambiguous, use either Config attribute or class kwargs',
                    cls,
                )
                break

            has_config_from_namespace = True

        if has_config_kwargs or has_config_from_namespace:
            if (
                stmt
                and config.has_alias_generator
                and not config.populate_by_name
                and self.plugin_config.warn_required_dynamic_aliases
            ):
                error_required_dynamic_aliases(self._api, stmt)

        for info in cls.info.mro[1:]:  # 0 is the current class
            if METADATA_KEY not in info.metadata:
                continue

            # Each class depends on the set of fields in its ancestors
            self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname))
            # Inherit ancestor config values without overriding ones set on this class.
            for name, value in info.metadata[METADATA_KEY]['config'].items():
                config.setdefault(name, value)
        return config
    def collect_fields_and_class_vars(
        self, model_config: ModelConfigData, is_root_model: bool
    ) -> tuple[list[PydanticModelField] | None, list[PydanticModelClassVar] | None]:
        """Collects the fields for the model, accounting for parent classes."""
        cls = self._cls

        # First, collect fields and ClassVars belonging to any class in the MRO, ignoring duplicates.
        #
        # We iterate through the MRO in reverse because attrs defined in the parent must appear
        # earlier in the attributes list than attrs defined in the child. See:
        # https://docs.python.org/3/library/dataclasses.html#inheritance
        #
        # However, we also want fields defined in the subtype to override ones defined
        # in the parent. We can implement this via a dict without disrupting the attr order
        # because dicts preserve insertion order in Python 3.7+.
        found_fields: dict[str, PydanticModelField] = {}
        found_class_vars: dict[str, PydanticModelClassVar] = {}
        for info in reversed(cls.info.mro[1:-1]):  # 0 is the current class, -2 is BaseModel, -1 is object
            # if BASEMODEL_METADATA_TAG_KEY in info.metadata and BASEMODEL_METADATA_KEY not in info.metadata:
            #     # We haven't processed the base class yet. Need another pass.
            #     return None, None
            if METADATA_KEY not in info.metadata:
                continue

            # Each class depends on the set of attributes in its dataclass ancestors.
            self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname))

            for name, data in info.metadata[METADATA_KEY]['fields'].items():
                field = PydanticModelField.deserialize(info, data, self._api)
                # (The following comment comes directly from the dataclasses plugin)
                # TODO: We shouldn't be performing type operations during the main
                # semantic analysis pass, since some TypeInfo attributes might
                # still be in flux. This should be performed in a later phase.
                field.expand_typevar_from_subtype(cls.info, self._api)
                found_fields[name] = field

                # An inherited field may only be overridden by another field (a `Var`).
                sym_node = cls.info.names.get(name)
                if sym_node and sym_node.node and not isinstance(sym_node.node, Var):
                    self._api.fail(
                        'BaseModel field may only be overridden by another field',
                        sym_node.node,
                    )
            # Collect ClassVars
            for name, data in info.metadata[METADATA_KEY]['class_vars'].items():
                found_class_vars[name] = PydanticModelClassVar.deserialize(data)

        # Second, collect fields and ClassVars belonging to the current class.
        current_field_names: set[str] = set()
        current_class_vars_names: set[str] = set()
        for stmt in self._get_assignment_statements_from_block(cls.defs):
            maybe_field = self.collect_field_or_class_var_from_stmt(stmt, model_config, found_class_vars)
            if isinstance(maybe_field, PydanticModelField):
                lhs = stmt.lvalues[0]
                # RootModel subclasses may only declare the single `root` field.
                if is_root_model and lhs.name != 'root':
                    error_extra_fields_on_root_model(self._api, stmt)
                else:
                    current_field_names.add(lhs.name)
                    found_fields[lhs.name] = maybe_field
            elif isinstance(maybe_field, PydanticModelClassVar):
                lhs = stmt.lvalues[0]
                current_class_vars_names.add(lhs.name)
                found_class_vars[lhs.name] = maybe_field

        return list(found_fields.values()), list(found_class_vars.values())
690 def _get_assignment_statements_from_if_statement(self, stmt: IfStmt) -> Iterator[AssignmentStmt]: 1fgebcahid
691 for body in stmt.body: 1fgebcahid
692 if not body.is_unreachable: 692 ↛ 691line 692 didn't jump to line 691, because the condition on line 692 was always true1fgebcahid
693 yield from self._get_assignment_statements_from_block(body) 1fgebcahid
694 if stmt.else_body is not None and not stmt.else_body.is_unreachable: 694 ↛ 695line 694 didn't jump to line 695, because the condition on line 694 was never true1fgebcahid
695 yield from self._get_assignment_statements_from_block(stmt.else_body)
697 def _get_assignment_statements_from_block(self, block: Block) -> Iterator[AssignmentStmt]: 1fgebcahid
698 for stmt in block.body: 1fgebcahid
699 if isinstance(stmt, AssignmentStmt): 1fgebcahid
700 yield stmt 1fgebcahid
701 elif isinstance(stmt, IfStmt): 1fgebcahid
702 yield from self._get_assignment_statements_from_if_statement(stmt) 1fgebcahid
    def collect_field_or_class_var_from_stmt(  # noqa C901
        self, stmt: AssignmentStmt, model_config: ModelConfigData, class_vars: dict[str, PydanticModelClassVar]
    ) -> PydanticModelField | PydanticModelClassVar | None:
        """Get pydantic model field from statement.

        Args:
            stmt: The statement.
            model_config: Configuration settings for the model.
            class_vars: ClassVars already known to be defined on the model.

        Returns:
            A pydantic model field if it could find the field in statement. Otherwise, `None`.
        """
        cls = self._cls

        # Only simple name targets can be fields; `model_config` and invalid field names never are.
        lhs = stmt.lvalues[0]
        if not isinstance(lhs, NameExpr) or not _fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config':
            return None

        # `new_syntax` is False for un-annotated assignments (`x = ...`); those are only
        # acceptable when they are a recognized non-field construct (validator or ClassVar).
        if not stmt.new_syntax:
            if (
                isinstance(stmt.rvalue, CallExpr)
                and isinstance(stmt.rvalue.callee, CallExpr)
                and isinstance(stmt.rvalue.callee.callee, NameExpr)
                and stmt.rvalue.callee.callee.fullname in DECORATOR_FULLNAMES
            ):
                # This is a (possibly-reused) validator or serializer, not a field
                # In particular, it looks something like: my_validator = validator('my_field')(f)
                # Eventually, we may want to attempt to respect model_config['ignored_types']
                return None

            if lhs.name in class_vars:
                # Class vars are not fields and are not required to be annotated
                return None

            # The assignment does not have an annotation, and it's not anything else we recognize
            error_untyped_fields(self._api, stmt)
            return None

        lhs = stmt.lvalues[0]
        if not isinstance(lhs, NameExpr):
            return None

        if not _fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config':
            return None

        sym = cls.info.names.get(lhs.name)
        if sym is None:  # pragma: no cover
            # This is likely due to a star import (see the dataclasses plugin for a more detailed explanation)
            # This is the same logic used in the dataclasses plugin
            return None

        node = sym.node
        if isinstance(node, PlaceholderNode):  # pragma: no cover
            # See the PlaceholderNode docstring for more detail about how this can occur
            # Basically, it is an edge case when dealing with complex import logic

            # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does..
            return None

        if isinstance(node, TypeAlias):
            self._api.fail(
                'Type aliases inside BaseModel definitions are not supported at runtime',
                node,
            )
            # Skip processing this node. This doesn't match the runtime behaviour,
            # but the only alternative would be to modify the SymbolTable,
            # and it's a little hairy to do that in a plugin.
            return None

        if not isinstance(node, Var):  # pragma: no cover
            # Don't know if this edge case still happens with the `is_valid_field` check above
            # but better safe than sorry

            # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does..
            return None

        # x: ClassVar[int] is not a field
        if node.is_classvar:
            return PydanticModelClassVar(lhs.name)

        # x: InitVar[int] is not supported in BaseModel
        node_type = get_proper_type(node.type)
        if isinstance(node_type, Instance) and node_type.type.fullname == 'dataclasses.InitVar':
            self._api.fail(
                'InitVar is not supported in BaseModel',
                node,
            )

        has_default = self.get_has_default(stmt)

        if sym.type is None and node.is_final and node.is_inferred:
            # This follows the logic from the dataclasses plugin. The following comment is taken verbatim:
            #
            # This is a special case, assignment like x: Final = 42 is classified
            # annotated above, but mypy strips the `Final` turning it into x = 42.
            # We do not support inferred types in dataclasses, so we can try inferring
            # type for simple literals, and otherwise require an explicit type
            # argument for Final[...].
            typ = self._api.analyze_simple_literal_type(stmt.rvalue, is_final=True)
            if typ:
                node.type = typ
            else:
                self._api.fail(
                    'Need type argument for Final[...] with non-literal default in BaseModel',
                    stmt,
                )
                # Fall back to Any so downstream processing still produces a (degraded) field.
                node.type = AnyType(TypeOfAny.from_error)

        # Warn when an alias exists but can't be resolved statically and the config doesn't
        # allow population by field name (the generated __init__ signature would be wrong).
        alias, has_dynamic_alias = self.get_alias_info(stmt)
        if has_dynamic_alias and not model_config.populate_by_name and self.plugin_config.warn_required_dynamic_aliases:
            error_required_dynamic_aliases(self._api, stmt)

        init_type = self._infer_dataclass_attr_init_type(sym, lhs.name, stmt)
        return PydanticModelField(
            name=lhs.name,
            has_dynamic_alias=has_dynamic_alias,
            has_default=has_default,
            alias=alias,
            line=stmt.line,
            column=stmt.column,
            type=init_type,
            info=cls.info,
        )
    def _infer_dataclass_attr_init_type(self, sym: SymbolTableNode, name: str, context: Context) -> Type | None:
        """Infer __init__ argument type for an attribute.

        In particular, possibly use the signature of __set__.
        """
        default = sym.type
        if sym.implicit:
            # Implicitly-defined symbols have no declared annotation worth inspecting.
            return default
        t = get_proper_type(sym.type)

        # Perform a simple-minded inference from the signature of __set__, if present.
        # We can't use mypy.checkmember here, since this plugin runs before type checking.
        # We only support some basic scenarios here, which is hopefully sufficient for
        # the vast majority of use cases.
        if not isinstance(t, Instance):
            return default
        setter = t.type.get('__set__')
        if setter:
            if isinstance(setter.node, FuncDef):
                super_info = t.type.get_containing_type_info('__set__')
                assert super_info
                if setter.type:
                    # Map the setter signature onto this instance's type parameters.
                    setter_type = get_proper_type(map_type_from_supertype(setter.type, t.type, super_info))
                else:
                    return AnyType(TypeOfAny.unannotated)
                # A plain descriptor setter takes exactly (self, instance, value) positionally;
                # the value parameter's type is the type accepted at assignment/__init__ time.
                if isinstance(setter_type, CallableType) and setter_type.arg_kinds == [
                    ARG_POS,
                    ARG_POS,
                    ARG_POS,
                ]:
                    return expand_type_by_instance(setter_type.arg_types[2], t)
                else:
                    self._api.fail(f'Unsupported signature for "__set__" in "{t.type.name}"', context)
            else:
                # __set__ exists but isn't a plain function (e.g. overloaded/decorated) — unsupported.
                self._api.fail(f'Unsupported "__set__" in "{t.type.name}"', context)

        return default
    def add_initializer(
        self, fields: list[PydanticModelField], config: ModelConfigData, is_settings: bool, is_root_model: bool
    ) -> None:
        """Adds a fields-aware `__init__` method to the class.

        The added `__init__` will be annotated with types vs. all `Any` depending on the plugin settings.
        """
        if '__init__' in self._cls.info.names and not self._cls.info.names['__init__'].plugin_generated:
            return  # Don't generate an __init__ if one already exists

        typed = self.plugin_config.init_typed
        # If population by field name is not enabled, the generated signature must use aliases.
        use_alias = config.populate_by_name is not True
        # An alias generator without populate_by_name means argument names can't be known statically.
        requires_dynamic_aliases = bool(config.has_alias_generator and not config.populate_by_name)
        args = self.get_field_arguments(
            fields,
            typed=typed,
            requires_dynamic_aliases=requires_dynamic_aliases,
            use_alias=use_alias,
            is_settings=is_settings,
            force_typevars_invariant=True,
        )

        if is_root_model and MYPY_VERSION_TUPLE <= (1, 0, 1):
            # convert root argument to positional argument
            # This is needed because mypy support for `dataclass_transform` isn't complete on 1.0.1
            args[0].kind = ARG_POS if args[0].kind == ARG_NAMED else ARG_OPT

        if is_settings:
            # Forward the single-underscore-prefixed keyword arguments declared on
            # BaseSettings.__init__ so they remain accepted on subclasses.
            base_settings_node = self._api.lookup_fully_qualified(BASESETTINGS_FULLNAME).node
            if '__init__' in base_settings_node.names:
                base_settings_init_node = base_settings_node.names['__init__'].node
                if base_settings_init_node is not None and base_settings_init_node.type is not None:
                    func_type = base_settings_init_node.type
                    for arg_idx, arg_name in enumerate(func_type.arg_names):
                        # Skip dunder-ish and public names; only `_name` style settings kwargs are copied.
                        if arg_name.startswith('__') or not arg_name.startswith('_'):
                            continue
                        analyzed_variable_type = self._api.anal_type(func_type.arg_types[arg_idx])
                        variable = Var(arg_name, analyzed_variable_type)
                        args.append(Argument(variable, analyzed_variable_type, None, ARG_OPT))

        if not self.should_init_forbid_extra(fields, config):
            # Accept arbitrary extra keyword arguments via **kwargs.
            var = Var('kwargs')
            args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2))

        add_method(self._api, self._cls, '__init__', args=args, return_type=NoneType())
    def add_model_construct_method(
        self, fields: list[PydanticModelField], config: ModelConfigData, is_settings: bool
    ) -> None:
        """Adds a fully typed `model_construct` classmethod to the class.

        Similar to the fields-aware __init__ method, but always uses the field names (not aliases),
        and does not treat settings fields as optional.
        """
        # First parameter: `_fields_set: set[str] | None = ...`
        set_str = self._api.named_type(f'{BUILTINS_NAME}.set', [self._api.named_type(f'{BUILTINS_NAME}.str')])
        optional_set_str = UnionType([set_str, NoneType()])
        fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT)
        # Build field arguments under the checker's strict-optional setting so optionality is computed consistently.
        with state.strict_optional_set(self._api.options.strict_optional):
            args = self.get_field_arguments(
                fields, typed=True, requires_dynamic_aliases=False, use_alias=False, is_settings=is_settings
            )
        if not self.should_init_forbid_extra(fields, config):
            var = Var('kwargs')
            args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2))

        args = [fields_set_argument] + args

        add_method(
            self._api,
            self._cls,
            'model_construct',
            args=args,
            return_type=fill_typevars(self._cls.info),
            is_classmethod=True,
        )
    def set_frozen(self, fields: list[PydanticModelField], api: SemanticAnalyzerPluginInterface, frozen: bool) -> None:
        """Marks all fields as properties so that attempts to set them trigger mypy errors.

        This is the same approach used by the attrs and dataclasses plugins.
        """
        info = self._cls.info
        for field in fields:
            sym_node = info.names.get(field.name)
            if sym_node is not None:
                var = sym_node.node
                if isinstance(var, Var):
                    var.is_property = frozen
                elif isinstance(var, PlaceholderNode) and not self._api.final_iteration:
                    # See https://github.com/pydantic/pydantic/issues/5191 to hit this branch for test coverage
                    self._api.defer()
                else:  # pragma: no cover
                    # I don't know whether it's possible to hit this branch, but I've added it for safety
                    try:
                        var_str = str(var)
                    except TypeError:
                        # This happens for PlaceholderNode; perhaps it will happen for other types in the future..
                        var_str = repr(var)
                    detail = f'sym_node.node: {var_str} (of type {var.__class__})'
                    error_unexpected_behavior(detail, self._api, self._cls)
            else:
                # Field inherited but not present in this class's symbol table: synthesize a Var for it.
                var = field.to_var(info, api, use_alias=False)
                var.info = info
                var.is_property = frozen
                var._fullname = info.fullname + '.' + var.name
                info.names[var.name] = SymbolTableNode(MDEF, var)
974 def get_config_update(self, name: str, arg: Expression, lax_extra: bool = False) -> ModelConfigData | None: 1fgebcahid
975 """Determines the config update due to a single kwarg in the ConfigDict definition.
977 Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int)
978 """
979 if name not in self.tracked_config_fields: 1fgebcahid
980 return None 1fgebcahid
981 if name == 'extra': 1fgebcahid
982 if isinstance(arg, StrExpr): 1fgebcahid
983 forbid_extra = arg.value == 'forbid' 1fgebcahid
984 elif isinstance(arg, MemberExpr): 1fgebcahid
985 forbid_extra = arg.name == 'forbid' 1fgebcahid
986 else:
987 if not lax_extra: 1fgebcahid
988 # Only emit an error for other types of `arg` (e.g., `NameExpr`, `ConditionalExpr`, etc.) when
989 # reading from a config class, etc. If a ConfigDict is used, then we don't want to emit an error
990 # because you'll get type checking from the ConfigDict itself.
991 #
992 # It would be nice if we could introspect the types better otherwise, but I don't know what the API
993 # is to evaluate an expr into its type and then check if that type is compatible with the expected
994 # type. Note that you can still get proper type checking via: `model_config = ConfigDict(...)`, just
995 # if you don't use an explicit string, the plugin won't be able to infer whether extra is forbidden.
996 error_invalid_config_value(name, self._api, arg) 1fgebcahid
997 return None 1fgebcahid
998 return ModelConfigData(forbid_extra=forbid_extra) 1fgebcahid
999 if name == 'alias_generator': 1fgebcahid
1000 has_alias_generator = True 1fgebcahid
1001 if isinstance(arg, NameExpr) and arg.fullname == 'builtins.None': 1fgebcahid
1002 has_alias_generator = False 1fgebcahid
1003 return ModelConfigData(has_alias_generator=has_alias_generator) 1fgebcahid
1004 if isinstance(arg, NameExpr) and arg.fullname in ('builtins.True', 'builtins.False'): 1fgebcahid
1005 return ModelConfigData(**{name: arg.fullname == 'builtins.True'}) 1fgebcahid
1006 error_invalid_config_value(name, self._api, arg) 1fgebcahid
1007 return None 1fgebcahid
1009 @staticmethod 1fgebcahid
1010 def get_has_default(stmt: AssignmentStmt) -> bool: 1fgebcahid
1011 """Returns a boolean indicating whether the field defined in `stmt` is a required field."""
1012 expr = stmt.rvalue 1fgebcahid
1013 if isinstance(expr, TempNode): 1fgebcahid
1014 # TempNode means annotation-only, so has no default
1015 return False 1fgebcahid
1016 if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME: 1fgebcahid
1017 # The "default value" is a call to `Field`; at this point, the field has a default if and only if:
1018 # * there is a positional argument that is not `...`
1019 # * there is a keyword argument named "default" that is not `...`
1020 # * there is a "default_factory" that is not `None`
1021 for arg, name in zip(expr.args, expr.arg_names): 1fgebcahid
1022 # If name is None, then this arg is the default because it is the only positional argument.
1023 if name is None or name == 'default': 1fgebcahid
1024 return arg.__class__ is not EllipsisExpr 1fgebcahid
1025 if name == 'default_factory': 1fgebcahid
1026 return not (isinstance(arg, NameExpr) and arg.fullname == 'builtins.None') 1fgebcahid
1027 return False 1fgebcahid
1028 # Has no default if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`)
1029 return not isinstance(expr, EllipsisExpr) 1fgebcahid
1031 @staticmethod 1fgebcahid
1032 def get_alias_info(stmt: AssignmentStmt) -> tuple[str | None, bool]: 1fgebcahid
1033 """Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`.
1035 `has_dynamic_alias` is True if and only if an alias is provided, but not as a string literal.
1036 If `has_dynamic_alias` is True, `alias` will be None.
1037 """
1038 expr = stmt.rvalue 1fgebcahid
1039 if isinstance(expr, TempNode): 1fgebcahid
1040 # TempNode means annotation-only
1041 return None, False 1fgebcahid
1043 if not ( 1fgebcahi
1044 isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME
1045 ):
1046 # Assigned value is not a call to pydantic.fields.Field
1047 return None, False 1fgebcahid
1049 for i, arg_name in enumerate(expr.arg_names): 1fgebcahid
1050 if arg_name != 'alias': 1fgebcahid
1051 continue 1fgebcahid
1052 arg = expr.args[i] 1fgebcahid
1053 if isinstance(arg, StrExpr): 1fgebcahid
1054 return arg.value, False 1fgebcahid
1055 else:
1056 return None, True 1fgebcahid
1057 return None, False 1fgebcahid
1059 def get_field_arguments( 1fgebcahid
1060 self,
1061 fields: list[PydanticModelField],
1062 typed: bool,
1063 use_alias: bool,
1064 requires_dynamic_aliases: bool,
1065 is_settings: bool,
1066 force_typevars_invariant: bool = False,
1067 ) -> list[Argument]:
1068 """Helper function used during the construction of the `__init__` and `model_construct` method signatures.
1070 Returns a list of mypy Argument instances for use in the generated signatures.
1071 """
1072 info = self._cls.info 1fgebcahid
1073 arguments = [ 1fgebcahid
1074 field.to_argument(
1075 info,
1076 typed=typed,
1077 force_optional=requires_dynamic_aliases or is_settings,
1078 use_alias=use_alias,
1079 api=self._api,
1080 force_typevars_invariant=force_typevars_invariant,
1081 )
1082 for field in fields
1083 if not (use_alias and field.has_dynamic_alias)
1084 ]
1085 return arguments 1fgebcahid
1087 def should_init_forbid_extra(self, fields: list[PydanticModelField], config: ModelConfigData) -> bool: 1fgebcahid
1088 """Indicates whether the generated `__init__` should get a `**kwargs` at the end of its signature.
1090 We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to,
1091 *unless* a required dynamic alias is present (since then we can't determine a valid signature).
1092 """
1093 if not config.populate_by_name: 1fgebcahid
1094 if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)): 1fgebcahid
1095 return False 1fgebcahid
1096 if config.forbid_extra: 1fgebcahid
1097 return True 1fgebcahid
1098 return self.plugin_config.init_forbid_extra 1fgebcahid
1100 @staticmethod 1fgebcahid
1101 def is_dynamic_alias_present(fields: list[PydanticModelField], has_alias_generator: bool) -> bool: 1fgebcahid
1102 """Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be
1103 determined during static analysis.
1104 """
1105 for field in fields: 1fgebcahid
1106 if field.has_dynamic_alias: 1fgebcahid
1107 return True 1fgebcahid
1108 if has_alias_generator: 1fgebcahid
1109 for field in fields: 1fgebcahid
1110 if field.alias is None: 1fgebcahid
1111 return True 1fgebcahid
1112 return False 1fgebcahid
class ModelConfigData:
    """Pydantic mypy plugin model config class."""

    def __init__(
        self,
        forbid_extra: bool | None = None,
        frozen: bool | None = None,
        from_attributes: bool | None = None,
        populate_by_name: bool | None = None,
        has_alias_generator: bool | None = None,
    ):
        # Each attribute is tri-state: True/False when known, None when unspecified.
        self.forbid_extra = forbid_extra
        self.frozen = frozen
        self.from_attributes = from_attributes
        self.populate_by_name = populate_by_name
        self.has_alias_generator = has_alias_generator

    def get_values_dict(self) -> dict[str, Any]:
        """Returns a dict of Pydantic model config names to their values.

        It includes the config if config value is not `None`.
        """
        return {name: value for name, value in vars(self).items() if value is not None}

    def update(self, config: ModelConfigData | None) -> None:
        """Update Pydantic model config values."""
        if config is not None:
            for name, value in config.get_values_dict().items():
                setattr(self, name, value)

    def setdefault(self, key: str, value: Any) -> None:
        """Set default value for Pydantic model config if config value is `None`."""
        current = getattr(self, key)
        if current is None:
            setattr(self, key, value)
# Plugin-specific mypy error codes. Each gives users a handle to selectively
# silence a pydantic diagnostic, e.g. `# type: ignore[pydantic-orm]`.
ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_attributes call', 'Pydantic')
ERROR_CONFIG = ErrorCode('pydantic-config', 'Invalid config value', 'Pydantic')
ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed', 'Pydantic')
ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior', 'Pydantic')
ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed', 'Pydantic')
ERROR_FIELD_DEFAULTS = ErrorCode('pydantic-field', 'Invalid Field defaults', 'Pydantic')
ERROR_EXTRA_FIELD_ROOT_MODEL = ErrorCode('pydantic-field', 'Extra field on RootModel subclass', 'Pydantic')
def error_from_attributes(model_name: str, api: CheckerPluginInterface, context: Context) -> None:
    """Emits an error when the model does not have `from_attributes=True`."""
    message = f'"{model_name}" does not have from_attributes=True'
    api.fail(message, context, code=ERROR_ORM)
def error_invalid_config_value(name: str, api: SemanticAnalyzerPluginInterface, context: Context) -> None:
    """Emits an error when the config value is invalid."""
    message = f'Invalid value for "Config.{name}"'
    api.fail(message, context, code=ERROR_CONFIG)
def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
    """Emits required dynamic aliases error.

    This will be called when `warn_required_dynamic_aliases=True`.
    """
    message = 'Required dynamic aliases disallowed'
    api.fail(message, context, code=ERROR_ALIAS)
def error_unexpected_behavior(
    detail: str, api: CheckerPluginInterface | SemanticAnalyzerPluginInterface, context: Context
) -> None:  # pragma: no cover
    """Emits unexpected behavior error."""
    # Can't think of a good way to test this, but I confirmed it renders as desired by adding to a non-error path
    link = 'https://github.com/pydantic/pydantic/issues/new/choose'
    full_message = (
        f'The pydantic mypy plugin ran into unexpected behavior: {detail}\n'
        f'Please consider reporting this bug at {link} so we can try to fix it!'
    )
    api.fail(full_message, context, code=ERROR_UNEXPECTED)
def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
    """Emits an error when there is an untyped field in the model."""
    message = 'Untyped fields disallowed'
    api.fail(message, context, code=ERROR_UNTYPED)
def error_extra_fields_on_root_model(api: CheckerPluginInterface, context: Context) -> None:
    """Emits an error when there is more than just a root field defined for a subclass of RootModel."""
    message = 'Only `root` is allowed as a field of a `RootModel`'
    api.fail(message, context, code=ERROR_EXTRA_FIELD_ROOT_MODEL)
def error_default_and_default_factory_specified(api: CheckerPluginInterface, context: Context) -> None:
    """Emits an error when `Field` has both `default` and `default_factory` together."""
    message = 'Field default and default_factory cannot be specified together'
    api.fail(message, context, code=ERROR_FIELD_DEFAULTS)
def add_method(
    api: SemanticAnalyzerPluginInterface | CheckerPluginInterface,
    cls: ClassDef,
    name: str,
    args: list[Argument],
    return_type: Type,
    self_type: Type | None = None,
    tvar_def: TypeVarDef | None = None,
    is_classmethod: bool = False,
) -> None:
    """Very closely related to `mypy.plugins.common.add_method_to_class`, with a few pydantic-specific changes."""
    info = cls.info

    # First remove any previously generated methods with the same name
    # to avoid clashes and problems in the semantic analyzer.
    if name in info.names:
        sym = info.names[name]
        if sym.plugin_generated and isinstance(sym.node, FuncDef):
            cls.defs.body.remove(sym.node)  # pragma: no cover

    # The two plugin interfaces expose different lookups for `builtins.function`.
    if isinstance(api, SemanticAnalyzerPluginInterface):
        function_type = api.named_type('builtins.function')
    else:
        function_type = api.named_generic_type('builtins.function', [])

    if is_classmethod:
        self_type = self_type or TypeType(fill_typevars(info))
        first = [Argument(Var('_cls'), self_type, None, ARG_POS, True)]
    else:
        self_type = self_type or fill_typevars(info)
        # `self` is positional *ONLY* here, but this can't be expressed
        # fully in the mypy internal API. ARG_POS is the closest we can get.
        # Using ARG_POS will, however, give mypy errors if a `self` field
        # is present on a model:
        #
        # Name "self" already defined (possibly by an import) [no-redef]
        #
        # As a workaround, we give this argument a name that will
        # never conflict. By its positional nature, this name will not
        # be used or exposed to users.
        first = [Argument(Var('__pydantic_self__'), self_type, None, ARG_POS)]
    args = first + args

    # Split the Argument objects into the parallel lists CallableType expects.
    arg_types, arg_names, arg_kinds = [], [], []
    for arg in args:
        assert arg.type_annotation, 'All arguments must be fully typed.'
        arg_types.append(arg.type_annotation)
        arg_names.append(arg.variable.name)
        arg_kinds.append(arg.kind)

    signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type)
    if tvar_def:
        signature.variables = [tvar_def]

    # The synthesized function body is just `pass`; only the signature matters for type checking.
    func = FuncDef(name, args, Block([PassStmt()]))
    func.info = info
    func.type = set_callable_name(signature, func)
    func.is_class = is_classmethod
    func._fullname = info.fullname + '.' + name
    func.line = info.line

    # NOTE: we would like the plugin generated node to dominate, but we still
    # need to keep any existing definitions so they get semantically analyzed.
    if name in info.names:
        # Get a nice unique name instead.
        r_name = get_unique_redefinition_name(name, info.names)
        info.names[r_name] = info.names[name]

    # Add decorator for is_classmethod
    # The dataclasses plugin claims this is unnecessary for classmethods, but not including it results in a
    # signature incompatible with the superclass, which causes mypy errors to occur for every subclass of BaseModel.
    if is_classmethod:
        func.is_decorated = True
        v = Var(name, func.type)
        v.info = info
        v._fullname = func._fullname
        v.is_classmethod = True
        dec = Decorator(func, [NameExpr('classmethod')], v)
        dec.line = info.line
        sym = SymbolTableNode(MDEF, dec)
    else:
        sym = SymbolTableNode(MDEF, func)
    sym.plugin_generated = True
    info.names[name] = sym

    # Also append to the class body so the node is visible to later passes.
    info.defn.defs.body.append(func)
def parse_toml(config_file: str) -> dict[str, Any] | None:
    """Returns a dict of config keys to values.

    It reads configs from toml file and returns `None` if the file is not a toml file.
    """
    if not config_file.endswith('.toml'):
        return None

    # `tomllib` is stdlib from 3.11; older interpreters need the third-party `tomli` backport.
    if sys.version_info >= (3, 11):
        import tomllib as toml_parser
    else:
        try:
            import tomli as toml_parser
        except ImportError:  # pragma: no cover
            import warnings

            warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.')
            return None

    with open(config_file, 'rb') as config_stream:
        return toml_parser.load(config_stream)