Coverage for pydantic/mypy.py: 91.09%

604 statements  

coverage.py v7.6.12, created at 2025-02-13 19:35 +0000

1"""This module includes classes and functions designed specifically for use with the mypy plugin.""" 

2 

3from __future__ import annotations 1hijklmnopqrstuvwxyzABCabcdefgDEFGHIJKLM

4 

5import sys 1hijklmnopqrstuvwxyzABCabcdefgDEFGHIJKLM

6from collections.abc import Iterator 1hijklmnopqrstuvwxyzABCabcdefgDEFGHIJKLM

7from configparser import ConfigParser 1hijklmnopqrstuvwxyzABCabcdefgDEFGHIJKLM

8from typing import Any, Callable, cast 1hijklmnopqrstuvwxyzABCabcdefgDEFGHIJKLM

9 

10from mypy.errorcodes import ErrorCode 1hijklmnopqrstuvwxyzABCabcdefgDEFGHIJKLM

11from mypy.expandtype import expand_type, expand_type_by_instance 1abcdefg

12from mypy.nodes import ( 1abcdefg

13 ARG_NAMED, 

14 ARG_NAMED_OPT, 

15 ARG_OPT, 

16 ARG_POS, 

17 ARG_STAR2, 

18 INVARIANT, 

19 MDEF, 

20 Argument, 

21 AssignmentStmt, 

22 Block, 

23 CallExpr, 

24 ClassDef, 

25 Context, 

26 Decorator, 

27 DictExpr, 

28 EllipsisExpr, 

29 Expression, 

30 FuncDef, 

31 IfStmt, 

32 JsonDict, 

33 MemberExpr, 

34 NameExpr, 

35 PassStmt, 

36 PlaceholderNode, 

37 RefExpr, 

38 Statement, 

39 StrExpr, 

40 SymbolTableNode, 

41 TempNode, 

42 TypeAlias, 

43 TypeInfo, 

44 Var, 

45) 

46from mypy.options import Options 1abcdefg

47from mypy.plugin import ( 1abcdefg

48 CheckerPluginInterface, 

49 ClassDefContext, 

50 MethodContext, 

51 Plugin, 

52 ReportConfigContext, 

53 SemanticAnalyzerPluginInterface, 

54) 

55from mypy.plugins.common import ( 1abcdefg

56 deserialize_and_fixup_type, 

57) 

58from mypy.semanal import set_callable_name 1abcdefg

59from mypy.server.trigger import make_wildcard_trigger 1abcdefg

60from mypy.state import state 1abcdefg

61from mypy.typeops import map_type_from_supertype 1abcdefg

62from mypy.types import ( 1abcdefg

63 AnyType, 

64 CallableType, 

65 Instance, 

66 NoneType, 

67 Type, 

68 TypeOfAny, 

69 TypeType, 

70 TypeVarType, 

71 UnionType, 

72 get_proper_type, 

73) 

74from mypy.typevars import fill_typevars 1abcdefg

75from mypy.util import get_unique_redefinition_name 1abcdefg

76from mypy.version import __version__ as mypy_version 1abcdefg

77 

78from pydantic._internal import _fields 1abcdefg

79from pydantic.version import parse_mypy_version 1abcdefg

80 

81CONFIGFILE_KEY = 'pydantic-mypy' 1abcdefg

82METADATA_KEY = 'pydantic-mypy-metadata' 1abcdefg

83BASEMODEL_FULLNAME = 'pydantic.main.BaseModel' 1abcdefg

84BASESETTINGS_FULLNAME = 'pydantic_settings.main.BaseSettings' 1abcdefg

85ROOT_MODEL_FULLNAME = 'pydantic.root_model.RootModel' 1abcdefg

86MODEL_METACLASS_FULLNAME = 'pydantic._internal._model_construction.ModelMetaclass' 1abcdefg

87FIELD_FULLNAME = 'pydantic.fields.Field' 1abcdefg

88DATACLASS_FULLNAME = 'pydantic.dataclasses.dataclass' 1abcdefg

89MODEL_VALIDATOR_FULLNAME = 'pydantic.functional_validators.model_validator' 1abcdefg

90DECORATOR_FULLNAMES = { 1abcdefg

91 'pydantic.functional_validators.field_validator', 

92 'pydantic.functional_validators.model_validator', 

93 'pydantic.functional_serializers.serializer', 

94 'pydantic.functional_serializers.model_serializer', 

95 'pydantic.deprecated.class_validators.validator', 

96 'pydantic.deprecated.class_validators.root_validator', 

97} 

98IMPLICIT_CLASSMETHOD_DECORATOR_FULLNAMES = DECORATOR_FULLNAMES - {'pydantic.functional_serializers.model_serializer'} 1abcdefg

99 

100 

101MYPY_VERSION_TUPLE = parse_mypy_version(mypy_version) 1abcdefg

102BUILTINS_NAME = 'builtins' 1abcdefg

103 

104# Increment version if plugin changes and mypy caches should be invalidated 

105__version__ = 2 1abcdefg

106 

107 

108def plugin(version: str) -> type[Plugin]: 1abcdefg

109 """`version` is the mypy version string. 

110 

111 We might want to use this to print a warning if the mypy version being used is 

112 newer, or especially older, than we expect (or need). 

113 

114 Args: 

115 version: The mypy version string. 

116 

117 Returns: 

118 The Pydantic mypy plugin type. 

119 """ 

120 return PydanticPlugin 1abcdefg
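# --- Editor's sketch (not part of pydantic/mypy.py): how mypy resolves this entry point.
# mypy imports each module listed in its `plugins` setting and calls its `plugin(version)`
# function with the running mypy version string; the returned class is then instantiated.
from mypy.version import __version__ as mypy_version
from pydantic.mypy import plugin, PydanticPlugin

assert plugin(mypy_version) is PydanticPlugin
# --- end sketch ---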

121 

122 

123class PydanticPlugin(Plugin): 1abcdefg

124 """The Pydantic mypy plugin.""" 

125 

126 def __init__(self, options: Options) -> None: 1abcdefg

127 self.plugin_config = PydanticPluginConfig(options) 1abcdefg

128 self._plugin_data = self.plugin_config.to_data() 1abcdefg

129 super().__init__(options) 1abcdefg

130 

131 def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: 1abcdefg

132 """Update Pydantic model class.""" 

133 sym = self.lookup_fully_qualified(fullname) 1abcdefg

134 if sym and isinstance(sym.node, TypeInfo): # pragma: no branch 1abcdefg

135 # No branching may occur if the mypy cache has not been cleared 

136 if sym.node.has_base(BASEMODEL_FULLNAME): 1abcdefg

137 return self._pydantic_model_class_maker_callback 1abcdefg

138 return None 1abcdefg

139 

140 def get_metaclass_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: 1abcdefg

141 """Update Pydantic `ModelMetaclass` definition.""" 

142 if fullname == MODEL_METACLASS_FULLNAME: 1abcdefg

143 return self._pydantic_model_metaclass_marker_callback 1abcdefg

144 return None 1abcdefg

145 

146 def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None: 1abcdefg

147 """Adjust return type of `from_orm` method call.""" 

148 if fullname.endswith('.from_orm'): 1abcdefg

149 return from_attributes_callback 1abcdefg

150 return None 1abcdefg

151 

152 def report_config_data(self, ctx: ReportConfigContext) -> dict[str, Any]: 1abcdefg

153 """Return all plugin config data. 

154 

155 Used by mypy to determine if cache needs to be discarded. 

156 """ 

157 return self._plugin_data 1abcdefg

158 

159 def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> None: 1abcdefg

160 transformer = PydanticModelTransformer(ctx.cls, ctx.reason, ctx.api, self.plugin_config) 1abcdefg

161 transformer.transform() 1abcdefg

162 

163 def _pydantic_model_metaclass_marker_callback(self, ctx: ClassDefContext) -> None: 1abcdefg

164 """Reset dataclass_transform_spec attribute of ModelMetaclass. 

165 

166 Let the plugin handle it. This behavior can be disabled 

167 if `debug_dataclass_transform` is set to `True`, for testing purposes. 

168 """ 

169 if self.plugin_config.debug_dataclass_transform:  [branch 169 ↛ 170 not taken: condition never true] 1abcdefg

170 return 

171 info_metaclass = ctx.cls.info.declared_metaclass 1abcdefg

172 assert info_metaclass, "callback not passed from 'get_metaclass_hook'" 1abcdefg

173 if getattr(info_metaclass.type, 'dataclass_transform_spec', None): 1abcdefg

174 info_metaclass.type.dataclass_transform_spec = None 1abcdefg

175 

176 

177class PydanticPluginConfig: 1abcdefg

178 """A Pydantic mypy plugin config holder. 

179 

180 Attributes: 

181 init_forbid_extra: Whether to add a `**kwargs` at the end of the generated `__init__` signature. 

182 init_typed: Whether to annotate fields in the generated `__init__`. 

183 warn_required_dynamic_aliases: Whether to raise required dynamic aliases error. 

184 debug_dataclass_transform: Whether to not reset `dataclass_transform_spec` attribute 

185 of `ModelMetaclass` for testing purposes. 

186 """ 

187 

188 __slots__ = ( 1abcdefg

189 'init_forbid_extra', 

190 'init_typed', 

191 'warn_required_dynamic_aliases', 

192 'debug_dataclass_transform', 

193 ) 

194 init_forbid_extra: bool 1abcdefg

195 init_typed: bool 1abcdefg

196 warn_required_dynamic_aliases: bool 1abcdefg

197 debug_dataclass_transform: bool # undocumented 1abcdefg

198 

199 def __init__(self, options: Options) -> None: 1abcdefg

200 if options.config_file is None: # pragma: no cover 1abcdefg

201 return 

202 

203 toml_config = parse_toml(options.config_file) 1abcdefg

204 if toml_config is not None: 1abcdefg

205 config = toml_config.get('tool', {}).get('pydantic-mypy', {}) 1abcdefg

206 for key in self.__slots__: 1abcdefg

207 setting = config.get(key, False) 1abcdefg

208 if not isinstance(setting, bool): 1abcdefg

209 raise ValueError(f'Configuration value must be a boolean for key: {key}') 1abcdefg

210 setattr(self, key, setting) 1abcdefg

211 else: 

212 plugin_config = ConfigParser() 1abcdefg

213 plugin_config.read(options.config_file) 1abcdefg

214 for key in self.__slots__: 1abcdefg

215 setting = plugin_config.getboolean(CONFIGFILE_KEY, key, fallback=False) 1abcdefg

216 setattr(self, key, setting) 1abcdefg

217 

218 def to_data(self) -> dict[str, Any]: 1abcdefg

219 """Returns a dict of config names to their values.""" 

220 return {key: getattr(self, key) for key in self.__slots__} 1abcdefg
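# --- Editor's sketch (not part of pydantic/mypy.py): the settings this class reads.
# A hedged example of a pyproject.toml fragment, parsed the same way __init__ does
# (tomllib requires Python 3.11+; older interpreters fall back to tomli, see parse_toml below).
import tomllib

sample = """
[tool.mypy]
plugins = ["pydantic.mypy"]

[tool.pydantic-mypy]
init_forbid_extra = true
init_typed = true
warn_required_dynamic_aliases = true
"""
settings = tomllib.loads(sample)["tool"]["pydantic-mypy"]
assert settings["init_typed"] is True  # each key maps onto a __slots__ entry; missing keys default to False
# --- end sketch ---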

221 

222 

223def from_attributes_callback(ctx: MethodContext) -> Type: 1abcdefg

224 """Raise an error if from_attributes is not enabled.""" 

225 model_type: Instance 

226 ctx_type = ctx.type 1abcdefg

227 if isinstance(ctx_type, TypeType): 1abcdefg

228 ctx_type = ctx_type.item 1abcdefg

229 if isinstance(ctx_type, CallableType) and isinstance(ctx_type.ret_type, Instance): 1abcdefg

230 model_type = ctx_type.ret_type # called on the class 1abcdefg

231 elif isinstance(ctx_type, Instance): 1abcdefg

232 model_type = ctx_type # called on an instance (unusual, but still valid) 1abcdefg

233 else: # pragma: no cover 

234 detail = f'ctx.type: {ctx_type} (of type {ctx_type.__class__.__name__})' 

235 error_unexpected_behavior(detail, ctx.api, ctx.context) 

236 return ctx.default_return_type 

237 pydantic_metadata = model_type.type.metadata.get(METADATA_KEY) 1abcdefg

238 if pydantic_metadata is None: 1abcdefg

239 return ctx.default_return_type 1abcdefg

240 if not model_type.type.has_base(BASEMODEL_FULLNAME):  [branch 240 ↛ 242 not taken: condition never true] 1abcdefg

241 # not a Pydantic v2 model 

242 return ctx.default_return_type 

243 from_attributes = pydantic_metadata.get('config', {}).get('from_attributes') 1abcdefg

244 if from_attributes is not True: 1abcdefg

245 error_from_attributes(model_type.type.name, ctx.api, ctx.context) 1abcdefg

246 return ctx.default_return_type 1abcdefg
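# --- Editor's sketch (not part of pydantic/mypy.py): the pattern this callback flags.
# Class names here are illustrative only.
from types import SimpleNamespace
from pydantic import BaseModel, ConfigDict

class Plain(BaseModel):
    name: str

class FromAttrs(BaseModel):
    model_config = ConfigDict(from_attributes=True)
    name: str

obj = SimpleNamespace(name='alice')
FromAttrs.from_orm(obj)  # accepted (model_validate is the preferred v2 spelling)
Plain.from_orm(obj)      # plugin error: "Plain" does not have from_attributes=True (and raises at runtime too)
# --- end sketch ---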

247 

248 

249class PydanticModelField: 1abcdefg

250 """Based on mypy.plugins.dataclasses.DataclassAttribute.""" 

251 

252 def __init__( 1abcdefg

253 self, 

254 name: str, 

255 alias: str | None, 

256 is_frozen: bool, 

257 has_dynamic_alias: bool, 

258 has_default: bool, 

259 strict: bool | None, 

260 line: int, 

261 column: int, 

262 type: Type | None, 

263 info: TypeInfo, 

264 ): 

265 self.name = name 1abcdefg

266 self.alias = alias 1abcdefg

267 self.is_frozen = is_frozen 1abcdefg

268 self.has_dynamic_alias = has_dynamic_alias 1abcdefg

269 self.has_default = has_default 1abcdefg

270 self.strict = strict 1abcdefg

271 self.line = line 1abcdefg

272 self.column = column 1abcdefg

273 self.type = type 1abcdefg

274 self.info = info 1abcdefg

275 

276 def to_argument( 1abcdefg

277 self, 

278 current_info: TypeInfo, 

279 typed: bool, 

280 model_strict: bool, 

281 force_optional: bool, 

282 use_alias: bool, 

283 api: SemanticAnalyzerPluginInterface, 

284 force_typevars_invariant: bool, 

285 is_root_model_root: bool, 

286 ) -> Argument: 

287 """Based on mypy.plugins.dataclasses.DataclassAttribute.to_argument.""" 

288 variable = self.to_var(current_info, api, use_alias, force_typevars_invariant) 1abcdefg

289 

290 strict = model_strict if self.strict is None else self.strict 1abcdefg

291 if typed or strict: 1abcdefg

292 type_annotation = self.expand_type(current_info, api) 1abcdefg

293 else: 

294 type_annotation = AnyType(TypeOfAny.explicit) 1abcdefg

295 

296 return Argument( 1abcdefg

297 variable=variable, 

298 type_annotation=type_annotation, 

299 initializer=None, 

300 kind=ARG_OPT 

301 if is_root_model_root 

302 else (ARG_NAMED_OPT if force_optional or self.has_default else ARG_NAMED), 

303 ) 

304 

305 def expand_type( 1abcdefg

306 self, current_info: TypeInfo, api: SemanticAnalyzerPluginInterface, force_typevars_invariant: bool = False 

307 ) -> Type | None: 

308 """Based on mypy.plugins.dataclasses.DataclassAttribute.expand_type.""" 

309 if force_typevars_invariant: 1abcdefg

310 # In some cases, mypy will emit an error "Cannot use a covariant type variable as a parameter" 

311 # To prevent that, we add an option to replace typevars with invariant ones while building certain 

312 # method signatures (in particular, `__init__`). There may be a better way to do this, if this causes 

313 # us problems in the future, we should look into why the dataclasses plugin doesn't have this issue. 

314 if isinstance(self.type, TypeVarType): 1abcdefg

315 modified_type = self.type.copy_modified() 1abcdefg

316 modified_type.variance = INVARIANT 1abcdefg

317 self.type = modified_type 1abcdefg

318 

319 if self.type is not None and self.info.self_type is not None:  [branch 319 ↛ 341 not taken: condition always true] 1abcdefg

320 # In general, it is not safe to call `expand_type()` during semantic analysis, 

321 # however this plugin is called very late, so all types should be fully ready. 

322 # Also, it is tricky to avoid eager expansion of Self types here (e.g. because 

323 # we serialize attributes). 

324 with state.strict_optional_set(api.options.strict_optional): 1abcdefg

325 filled_with_typevars = fill_typevars(current_info) 1abcdefg

326 # Cannot be TupleType as current_info represents a Pydantic model: 

327 assert isinstance(filled_with_typevars, Instance) 1abcdefg

328 if force_typevars_invariant: 1abcdefg

329 for arg in filled_with_typevars.args: 1abcdefg

330 if isinstance(arg, TypeVarType):  [branch 330 ↛ 329 not taken: condition always true] 1abcdefg

331 arg.variance = INVARIANT 1abcdefg

332 

333 expanded_type = expand_type(self.type, {self.info.self_type.id: filled_with_typevars}) 1abcdefg

334 if isinstance(expanded_type, Instance) and is_root_model(expanded_type.type): 1abcdefg

335 # When a root model is used as a field, Pydantic allows both an instance of the root model 

336 # as well as instances of the `root` field type: 

337 root_type = cast(Type, expanded_type.type['root'].type) 1abcdefg

338 expanded_root_type = expand_type_by_instance(root_type, expanded_type) 1abcdefg

339 expanded_type = UnionType([expanded_type, expanded_root_type]) 1abcdefg

340 return expanded_type 1abcdefg

341 return self.type 

342 

343 def to_var( 1abcdefg

344 self, 

345 current_info: TypeInfo, 

346 api: SemanticAnalyzerPluginInterface, 

347 use_alias: bool, 

348 force_typevars_invariant: bool = False, 

349 ) -> Var: 

350 """Based on mypy.plugins.dataclasses.DataclassAttribute.to_var.""" 

351 if use_alias and self.alias is not None: 1abcdefg

352 name = self.alias 1abcdefg

353 else: 

354 name = self.name 1abcdefg

355 

356 return Var(name, self.expand_type(current_info, api, force_typevars_invariant)) 1abcdefg

357 

358 def serialize(self) -> JsonDict: 1abcdefg

359 """Based on mypy.plugins.dataclasses.DataclassAttribute.serialize.""" 

360 assert self.type 1abcdefg

361 return { 1abcdefg

362 'name': self.name, 

363 'alias': self.alias, 

364 'is_frozen': self.is_frozen, 

365 'has_dynamic_alias': self.has_dynamic_alias, 

366 'has_default': self.has_default, 

367 'strict': self.strict, 

368 'line': self.line, 

369 'column': self.column, 

370 'type': self.type.serialize(), 

371 } 

372 

373 @classmethod 1abcdefg

374 def deserialize(cls, info: TypeInfo, data: JsonDict, api: SemanticAnalyzerPluginInterface) -> PydanticModelField: 1abcdefg

375 """Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize.""" 

376 data = data.copy() 1abcdefg

377 typ = deserialize_and_fixup_type(data.pop('type'), api) 1abcdefg

378 return cls(type=typ, info=info, **data) 1abcdefg

379 

380 def expand_typevar_from_subtype(self, sub_type: TypeInfo, api: SemanticAnalyzerPluginInterface) -> None: 1abcdefg

381 """Expands type vars in the context of a subtype when an attribute is inherited 

382 from a generic super type. 

383 """ 

384 if self.type is not None:  [branch 384 ↛ exit not taken: condition always true] 1abcdefg

385 with state.strict_optional_set(api.options.strict_optional): 1abcdefg

386 self.type = map_type_from_supertype(self.type, sub_type, self.info) 1abcdefg

387 

388 

389class PydanticModelClassVar: 1abcdefg

390 """Based on mypy.plugins.dataclasses.DataclassAttribute. 

391 

392 ClassVars are ignored by subclasses. 

393 

394 Attributes: 

395 name: the ClassVar name 

396 """ 

397 

398 def __init__(self, name): 1abcdefg

399 self.name = name 1abcdefg

400 

401 @classmethod 1abcdefg

402 def deserialize(cls, data: JsonDict) -> PydanticModelClassVar: 1abcdefg

403 """Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize.""" 

404 data = data.copy() 

405 return cls(**data) 

406 

407 def serialize(self) -> JsonDict: 1abcdefg

408 """Based on mypy.plugins.dataclasses.DataclassAttribute.serialize.""" 

409 return { 1abcdefg

410 'name': self.name, 

411 } 

412 

413 

414class PydanticModelTransformer: 1abcdefg

415 """Transform the BaseModel subclass according to the plugin settings. 

416 

417 Attributes: 

418 tracked_config_fields: A set of field configs that the plugin has to track their value. 

419 """ 

420 

421 tracked_config_fields: set[str] = { 1abcdefg

422 'extra', 

423 'frozen', 

424 'from_attributes', 

425 'populate_by_name', 

426 'alias_generator', 

427 'strict', 

428 } 

429 

430 def __init__( 1abcdefg

431 self, 

432 cls: ClassDef, 

433 reason: Expression | Statement, 

434 api: SemanticAnalyzerPluginInterface, 

435 plugin_config: PydanticPluginConfig, 

436 ) -> None: 

437 self._cls = cls 1abcdefg

438 self._reason = reason 1abcdefg

439 self._api = api 1abcdefg

440 

441 self.plugin_config = plugin_config 1abcdefg

442 

443 def transform(self) -> bool: 1abcdefg

444 """Configures the BaseModel subclass according to the plugin settings. 

445 

446 In particular: 

447 

448 * determines the model config and fields, 

449 * adds a fields-aware signature for the initializer and construct methods 

450 * freezes the class if frozen = True 

451 * stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses 

452 """ 

453 info = self._cls.info 1abcdefg

454 is_a_root_model = is_root_model(info) 1abcdefg

455 config = self.collect_config() 1abcdefg

456 fields, class_vars = self.collect_fields_and_class_vars(config, is_a_root_model) 1abcdefg

457 if fields is None or class_vars is None:  [branch 457 ↛ 459 not taken: condition never true] 1abcdefg

458 # Some definitions are not ready. We need another pass. 

459 return False 

460 for field in fields: 1abcdefg

461 if field.type is None: 1abcdefg

462 return False 1abcdefg

463 

464 is_settings = info.has_base(BASESETTINGS_FULLNAME) 1abcdefg

465 self.add_initializer(fields, config, is_settings, is_a_root_model) 1abcdefg

466 self.add_model_construct_method(fields, config, is_settings, is_a_root_model) 1abcdefg

467 self.set_frozen(fields, self._api, frozen=config.frozen is True) 1abcdefg

468 

469 self.adjust_decorator_signatures() 1abcdefg

470 

471 info.metadata[METADATA_KEY] = { 1abcdefg

472 'fields': {field.name: field.serialize() for field in fields}, 

473 'class_vars': {class_var.name: class_var.serialize() for class_var in class_vars}, 

474 'config': config.get_values_dict(), 

475 } 

476 

477 return True 1abcdefg

478 

479 def adjust_decorator_signatures(self) -> None: 1abcdefg

480 """When we decorate a function `f` with `pydantic.validator(...)`, `pydantic.field_validator` 

481 or `pydantic.serializer(...)`, mypy sees `f` as a regular method taking a `self` instance, 

482 even though pydantic internally wraps `f` with `classmethod` if necessary. 

483 

484 Teach mypy this by marking any function whose outermost decorator is a `validator()`, 

485 `field_validator()` or `serializer()` call as a `classmethod`. 

486 """ 

487 for sym in self._cls.info.names.values(): 1abcdefg

488 if isinstance(sym.node, Decorator): 1abcdefg

489 first_dec = sym.node.original_decorators[0] 1abcdefg

490 if ( 1abc

491 isinstance(first_dec, CallExpr) 

492 and isinstance(first_dec.callee, NameExpr) 

493 and first_dec.callee.fullname in IMPLICIT_CLASSMETHOD_DECORATOR_FULLNAMES 

494 # @model_validator(mode="after") is an exception, it expects a regular method 

495 and not ( 

496 first_dec.callee.fullname == MODEL_VALIDATOR_FULLNAME 

497 and any( 

498 first_dec.arg_names[i] == 'mode' and isinstance(arg, StrExpr) and arg.value == 'after' 

499 for i, arg in enumerate(first_dec.args) 

500 ) 

501 ) 

502 ): 

503 # TODO: Only do this if the first argument of the decorated function is `cls` 

504 sym.node.func.is_class = True 1abcdefg
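# --- Editor's sketch (not part of pydantic/mypy.py): decorators affected by this pass.
from pydantic import BaseModel, field_validator, model_validator

class Model(BaseModel):
    x: int

    @field_validator('x')
    def check_x(cls, v: int) -> int:   # implicitly a classmethod; the plugin marks it as such
        return v

    @model_validator(mode='after')
    def check_model(self) -> 'Model':  # mode="after" is the exception: it stays a regular method
        return self
# --- end sketch ---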

505 

506 def collect_config(self) -> ModelConfigData: # noqa: C901 (ignore complexity) 1abcdefg

507 """Collects the values of the config attributes that are used by the plugin, accounting for parent classes.""" 

508 cls = self._cls 1abcdefg

509 config = ModelConfigData() 1abcdefg

510 

511 has_config_kwargs = False 1abcdefg

512 has_config_from_namespace = False 1abcdefg

513 

514 # Handle `class MyModel(BaseModel, <name>=<expr>, ...):` 

515 for name, expr in cls.keywords.items(): 1abcdefg

516 config_data = self.get_config_update(name, expr) 1abcdefg

517 if config_data: 1abcdefg

518 has_config_kwargs = True 1abcdefg

519 config.update(config_data) 1abcdefg

520 

521 # Handle `model_config` 

522 stmt: Statement | None = None 1abcdefg

523 for stmt in cls.defs.body: 1abcdefg

524 if not isinstance(stmt, (AssignmentStmt, ClassDef)): 1abcdefg

525 continue 1abcdefg

526 

527 if isinstance(stmt, AssignmentStmt): 1abcdefg

528 lhs = stmt.lvalues[0] 1abcdefg

529 if not isinstance(lhs, NameExpr) or lhs.name != 'model_config': 1abcdefg

530 continue 1abcdefg

531 

532 if isinstance(stmt.rvalue, CallExpr): # calls to `dict` or `ConfigDict` 1abcdefg

533 for arg_name, arg in zip(stmt.rvalue.arg_names, stmt.rvalue.args): 1abcdefg

534 if arg_name is None:  [branch 534 ↛ 535 not taken: condition never true] 1abcdefg

535 continue 

536 config.update(self.get_config_update(arg_name, arg, lax_extra=True)) 1abcdefg

537 elif isinstance(stmt.rvalue, DictExpr): # dict literals  [branch 537 ↛ 554 not taken: condition always true] 1abcdefg

538 for key_expr, value_expr in stmt.rvalue.items: 1abcdefg

539 if not isinstance(key_expr, StrExpr):  [branch 539 ↛ 540 not taken: condition never true] 1abcdefg

540 continue 

541 config.update(self.get_config_update(key_expr.value, value_expr)) 1abcdefg

542 

543 elif isinstance(stmt, ClassDef):  [branch 543 ↛ 554 not taken: condition always true] 1abcdefg

544 if stmt.name != 'Config': # 'deprecated' Config-class 1abcdefg

545 continue 1abcdefg

546 for substmt in stmt.defs.body: 1abcdefg

547 if not isinstance(substmt, AssignmentStmt): 1abcdefg

548 continue 1abcdefg

549 lhs = substmt.lvalues[0] 1abcdefg

550 if not isinstance(lhs, NameExpr):  [branch 550 ↛ 551 not taken: condition never true] 1abcdefg

551 continue 

552 config.update(self.get_config_update(lhs.name, substmt.rvalue)) 1abcdefg

553 

554 if has_config_kwargs:  [branch 554 ↛ 555 not taken: condition never true] 1abcdefg

555 self._api.fail( 

556 'Specifying config in two places is ambiguous, use either Config attribute or class kwargs', 

557 cls, 

558 ) 

559 break 

560 

561 has_config_from_namespace = True 1abcdefg

562 

563 if has_config_kwargs or has_config_from_namespace: 1abcdefg

564 if ( 1abc

565 stmt 

566 and config.has_alias_generator 

567 and not config.populate_by_name 

568 and self.plugin_config.warn_required_dynamic_aliases 

569 ): 

570 error_required_dynamic_aliases(self._api, stmt) 1abcdefg

571 

572 for info in cls.info.mro[1:]: # 0 is the current class 1abcdefg

573 if METADATA_KEY not in info.metadata: 1abcdefg

574 continue 1abcdefg

575 

576 # Each class depends on the set of fields in its ancestors 

577 self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname)) 1abcdefg

578 for name, value in info.metadata[METADATA_KEY]['config'].items(): 1abcdefg

579 config.setdefault(name, value) 1abcdefg

580 return config 1abcdefg
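# --- Editor's sketch (not part of pydantic/mypy.py): the three config styles recognized above.
# Class names are illustrative only.
from pydantic import BaseModel, ConfigDict

class ViaKwargs(BaseModel, frozen=True):          # class keyword arguments
    x: int = 0

class ViaModelConfig(BaseModel):                  # `model_config` assignment
    model_config = ConfigDict(extra='forbid')
    x: int = 0

class ViaConfigClass(BaseModel):                  # deprecated nested `Config` class
    class Config:
        populate_by_name = True

    x: int = 0
# --- end sketch ---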

581 

582 def collect_fields_and_class_vars( 1abcdefg

583 self, model_config: ModelConfigData, is_root_model: bool 

584 ) -> tuple[list[PydanticModelField] | None, list[PydanticModelClassVar] | None]: 

585 """Collects the fields for the model, accounting for parent classes.""" 

586 cls = self._cls 1abcdefg

587 

588 # First, collect fields and ClassVars belonging to any class in the MRO, ignoring duplicates. 

589 # 

590 # We iterate through the MRO in reverse because attrs defined in the parent must appear 

591 # earlier in the attributes list than attrs defined in the child. See: 

592 # https://docs.python.org/3/library/dataclasses.html#inheritance 

593 # 

594 # However, we also want fields defined in the subtype to override ones defined 

595 # in the parent. We can implement this via a dict without disrupting the attr order 

596 # because dicts preserve insertion order in Python 3.7+. 

597 found_fields: dict[str, PydanticModelField] = {} 1abcdefg

598 found_class_vars: dict[str, PydanticModelClassVar] = {} 1abcdefg

599 for info in reversed(cls.info.mro[1:-1]): # 0 is the current class, -2 is BaseModel, -1 is object 1abcdefg

600 # if BASEMODEL_METADATA_TAG_KEY in info.metadata and BASEMODEL_METADATA_KEY not in info.metadata: 

601 # # We haven't processed the base class yet. Need another pass. 

602 # return None, None 

603 if METADATA_KEY not in info.metadata: 1abcdefg

604 continue 1abcdefg

605 

606 # Each class depends on the set of attributes in its dataclass ancestors. 

607 self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname)) 1abcdefg

608 

609 for name, data in info.metadata[METADATA_KEY]['fields'].items(): 1abcdefg

610 field = PydanticModelField.deserialize(info, data, self._api) 1abcdefg

611 # (The following comment comes directly from the dataclasses plugin) 

612 # TODO: We shouldn't be performing type operations during the main 

613 # semantic analysis pass, since some TypeInfo attributes might 

614 # still be in flux. This should be performed in a later phase. 

615 field.expand_typevar_from_subtype(cls.info, self._api) 1abcdefg

616 found_fields[name] = field 1abcdefg

617 

618 sym_node = cls.info.names.get(name) 1abcdefg

619 if sym_node and sym_node.node and not isinstance(sym_node.node, Var):  [branch 619 ↛ 620 not taken: condition never true] 1abcdefg

620 self._api.fail( 

621 'BaseModel field may only be overridden by another field', 

622 sym_node.node, 

623 ) 

624 # Collect ClassVars 

625 for name, data in info.metadata[METADATA_KEY]['class_vars'].items():  [branch 625 ↛ 626 not taken: loop never started] 1abcdefg

626 found_class_vars[name] = PydanticModelClassVar.deserialize(data) 

627 

628 # Second, collect fields and ClassVars belonging to the current class. 

629 current_field_names: set[str] = set() 1abcdefg

630 current_class_vars_names: set[str] = set() 1abcdefg

631 for stmt in self._get_assignment_statements_from_block(cls.defs): 1abcdefg

632 maybe_field = self.collect_field_or_class_var_from_stmt(stmt, model_config, found_class_vars) 1abcdefg

633 if maybe_field is None: 1abcdefg

634 continue 1abcdefg

635 

636 lhs = stmt.lvalues[0] 1abcdefg

637 assert isinstance(lhs, NameExpr) # collect_field_or_class_var_from_stmt guarantees this 1abcdefg

638 if isinstance(maybe_field, PydanticModelField): 1abcdefg

639 if is_root_model and lhs.name != 'root': 1abcdefg

640 error_extra_fields_on_root_model(self._api, stmt) 1abcdefg

641 else: 

642 current_field_names.add(lhs.name) 1abcdefg

643 found_fields[lhs.name] = maybe_field 1abcdefg

644 elif isinstance(maybe_field, PydanticModelClassVar):  [branch 644 ↛ 631 not taken: condition always true] 1abcdefg

645 current_class_vars_names.add(lhs.name) 1abcdefg

646 found_class_vars[lhs.name] = maybe_field 1abcdefg

647 

648 return list(found_fields.values()), list(found_class_vars.values()) 1abcdefg
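# --- Editor's sketch (not part of pydantic/mypy.py): field ordering across the MRO.
# A field redefined in a subclass replaces the parent's entry but keeps its original position.
from pydantic import BaseModel

class Parent(BaseModel):
    a: int
    b: int = 0

class Child(Parent):
    b: int = 1       # overrides Parent.b but keeps its original position
    c: float = 0.0

# collected order used for the generated __init__: a, b, c
# --- end sketch ---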

649 

650 def _get_assignment_statements_from_if_statement(self, stmt: IfStmt) -> Iterator[AssignmentStmt]: 1abcdefg

651 for body in stmt.body: 1abcdefg

652 if not body.is_unreachable:  [branch 652 ↛ 651 not taken: condition always true] 1abcdefg

653 yield from self._get_assignment_statements_from_block(body) 1abcdefg

654 if stmt.else_body is not None and not stmt.else_body.is_unreachable:  [branch 654 ↛ 655 not taken: condition never true] 1abcdefg

655 yield from self._get_assignment_statements_from_block(stmt.else_body) 

656 

657 def _get_assignment_statements_from_block(self, block: Block) -> Iterator[AssignmentStmt]: 1abcdefg

658 for stmt in block.body: 1abcdefg

659 if isinstance(stmt, AssignmentStmt): 1abcdefg

660 yield stmt 1abcdefg

661 elif isinstance(stmt, IfStmt): 1abcdefg

662 yield from self._get_assignment_statements_from_if_statement(stmt) 1abcdefg

663 

664 def collect_field_or_class_var_from_stmt( # noqa C901 1abcdefg

665 self, stmt: AssignmentStmt, model_config: ModelConfigData, class_vars: dict[str, PydanticModelClassVar] 

666 ) -> PydanticModelField | PydanticModelClassVar | None: 

667 """Get pydantic model field from statement. 

668 

669 Args: 

670 stmt: The statement. 

671 model_config: Configuration settings for the model. 

672 class_vars: ClassVars already known to be defined on the model. 

673 

674 Returns: 

675 A pydantic model field if one could be found in the statement. Otherwise, `None`. 

676 """ 

677 cls = self._cls 1abcdefg

678 

679 lhs = stmt.lvalues[0] 1abcdefg

680 if not isinstance(lhs, NameExpr) or not _fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config': 1abcdefg

681 return None 1abcdefg

682 

683 if not stmt.new_syntax: 1abcdefg

684 if ( 1abc

685 isinstance(stmt.rvalue, CallExpr) 

686 and isinstance(stmt.rvalue.callee, CallExpr) 

687 and isinstance(stmt.rvalue.callee.callee, NameExpr) 

688 and stmt.rvalue.callee.callee.fullname in DECORATOR_FULLNAMES 

689 ): 

690 # This is a (possibly-reused) validator or serializer, not a field 

691 # In particular, it looks something like: my_validator = validator('my_field')(f) 

692 # Eventually, we may want to attempt to respect model_config['ignored_types'] 

693 return None 1abcdefg

694 

695 if lhs.name in class_vars:  [branch 695 ↛ 697 not taken: condition never true] 1abcdefg

696 # Class vars are not fields and are not required to be annotated 

697 return None 

698 

699 # The assignment does not have an annotation, and it's not anything else we recognize 

700 error_untyped_fields(self._api, stmt) 1abcdefg

701 return None 1abcdefg

702 

703 lhs = stmt.lvalues[0] 1abcdefg

704 if not isinstance(lhs, NameExpr):  [branch 704 ↛ 705 not taken: condition never true] 1abcdefg

705 return None 

706 

707 if not _fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config':  [branch 707 ↛ 708 not taken: condition never true] 1abcdefg

708 return None 

709 

710 sym = cls.info.names.get(lhs.name) 1abcdefg

711 if sym is None: # pragma: no cover 1abcdefg

712 # This is likely due to a star import (see the dataclasses plugin for a more detailed explanation) 

713 # This is the same logic used in the dataclasses plugin 

714 return None 

715 

716 node = sym.node 1abcdefg

717 if isinstance(node, PlaceholderNode): # pragma: no cover 1abcdefg

718 # See the PlaceholderNode docstring for more detail about how this can occur 

719 # Basically, it is an edge case when dealing with complex import logic 

720 

721 # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does.. 

722 return None 

723 

724 if isinstance(node, TypeAlias): 1abcdefg

725 self._api.fail( 

726 'Type aliases inside BaseModel definitions are not supported at runtime', 

727 node, 

728 ) 

729 # Skip processing this node. This doesn't match the runtime behaviour, 

730 # but the only alternative would be to modify the SymbolTable, 

731 # and it's a little hairy to do that in a plugin. 

732 return None 

733 

734 if not isinstance(node, Var): # pragma: no cover 1abcdefg

735 # Don't know if this edge case still happens with the `is_valid_field` check above 

736 # but better safe than sorry 

737 

738 # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does.. 

739 return None 

740 

741 # x: ClassVar[int] is not a field 

742 if node.is_classvar: 1abcdefg

743 return PydanticModelClassVar(lhs.name) 1abcdefg

744 

745 # x: InitVar[int] is not supported in BaseModel 

746 node_type = get_proper_type(node.type) 1abcdefg

747 if isinstance(node_type, Instance) and node_type.type.fullname == 'dataclasses.InitVar':  [branch 747 ↛ 748 not taken: condition never true] 1abcdefg

748 self._api.fail( 

749 'InitVar is not supported in BaseModel', 

750 node, 

751 ) 

752 

753 has_default = self.get_has_default(stmt) 1abcdefg

754 strict = self.get_strict(stmt) 1abcdefg

755 

756 if sym.type is None and node.is_final and node.is_inferred:  [branch 756 ↛ 764 not taken: condition never true] 1abcdefg

757 # This follows the logic from the dataclasses plugin. The following comment is taken verbatim: 

758 # 

759 # This is a special case, assignment like x: Final = 42 is classified 

760 # annotated above, but mypy strips the `Final` turning it into x = 42. 

761 # We do not support inferred types in dataclasses, so we can try inferring 

762 # type for simple literals, and otherwise require an explicit type 

763 # argument for Final[...]. 

764 typ = self._api.analyze_simple_literal_type(stmt.rvalue, is_final=True) 

765 if typ: 

766 node.type = typ 

767 else: 

768 self._api.fail( 

769 'Need type argument for Final[...] with non-literal default in BaseModel', 

770 stmt, 

771 ) 

772 node.type = AnyType(TypeOfAny.from_error) 

773 

774 if node.is_final and has_default: 1abcdefg

775 # TODO this path should be removed (see https://github.com/pydantic/pydantic/issues/11119) 

776 return PydanticModelClassVar(lhs.name) 1abcdefg

777 

778 alias, has_dynamic_alias = self.get_alias_info(stmt) 1abcdefg

779 if has_dynamic_alias and not model_config.populate_by_name and self.plugin_config.warn_required_dynamic_aliases: 1abcdefg

780 error_required_dynamic_aliases(self._api, stmt) 1abcdefg

781 is_frozen = self.is_field_frozen(stmt) 1abcdefg

782 

783 init_type = self._infer_dataclass_attr_init_type(sym, lhs.name, stmt) 1abcdefg

784 return PydanticModelField( 1abcdefg

785 name=lhs.name, 

786 has_dynamic_alias=has_dynamic_alias, 

787 has_default=has_default, 

788 strict=strict, 

789 alias=alias, 

790 is_frozen=is_frozen, 

791 line=stmt.line, 

792 column=stmt.column, 

793 type=init_type, 

794 info=cls.info, 

795 ) 

796 

797 def _infer_dataclass_attr_init_type(self, sym: SymbolTableNode, name: str, context: Context) -> Type | None: 1abcdefg

798 """Infer __init__ argument type for an attribute. 

799 

800 In particular, possibly use the signature of __set__. 

801 """ 

802 default = sym.type 1abcdefg

803 if sym.implicit:  [branch 803 ↛ 804 not taken: condition never true] 1abcdefg

804 return default 

805 t = get_proper_type(sym.type) 1abcdefg

806 

807 # Perform a simple-minded inference from the signature of __set__, if present. 

808 # We can't use mypy.checkmember here, since this plugin runs before type checking. 

809 # We only support some basic scenarios here, which is hopefully sufficient for 

810 # the vast majority of use cases. 

811 if not isinstance(t, Instance): 1abcdefg

812 return default 1abcdefg

813 setter = t.type.get('__set__') 1abcdefg

814 if setter:  [branch 814 ↛ 815 not taken: condition never true] 1abcdefg

815 if isinstance(setter.node, FuncDef): 

816 super_info = t.type.get_containing_type_info('__set__') 

817 assert super_info 

818 if setter.type: 

819 setter_type = get_proper_type(map_type_from_supertype(setter.type, t.type, super_info)) 

820 else: 

821 return AnyType(TypeOfAny.unannotated) 

822 if isinstance(setter_type, CallableType) and setter_type.arg_kinds == [ 

823 ARG_POS, 

824 ARG_POS, 

825 ARG_POS, 

826 ]: 

827 return expand_type_by_instance(setter_type.arg_types[2], t) 

828 else: 

829 self._api.fail(f'Unsupported signature for "__set__" in "{t.type.name}"', context) 

830 else: 

831 self._api.fail(f'Unsupported "__set__" in "{t.type.name}"', context) 

832 

833 return default 1abcdefg

834 

835 def add_initializer( 1abcdefg

836 self, fields: list[PydanticModelField], config: ModelConfigData, is_settings: bool, is_root_model: bool 

837 ) -> None: 

838 """Adds a fields-aware `__init__` method to the class. 

839 

840 The added `__init__` will be annotated with types vs. all `Any` depending on the plugin settings. 

841 """ 

842 if '__init__' in self._cls.info.names and not self._cls.info.names['__init__'].plugin_generated: 1abcdefg

843 return # Don't generate an __init__ if one already exists 1abcdefg

844 

845 typed = self.plugin_config.init_typed 1abcdefg

846 model_strict = bool(config.strict) 1abcdefg

847 use_alias = config.populate_by_name is not True 1abcdefg

848 requires_dynamic_aliases = bool(config.has_alias_generator and not config.populate_by_name) 1abcdefg

849 args = self.get_field_arguments( 1abcdefg

850 fields, 

851 typed=typed, 

852 model_strict=model_strict, 

853 requires_dynamic_aliases=requires_dynamic_aliases, 

854 use_alias=use_alias, 

855 is_settings=is_settings, 

856 is_root_model=is_root_model, 

857 force_typevars_invariant=True, 

858 ) 

859 

860 if is_settings: 1abcdefg

861 base_settings_node = self._api.lookup_fully_qualified(BASESETTINGS_FULLNAME).node 1abcdefg

862 assert isinstance(base_settings_node, TypeInfo) 1abcdefg

863 if '__init__' in base_settings_node.names:  [branch 863 ↛ 876 not taken: condition always true] 1abcdefg

864 base_settings_init_node = base_settings_node.names['__init__'].node 1abcdefg

865 assert isinstance(base_settings_init_node, FuncDef) 1abcdefg

866 if base_settings_init_node is not None and base_settings_init_node.type is not None:  [branch 866 ↛ 876 not taken: condition always true] 1abcdefg

867 func_type = base_settings_init_node.type 1abcdefg

868 assert isinstance(func_type, CallableType) 1abcdefg

869 for arg_idx, arg_name in enumerate(func_type.arg_names): 1abcdefg

870 if arg_name is None or arg_name.startswith('__') or not arg_name.startswith('_'): 1abcdefg

871 continue 1abcdefg

872 analyzed_variable_type = self._api.anal_type(func_type.arg_types[arg_idx]) 1abcdefg

873 variable = Var(arg_name, analyzed_variable_type) 1abcdefg

874 args.append(Argument(variable, analyzed_variable_type, None, ARG_OPT)) 1abcdefg

875 

876 if not self.should_init_forbid_extra(fields, config): 1abcdefg

877 var = Var('kwargs') 1abcdefg

878 args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2)) 1abcdefg

879 

880 add_method(self._api, self._cls, '__init__', args=args, return_type=NoneType()) 1abcdefg

881 

882 def add_model_construct_method( 1abcdefg

883 self, 

884 fields: list[PydanticModelField], 

885 config: ModelConfigData, 

886 is_settings: bool, 

887 is_root_model: bool, 

888 ) -> None: 

889 """Adds a fully typed `model_construct` classmethod to the class. 

890 

891 Similar to the fields-aware __init__ method, but always uses the field names (not aliases), 

892 and does not treat settings fields as optional. 

893 """ 

894 set_str = self._api.named_type(f'{BUILTINS_NAME}.set', [self._api.named_type(f'{BUILTINS_NAME}.str')]) 1abcdefg

895 optional_set_str = UnionType([set_str, NoneType()]) 1abcdefg

896 fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT) 1abcdefg

897 with state.strict_optional_set(self._api.options.strict_optional): 1abcdefg

898 args = self.get_field_arguments( 1abcdefg

899 fields, 

900 typed=True, 

901 model_strict=bool(config.strict), 

902 requires_dynamic_aliases=False, 

903 use_alias=False, 

904 is_settings=is_settings, 

905 is_root_model=is_root_model, 

906 ) 

907 if not self.should_init_forbid_extra(fields, config): 1abcdefg

908 var = Var('kwargs') 1abcdefg

909 args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2)) 1abcdefg

910 

911 args = args + [fields_set_argument] if is_root_model else [fields_set_argument] + args 1abcdefg

912 

913 add_method( 1abcdefg

914 self._api, 

915 self._cls, 

916 'model_construct', 

917 args=args, 

918 return_type=fill_typevars(self._cls.info), 

919 is_classmethod=True, 

920 ) 

921 

922 def set_frozen(self, fields: list[PydanticModelField], api: SemanticAnalyzerPluginInterface, frozen: bool) -> None: 1abcdefg

923 """Marks all fields as properties so that attempts to set them trigger mypy errors. 

924 

925 This is the same approach used by the attrs and dataclasses plugins. 

926 """ 

927 info = self._cls.info 1abcdefg

928 for field in fields: 1abcdefg

929 sym_node = info.names.get(field.name) 1abcdefg

930 if sym_node is not None: 1abcdefg

931 var = sym_node.node 1abcdefg

932 if isinstance(var, Var):  [branch 932 ↛ 934 not taken: condition always true] 1abcdefg

933 var.is_property = frozen or field.is_frozen 1abcdefg

934 elif isinstance(var, PlaceholderNode) and not self._api.final_iteration: 

935 # See https://github.com/pydantic/pydantic/issues/5191 to hit this branch for test coverage 

936 self._api.defer() 

937 else: # pragma: no cover 

938 # I don't know whether it's possible to hit this branch, but I've added it for safety 

939 try: 

940 var_str = str(var) 

941 except TypeError: 

942 # This happens for PlaceholderNode; perhaps it will happen for other types in the future.. 

943 var_str = repr(var) 

944 detail = f'sym_node.node: {var_str} (of type {var.__class__})' 

945 error_unexpected_behavior(detail, self._api, self._cls) 

946 else: 

947 var = field.to_var(info, api, use_alias=False) 1abcdefg

948 var.info = info 1abcdefg

949 var.is_property = frozen 1abcdefg

950 var._fullname = info.fullname + '.' + var.name 1abcdefg

951 info.names[var.name] = SymbolTableNode(MDEF, var) 1abcdefg
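# --- Editor's sketch (not part of pydantic/mypy.py): the effect of marking fields as properties.
# Class name is illustrative only.
from pydantic import BaseModel, ConfigDict

class Point(BaseModel):
    model_config = ConfigDict(frozen=True)
    x: int

p = Point(x=1)
p.x = 2   # mypy reports a read-only property error here (and pydantic raises at runtime)
# --- end sketch ---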

952 

953 def get_config_update(self, name: str, arg: Expression, lax_extra: bool = False) -> ModelConfigData | None: 1abcdefg

954 """Determines the config update due to a single kwarg in the ConfigDict definition. 

955 

956 Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int) 

957 """ 

958 if name not in self.tracked_config_fields: 1abcdefg

959 return None 1abcdefg

960 if name == 'extra': 1abcdefg

961 if isinstance(arg, StrExpr): 1abcdefg

962 forbid_extra = arg.value == 'forbid' 1abcdefg

963 elif isinstance(arg, MemberExpr): 1abcdefg

964 forbid_extra = arg.name == 'forbid' 1abcdefg

965 else: 

966 if not lax_extra: 1abcdefg

967 # Only emit an error for other types of `arg` (e.g., `NameExpr`, `ConditionalExpr`, etc.) when 

968 # reading from a config class, etc. If a ConfigDict is used, then we don't want to emit an error 

969 # because you'll get type checking from the ConfigDict itself. 

970 # 

971 # It would be nice if we could introspect the types better otherwise, but I don't know what the API 

972 # is to evaluate an expr into its type and then check if that type is compatible with the expected 

973 # type. Note that you can still get proper type checking via: `model_config = ConfigDict(...)`, just 

974 # if you don't use an explicit string, the plugin won't be able to infer whether extra is forbidden. 

975 error_invalid_config_value(name, self._api, arg) 1abcdefg

976 return None 1abcdefg

977 return ModelConfigData(forbid_extra=forbid_extra) 1abcdefg

978 if name == 'alias_generator': 1abcdefg

979 has_alias_generator = True 1abcdefg

980 if isinstance(arg, NameExpr) and arg.fullname == 'builtins.None': 1abcdefg

981 has_alias_generator = False 1abcdefg

982 return ModelConfigData(has_alias_generator=has_alias_generator) 1abcdefg

983 if isinstance(arg, NameExpr) and arg.fullname in ('builtins.True', 'builtins.False'): 1abcdefg

984 return ModelConfigData(**{name: arg.fullname == 'builtins.True'}) 1abcdefg

985 error_invalid_config_value(name, self._api, arg) 1abcdefg

986 return None 1abcdefg

987 

988 @staticmethod 1abcdefg

989 def get_has_default(stmt: AssignmentStmt) -> bool: 1abcdefg

990 """Returns a boolean indicating whether the field defined in `stmt` has a default value.""" 

991 expr = stmt.rvalue 1abcdefg

992 if isinstance(expr, TempNode): 1abcdefg

993 # TempNode means annotation-only, so has no default 

994 return False 1abcdefg

995 if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME: 1abcdefg

996 # The "default value" is a call to `Field`; at this point, the field has a default if and only if: 

997 # * there is a positional argument that is not `...` 

998 # * there is a keyword argument named "default" that is not `...` 

999 # * there is a "default_factory" that is not `None` 

1000 for arg, name in zip(expr.args, expr.arg_names): 1abcdefg

1001 # If name is None, then this arg is the default because it is the only positional argument. 

1002 if name is None or name == 'default': 1abcdefg

1003 return arg.__class__ is not EllipsisExpr 1abcdefg

1004 if name == 'default_factory': 1abcdefg

1005 return not (isinstance(arg, NameExpr) and arg.fullname == 'builtins.None') 1abcdefg

1006 return False 1abcdefg

1007 # Has no default if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`) 

1008 return not isinstance(expr, EllipsisExpr) 1abcdefg
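# --- Editor's sketch (not part of pydantic/mypy.py): how these declarations are classified.
from pydantic import BaseModel, Field

class M(BaseModel):
    a: int                                  # annotation only      -> no default
    b: int = 1                              # plain default        -> default
    c: int = Field(default=1)               # keyword default      -> default
    d: list = Field(default_factory=list)   # non-None factory     -> default
    e: int = Field(...)                     # Ellipsis             -> no default
    f: int = ...                            # Ellipsis             -> no default
# --- end sketch ---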

1009 

1010 @staticmethod 1abcdefg

1011 def get_strict(stmt: AssignmentStmt) -> bool | None: 1abcdefg

1012 """Returns the `strict` value of a field if defined, otherwise `None`.""" 

1013 expr = stmt.rvalue 1abcdefg

1014 if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME: 1abcdefg

1015 for arg, name in zip(expr.args, expr.arg_names): 1abcdefg

1016 if name != 'strict': 1abcdefg

1017 continue 1abcdefg

1018 if isinstance(arg, NameExpr):  [branch 1018 ↛ 1023 not taken: condition always true] 1abcdefg

1019 if arg.fullname == 'builtins.True': 1abcdefg

1020 return True 1abcdefg

1021 elif arg.fullname == 'builtins.False':  [branch 1021 ↛ 1023 not taken: condition always true] 1abcdefg

1022 return False 1abcdefg

1023 return None 

1024 return None 1abcdefg

1025 

1026 @staticmethod 1abcdefg

1027 def get_alias_info(stmt: AssignmentStmt) -> tuple[str | None, bool]: 1abcdefg

1028 """Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`. 

1029 

1030 `has_dynamic_alias` is True if and only if an alias is provided, but not as a string literal. 

1031 If `has_dynamic_alias` is True, `alias` will be None. 

1032 """ 

1033 expr = stmt.rvalue 1abcdefg

1034 if isinstance(expr, TempNode): 1abcdefg

1035 # TempNode means annotation-only 

1036 return None, False 1abcdefg

1037 

1038 if not ( 1abc

1039 isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME 

1040 ): 

1041 # Assigned value is not a call to pydantic.fields.Field 

1042 return None, False 1abcdefg

1043 

1044 if 'validation_alias' in expr.arg_names: 1abcdefg

1045 arg = expr.args[expr.arg_names.index('validation_alias')] 1abcdefg

1046 elif 'alias' in expr.arg_names: 1abcdefg

1047 arg = expr.args[expr.arg_names.index('alias')] 1abcdefg

1048 else: 

1049 return None, False 1abcdefg

1050 

1051 if isinstance(arg, StrExpr): 1abcdefg

1052 return arg.value, False 1abcdefg

1053 else: 

1054 return None, True 1abcdefg
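# --- Editor's sketch (not part of pydantic/mypy.py): static vs. "dynamic" aliases.
# The helper `upper` is illustrative only.
from pydantic import BaseModel, Field

def upper(name: str) -> str:
    return name.upper()

class M(BaseModel):
    a: int = Field(alias='A')          # string literal -> alias='A', has_dynamic_alias=False
    b: int = Field(alias=upper('b'))   # any other expr -> alias=None, has_dynamic_alias=True
# --- end sketch ---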

1055 

1056 @staticmethod 1abcdefg

1057 def is_field_frozen(stmt: AssignmentStmt) -> bool: 1abcdefg

1058 """Returns whether the field is frozen, extracted from the declaration of the field defined in `stmt`. 

1059 

1060 Note that this is only whether the field was declared to be frozen in a `<field_name> = Field(frozen=True)` 

1061 sense; this does not determine whether the field is frozen because the entire model is frozen; that is 

1062 handled separately. 

1063 """ 

1064 expr = stmt.rvalue 1abcdefg

1065 if isinstance(expr, TempNode): 1abcdefg

1066 # TempNode means annotation-only 

1067 return False 1abcdefg

1068 

1069 if not ( 1abc

1070 isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME 

1071 ): 

1072 # Assigned value is not a call to pydantic.fields.Field 

1073 return False 1abcdefg

1074 

1075 for i, arg_name in enumerate(expr.arg_names): 1abcdefg

1076 if arg_name == 'frozen': 1abcdefg

1077 arg = expr.args[i] 1abcdefg

1078 return isinstance(arg, NameExpr) and arg.fullname == 'builtins.True' 1abcdefg

1079 return False 1abcdefg
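# --- Editor's sketch (not part of pydantic/mypy.py): a per-field frozen declaration.
from pydantic import BaseModel, Field

class M(BaseModel):
    id: int = Field(frozen=True)   # is_field_frozen(...) -> True
    name: str = 'x'                # is_field_frozen(...) -> False
# --- end sketch ---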

1080 

1081 def get_field_arguments( 1abcdefg

1082 self, 

1083 fields: list[PydanticModelField], 

1084 typed: bool, 

1085 model_strict: bool, 

1086 use_alias: bool, 

1087 requires_dynamic_aliases: bool, 

1088 is_settings: bool, 

1089 is_root_model: bool, 

1090 force_typevars_invariant: bool = False, 

1091 ) -> list[Argument]: 

1092 """Helper function used during the construction of the `__init__` and `model_construct` method signatures. 

1093 

1094 Returns a list of mypy Argument instances for use in the generated signatures. 

1095 """ 

1096 info = self._cls.info 1abcdefg

1097 arguments = [ 1abcdefg

1098 field.to_argument( 

1099 info, 

1100 typed=typed, 

1101 model_strict=model_strict, 

1102 force_optional=requires_dynamic_aliases or is_settings, 

1103 use_alias=use_alias, 

1104 api=self._api, 

1105 force_typevars_invariant=force_typevars_invariant, 

1106 is_root_model_root=is_root_model and field.name == 'root', 

1107 ) 

1108 for field in fields 

1109 if not (use_alias and field.has_dynamic_alias) 

1110 ] 

1111 return arguments 1abcdefg

1112 

1113 def should_init_forbid_extra(self, fields: list[PydanticModelField], config: ModelConfigData) -> bool: 1abcdefg

1114 """Indicates whether the generated `__init__` should get a `**kwargs` at the end of its signature. 

1115 

1116 We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to, 

1117 *unless* a required dynamic alias is present (since then we can't determine a valid signature). 

1118 """ 

1119 if not config.populate_by_name: 1abcdefg

1120 if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)): 1abcdefg

1121 return False 1abcdefg

1122 if config.forbid_extra: 1abcdefg

1123 return True 1abcdefg

1124 return self.plugin_config.init_forbid_extra 1abcdefg
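# --- Editor's sketch (not part of pydantic/mypy.py): the generated __init__ with and without **kwargs.
from pydantic import BaseModel, ConfigDict

class Lax(BaseModel):
    x: int = 0

class Strict(BaseModel):
    model_config = ConfigDict(extra='forbid')
    x: int = 0

Lax(x=1, anything=2)      # accepted by mypy: the generated __init__ ends with **kwargs
Strict(x=1, anything=2)   # rejected by mypy: no **kwargs is generated when extra is forbidden
# --- end sketch ---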

1125 

1126 @staticmethod 1abcdefg

1127 def is_dynamic_alias_present(fields: list[PydanticModelField], has_alias_generator: bool) -> bool: 1abcdefg

1128 """Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be 

1129 determined during static analysis. 

1130 """ 

1131 for field in fields: 1abcdefg

1132 if field.has_dynamic_alias: 1abcdefg

1133 return True 1abcdefg

1134 if has_alias_generator: 1abcdefg

1135 for field in fields: 1abcdefg

1136 if field.alias is None: 1abcdefg

1137 return True 1abcdefg

1138 return False 1abcdefg

1139 

1140 

1141class ModelConfigData: 1abcdefg

1142 """Pydantic mypy plugin model config class.""" 

1143 

1144 def __init__( 1abcdefg

1145 self, 

1146 forbid_extra: bool | None = None, 

1147 frozen: bool | None = None, 

1148 from_attributes: bool | None = None, 

1149 populate_by_name: bool | None = None, 

1150 has_alias_generator: bool | None = None, 

1151 strict: bool | None = None, 

1152 ): 

1153 self.forbid_extra = forbid_extra 1abcdefg

1154 self.frozen = frozen 1abcdefg

1155 self.from_attributes = from_attributes 1abcdefg

1156 self.populate_by_name = populate_by_name 1abcdefg

1157 self.has_alias_generator = has_alias_generator 1abcdefg

1158 self.strict = strict 1abcdefg

1159 

1160 def get_values_dict(self) -> dict[str, Any]: 1abcdefg

1161 """Returns a dict of Pydantic model config names to their values. 

1162 

1163 A config value is included only if it is not `None`. 

1164 """ 

1165 return {k: v for k, v in self.__dict__.items() if v is not None} 1abcdefg

1166 

1167 def update(self, config: ModelConfigData | None) -> None: 1abcdefg

1168 """Update Pydantic model config values.""" 

1169 if config is None: 1abcdefg

1170 return 1abcdefg

1171 for k, v in config.get_values_dict().items(): 1abcdefg

1172 setattr(self, k, v) 1abcdefg

1173 

1174 def setdefault(self, key: str, value: Any) -> None: 1abcdefg

1175 """Set default value for Pydantic model config if config value is `None`.""" 

1176 if getattr(self, key) is None: 1abcdefg

1177 setattr(self, key, value) 1abcdefg

1178 

1179 

1180def is_root_model(info: TypeInfo) -> bool: 1abcdefg

1181 """Return whether the type info is a root model subclass (or the `RootModel` class itself).""" 

1182 return info.has_base(ROOT_MODEL_FULLNAME) 1abcdefg
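# --- Editor's sketch (not part of pydantic/mypy.py): a RootModel subclass, and the union typing
# applied in PydanticModelField.expand_type() when such a model is used as a field.
from pydantic import BaseModel, RootModel

class Names(RootModel[list[str]]):
    pass

class M(BaseModel):
    names: Names

M(names=Names(['a', 'b']))   # an instance of the root model is accepted...
M(names=['a', 'b'])          # ...and so is a value of the root type itself
# --- end sketch ---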

1183 

1184 

1185ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_attributes call', 'Pydantic') 1abcdefg

1186ERROR_CONFIG = ErrorCode('pydantic-config', 'Invalid config value', 'Pydantic') 1abcdefg

1187ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed', 'Pydantic') 1abcdefg

1188ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior', 'Pydantic') 1abcdefg

1189ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed', 'Pydantic') 1abcdefg

1190ERROR_FIELD_DEFAULTS = ErrorCode('pydantic-field', 'Invalid Field defaults', 'Pydantic') 1abcdefg

1191ERROR_EXTRA_FIELD_ROOT_MODEL = ErrorCode('pydantic-field', 'Extra field on RootModel subclass', 'Pydantic') 1abcdefg

1192 

1193 

1194def error_from_attributes(model_name: str, api: CheckerPluginInterface, context: Context) -> None: 1abcdefg

1195 """Emits an error when the model does not have `from_attributes=True`.""" 

1196 api.fail(f'"{model_name}" does not have from_attributes=True', context, code=ERROR_ORM) 1abcdefg

1197 

1198 

1199def error_invalid_config_value(name: str, api: SemanticAnalyzerPluginInterface, context: Context) -> None: 1abcdefg

1200 """Emits an error when the config value is invalid.""" 

1201 api.fail(f'Invalid value for "Config.{name}"', context, code=ERROR_CONFIG) 1abcdefg

1202 

1203 

1204def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface, context: Context) -> None: 1abcdefg

1205 """Emits required dynamic aliases error. 

1206 

1207 This will be called when `warn_required_dynamic_aliases=True`. 

1208 """ 

1209 api.fail('Required dynamic aliases disallowed', context, code=ERROR_ALIAS) 1abcdefg

1210 

1211 

1212def error_unexpected_behavior( 1abcdefg

1213 detail: str, api: CheckerPluginInterface | SemanticAnalyzerPluginInterface, context: Context 1defg

1214) -> None: # pragma: no cover 1defg

1215 """Emits unexpected behavior error.""" 

1216 # Can't think of a good way to test this, but I confirmed it renders as desired by adding to a non-error path 

1217 link = 'https://github.com/pydantic/pydantic/issues/new/choose' 

1218 full_message = f'The pydantic mypy plugin ran into unexpected behavior: {detail}\n' 

1219 full_message += f'Please consider reporting this bug at {link} so we can try to fix it!' 

1220 api.fail(full_message, context, code=ERROR_UNEXPECTED) 

1221 

1222 

1223def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context) -> None: 1abcdefg

1224 """Emits an error when there is an untyped field in the model.""" 

1225 api.fail('Untyped fields disallowed', context, code=ERROR_UNTYPED) 1abcdefg

1226 

1227 

1228def error_extra_fields_on_root_model(api: CheckerPluginInterface, context: Context) -> None: 1abcdefg

1229 """Emits an error when there is more than just a root field defined for a subclass of RootModel.""" 

1230 api.fail('Only `root` is allowed as a field of a `RootModel`', context, code=ERROR_EXTRA_FIELD_ROOT_MODEL) 1abcdefg

1231 

1232 

1233def add_method( 1abcdefg

1234 api: SemanticAnalyzerPluginInterface | CheckerPluginInterface, 

1235 cls: ClassDef, 

1236 name: str, 

1237 args: list[Argument], 

1238 return_type: Type, 

1239 self_type: Type | None = None, 

1240 tvar_def: TypeVarType | None = None, 

1241 is_classmethod: bool = False, 

1242) -> None: 

1243 """Very closely related to `mypy.plugins.common.add_method_to_class`, with a few pydantic-specific changes.""" 

1244 info = cls.info 1abcdefg

1245 

1246 # First remove any previously generated methods with the same name 

1247 # to avoid clashes and problems in the semantic analyzer. 

1248 if name in info.names: 1abcdefg

1249 sym = info.names[name] 1abcdefg

1250 if sym.plugin_generated and isinstance(sym.node, FuncDef): 1abcdefg

1251 cls.defs.body.remove(sym.node) # pragma: no cover 1abcdefg

1252 

1253 if isinstance(api, SemanticAnalyzerPluginInterface):  [branch 1253 ↛ 1256 not taken: condition always true] 1abcdefg

1254 function_type = api.named_type('builtins.function') 1abcdefg

1255 else: 

1256 function_type = api.named_generic_type('builtins.function', []) 

1257 

1258 if is_classmethod: 1abcdefg

1259 self_type = self_type or TypeType(fill_typevars(info)) 1abcdefg

1260 first = [Argument(Var('_cls'), self_type, None, ARG_POS, True)] 1abcdefg

1261 else: 

1262 self_type = self_type or fill_typevars(info) 1abcdefg

1263 # `self` is positional *ONLY* here, but this can't be expressed 

1264 # fully in the mypy internal API. ARG_POS is the closest we can get. 

1265 # Using ARG_POS will, however, give mypy errors if a `self` field 

1266 # is present on a model: 

1267 # 

1268 # Name "self" already defined (possibly by an import) [no-redef] 

1269 # 

1270 # As a workaround, we give this argument a name that will 

1271 # never conflict. By its positional nature, this name will not 

1272 # be used or exposed to users. 

1273 first = [Argument(Var('__pydantic_self__'), self_type, None, ARG_POS)] 1abcdefg

1274 args = first + args 1abcdefg

1275 

1276 arg_types, arg_names, arg_kinds = [], [], [] 1abcdefg

1277 for arg in args: 1abcdefg

1278 assert arg.type_annotation, 'All arguments must be fully typed.' 1abcdefg

1279 arg_types.append(arg.type_annotation) 1abcdefg

1280 arg_names.append(arg.variable.name) 1abcdefg

1281 arg_kinds.append(arg.kind) 1abcdefg

1282 

1283 signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type) 1abcdefg

1284 if tvar_def:  [branch 1284 ↛ 1285 not taken: condition never true] 1abcdefg

1285 signature.variables = [tvar_def] 

1286 

1287 func = FuncDef(name, args, Block([PassStmt()])) 1abcdefg

1288 func.info = info 1abcdefg

1289 func.type = set_callable_name(signature, func) 1abcdefg

1290 func.is_class = is_classmethod 1abcdefg

1291 func._fullname = info.fullname + '.' + name 1abcdefg

1292 func.line = info.line 1abcdefg

1293 

1294 # NOTE: we would like the plugin generated node to dominate, but we still 

1295 # need to keep any existing definitions so they get semantically analyzed. 

1296 if name in info.names: 1abcdefg

1297 # Get a nice unique name instead. 

1298 r_name = get_unique_redefinition_name(name, info.names) 1abcdefg

1299 info.names[r_name] = info.names[name] 1abcdefg

1300 

1301 # Add decorator for is_classmethod 

1302 # The dataclasses plugin claims this is unnecessary for classmethods, but not including it results in a 

1303 # signature incompatible with the superclass, which causes mypy errors to occur for every subclass of BaseModel. 

1304 if is_classmethod: 1abcdefg

1305 func.is_decorated = True 1abcdefg

1306 v = Var(name, func.type) 1abcdefg

1307 v.info = info 1abcdefg

1308 v._fullname = func._fullname 1abcdefg

1309 v.is_classmethod = True 1abcdefg

1310 dec = Decorator(func, [NameExpr('classmethod')], v) 1abcdefg

1311 dec.line = info.line 1abcdefg

1312 sym = SymbolTableNode(MDEF, dec) 1abcdefg

1313 else: 

1314 sym = SymbolTableNode(MDEF, func) 1abcdefg

1315 sym.plugin_generated = True 1abcdefg

1316 info.names[name] = sym 1abcdefg

1317 

1318 info.defn.defs.body.append(func) 1abcdefg

1319 

1320 

1321def parse_toml(config_file: str) -> dict[str, Any] | None: 1abcdefg

1322 """Returns a dict of config keys to values. 

1323 

1324 It reads the configuration from a TOML file and returns `None` if the file is not a TOML file. 

1325 """ 

1326 if not config_file.endswith('.toml'): 1abcdefg

1327 return None 1abcdefg

1328 

1329 if sys.version_info >= (3, 11): 1abcdefg

1330 import tomllib as toml_ 1cdefg

1331 else: 

1332 try: 1ab

1333 import tomli as toml_ 1ab

1334 except ImportError: # pragma: no cover 

1335 import warnings 

1336 

1337 warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.') 

1338 return None 

1339 

1340 with open(config_file, 'rb') as rf: 1abcdefg

1341 return toml_.load(rf) 1abcdefg