Coverage report for pydantic/mypy.py: 90.53% of 619 statements covered.

Generated by coverage.py v7.8.0 at 2025-05-02 16:20 +0000.

1"""This module includes classes and functions designed specifically for use with the mypy plugin.""" 

2 

3from __future__ import annotations 1bcdefghijklmnopqrstuvwxyazABCDEFGHIJ

4 

5import sys 1bcdefghijklmnopqrstuvwxyazABCDEFGHIJ

6from collections.abc import Iterator 1bcdefghijklmnopqrstuvwxyazABCDEFGHIJ

7from configparser import ConfigParser 1bcdefghijklmnopqrstuvwxyazABCDEFGHIJ

8from typing import Any, Callable 1bcdefghijklmnopqrstuvwxyazABCDEFGHIJ

9 

10from mypy.errorcodes import ErrorCode 1bcdefghijklmnopqrstuvwxyazABCDEFGHIJ

11from mypy.expandtype import expand_type, expand_type_by_instance 1a

12from mypy.nodes import ( 1a

13 ARG_NAMED, 

14 ARG_NAMED_OPT, 

15 ARG_OPT, 

16 ARG_POS, 

17 ARG_STAR2, 

18 INVARIANT, 

19 MDEF, 

20 Argument, 

21 AssignmentStmt, 

22 Block, 

23 CallExpr, 

24 ClassDef, 

25 Context, 

26 Decorator, 

27 DictExpr, 

28 EllipsisExpr, 

29 Expression, 

30 FuncDef, 

31 IfStmt, 

32 JsonDict, 

33 MemberExpr, 

34 NameExpr, 

35 PassStmt, 

36 PlaceholderNode, 

37 RefExpr, 

38 Statement, 

39 StrExpr, 

40 SymbolTableNode, 

41 TempNode, 

42 TypeAlias, 

43 TypeInfo, 

44 Var, 

45) 

46from mypy.options import Options 1a

47from mypy.plugin import ( 1a

48 CheckerPluginInterface, 

49 ClassDefContext, 

50 MethodContext, 

51 Plugin, 

52 ReportConfigContext, 

53 SemanticAnalyzerPluginInterface, 

54) 

55from mypy.plugins.common import ( 1a

56 deserialize_and_fixup_type, 

57) 

58from mypy.semanal import set_callable_name 1a

59from mypy.server.trigger import make_wildcard_trigger 1a

60from mypy.state import state 1a

61from mypy.type_visitor import TypeTranslator 1a

62from mypy.typeops import map_type_from_supertype 1a

63from mypy.types import ( 1a

64 AnyType, 

65 CallableType, 

66 Instance, 

67 NoneType, 

68 Type, 

69 TypeOfAny, 

70 TypeType, 

71 TypeVarType, 

72 UnionType, 

73 get_proper_type, 

74) 

75from mypy.typevars import fill_typevars 1a

76from mypy.util import get_unique_redefinition_name 1a

77from mypy.version import __version__ as mypy_version 1a

78 

79from pydantic._internal import _fields 1a

80from pydantic.version import parse_mypy_version 1a

81 

# Keys used to store/retrieve plugin state in config files and mypy metadata.
CONFIGFILE_KEY = 'pydantic-mypy'
METADATA_KEY = 'pydantic-mypy-metadata'

# Fully-qualified names the plugin matches against in the mypy AST.
BASEMODEL_FULLNAME = 'pydantic.main.BaseModel'
BASESETTINGS_FULLNAME = 'pydantic_settings.main.BaseSettings'
ROOT_MODEL_FULLNAME = 'pydantic.root_model.RootModel'
MODEL_METACLASS_FULLNAME = 'pydantic._internal._model_construction.ModelMetaclass'
FIELD_FULLNAME = 'pydantic.fields.Field'
DATACLASS_FULLNAME = 'pydantic.dataclasses.dataclass'
MODEL_VALIDATOR_FULLNAME = 'pydantic.functional_validators.model_validator'
DECORATOR_FULLNAMES = {
    'pydantic.functional_validators.field_validator',
    'pydantic.functional_validators.model_validator',
    'pydantic.functional_serializers.serializer',
    'pydantic.functional_serializers.model_serializer',
    'pydantic.deprecated.class_validators.validator',
    'pydantic.deprecated.class_validators.root_validator',
}
# Every decorator above except `model_serializer` implicitly wraps the target
# function in `classmethod` at runtime (see `adjust_decorator_signatures`).
IMPLICIT_CLASSMETHOD_DECORATOR_FULLNAMES = DECORATOR_FULLNAMES - {'pydantic.functional_serializers.model_serializer'}


MYPY_VERSION_TUPLE = parse_mypy_version(mypy_version)
BUILTINS_NAME = 'builtins'

# Increment version if plugin changes and mypy caches should be invalidated
__version__ = 2

107 

108 

def plugin(version: str) -> type[Plugin]:
    """`version` is the mypy version string.

    We might want to use this to print a warning if the mypy version being used is
    newer, or especially older, than we expect (or need).

    Args:
        version: The mypy version string.

    Returns:
        The Pydantic mypy plugin type.
    """
    return PydanticPlugin

122 

123 

class PydanticPlugin(Plugin):
    """The Pydantic mypy plugin."""

    def __init__(self, options: Options) -> None:
        self.plugin_config = PydanticPluginConfig(options)
        # Snapshot the config once; mypy queries it via `report_config_data`
        # to decide whether its caches must be invalidated.
        self._plugin_data = self.plugin_config.to_data()
        super().__init__(options)

    def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
        """Update Pydantic model class."""
        sym = self.lookup_fully_qualified(fullname)
        if sym and isinstance(sym.node, TypeInfo):  # pragma: no branch
            # No branching may occur if the mypy cache has not been cleared
            if sym.node.has_base(BASEMODEL_FULLNAME):
                return self._pydantic_model_class_maker_callback
        return None

    def get_metaclass_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None:
        """Update Pydantic `ModelMetaclass` definition."""
        if fullname == MODEL_METACLASS_FULLNAME:
            return self._pydantic_model_metaclass_marker_callback
        return None

    def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None:
        """Adjust return type of `from_orm` method call."""
        if fullname.endswith('.from_orm'):
            return from_attributes_callback
        return None

    def report_config_data(self, ctx: ReportConfigContext) -> dict[str, Any]:
        """Return all plugin config data.

        Used by mypy to determine if cache needs to be discarded.
        """
        return self._plugin_data

    def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> None:
        # Delegate all model-specific work to the transformer.
        transformer = PydanticModelTransformer(ctx.cls, ctx.reason, ctx.api, self.plugin_config)
        transformer.transform()

    def _pydantic_model_metaclass_marker_callback(self, ctx: ClassDefContext) -> None:
        """Reset dataclass_transform_spec attribute of ModelMetaclass.

        Let the plugin handle it. This behavior can be disabled
        if 'debug_dataclass_transform' is set to `True`, for testing purposes.
        """
        if self.plugin_config.debug_dataclass_transform:
            return
        info_metaclass = ctx.cls.info.declared_metaclass
        assert info_metaclass, "callback not passed from 'get_metaclass_hook'"
        if getattr(info_metaclass.type, 'dataclass_transform_spec', None):
            info_metaclass.type.dataclass_transform_spec = None

176 

177 

class PydanticPluginConfig:
    """A Pydantic mypy plugin config holder.

    Attributes:
        init_forbid_extra: Whether to add a `**kwargs` at the end of the generated `__init__` signature.
        init_typed: Whether to annotate fields in the generated `__init__`.
        warn_required_dynamic_aliases: Whether to raise required dynamic aliases error.
        debug_dataclass_transform: Whether to not reset `dataclass_transform_spec` attribute
            of `ModelMetaclass` for testing purposes.
    """

    __slots__ = (
        'init_forbid_extra',
        'init_typed',
        'warn_required_dynamic_aliases',
        'debug_dataclass_transform',
    )
    init_forbid_extra: bool
    init_typed: bool
    warn_required_dynamic_aliases: bool
    debug_dataclass_transform: bool  # undocumented

    def __init__(self, options: Options) -> None:
        if options.config_file is None:  # pragma: no cover
            return

        toml_config = parse_toml(options.config_file)
        if toml_config is not None:
            # TOML config (pyproject.toml): settings live under [tool.pydantic-mypy].
            config = toml_config.get('tool', {}).get('pydantic-mypy', {})
            for key in self.__slots__:
                setting = config.get(key, False)
                if not isinstance(setting, bool):
                    raise ValueError(f'Configuration value must be a boolean for key: {key}')
                setattr(self, key, setting)
        else:
            # INI-style config (mypy.ini / setup.cfg): settings live under [pydantic-mypy].
            plugin_config = ConfigParser()
            plugin_config.read(options.config_file)
            for key in self.__slots__:
                setting = plugin_config.getboolean(CONFIGFILE_KEY, key, fallback=False)
                setattr(self, key, setting)

    def to_data(self) -> dict[str, Any]:
        """Returns a dict of config names to their values."""
        return {key: getattr(self, key) for key in self.__slots__}

222 

223 

def from_attributes_callback(ctx: MethodContext) -> Type:
    """Raise an error if from_attributes is not enabled."""
    model_type: Instance
    ctx_type = ctx.type
    if isinstance(ctx_type, TypeType):
        ctx_type = ctx_type.item
    if isinstance(ctx_type, CallableType) and isinstance(ctx_type.ret_type, Instance):
        model_type = ctx_type.ret_type  # called on the class
    elif isinstance(ctx_type, Instance):
        model_type = ctx_type  # called on an instance (unusual, but still valid)
    else:  # pragma: no cover
        detail = f'ctx.type: {ctx_type} (of type {ctx_type.__class__.__name__})'
        error_unexpected_behavior(detail, ctx.api, ctx.context)
        return ctx.default_return_type
    pydantic_metadata = model_type.type.metadata.get(METADATA_KEY)
    if pydantic_metadata is None:
        # Not a model processed by this plugin; leave the inferred type alone.
        return ctx.default_return_type
    if not model_type.type.has_base(BASEMODEL_FULLNAME):
        # not a Pydantic v2 model
        return ctx.default_return_type
    from_attributes = pydantic_metadata.get('config', {}).get('from_attributes')
    if from_attributes is not True:
        error_from_attributes(model_type.type.name, ctx.api, ctx.context)
    return ctx.default_return_type

248 

249 

class PydanticModelField:
    """Based on mypy.plugins.dataclasses.DataclassAttribute."""

    def __init__(
        self,
        name: str,
        alias: str | None,
        is_frozen: bool,
        has_dynamic_alias: bool,
        has_default: bool,
        strict: bool | None,
        line: int,
        column: int,
        type: Type | None,
        info: TypeInfo,
    ):
        self.name = name
        self.alias = alias
        self.is_frozen = is_frozen
        self.has_dynamic_alias = has_dynamic_alias
        self.has_default = has_default
        self.strict = strict
        self.line = line
        self.column = column
        self.type = type
        self.info = info

    def to_argument(
        self,
        current_info: TypeInfo,
        typed: bool,
        model_strict: bool,
        force_optional: bool,
        use_alias: bool,
        api: SemanticAnalyzerPluginInterface,
        force_typevars_invariant: bool,
        is_root_model_root: bool,
    ) -> Argument:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.to_argument."""
        variable = self.to_var(current_info, api, use_alias, force_typevars_invariant)

        # A per-field `strict` setting takes precedence over the model-level one:
        strict = model_strict if self.strict is None else self.strict
        if typed or strict:
            type_annotation = self.expand_type(current_info, api, include_root_type=True)
        else:
            type_annotation = AnyType(TypeOfAny.explicit)

        return Argument(
            variable=variable,
            type_annotation=type_annotation,
            initializer=None,
            kind=ARG_OPT
            if is_root_model_root
            else (ARG_NAMED_OPT if force_optional or self.has_default else ARG_NAMED),
        )

    def expand_type(
        self,
        current_info: TypeInfo,
        api: SemanticAnalyzerPluginInterface,
        force_typevars_invariant: bool = False,
        include_root_type: bool = False,
    ) -> Type | None:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.expand_type."""
        if force_typevars_invariant:
            # In some cases, mypy will emit an error "Cannot use a covariant type variable as a parameter"
            # To prevent that, we add an option to replace typevars with invariant ones while building certain
            # method signatures (in particular, `__init__`). There may be a better way to do this, if this causes
            # us problems in the future, we should look into why the dataclasses plugin doesn't have this issue.
            if isinstance(self.type, TypeVarType):
                modified_type = self.type.copy_modified()
                modified_type.variance = INVARIANT
                self.type = modified_type

        if self.type is not None and self.info.self_type is not None:
            # In general, it is not safe to call `expand_type()` during semantic analysis,
            # however this plugin is called very late, so all types should be fully ready.
            # Also, it is tricky to avoid eager expansion of Self types here (e.g. because
            # we serialize attributes).
            with state.strict_optional_set(api.options.strict_optional):
                filled_with_typevars = fill_typevars(current_info)
                # Cannot be TupleType as current_info represents a Pydantic model:
                assert isinstance(filled_with_typevars, Instance)
                if force_typevars_invariant:
                    for arg in filled_with_typevars.args:
                        if isinstance(arg, TypeVarType):
                            arg.variance = INVARIANT

                expanded_type = expand_type(self.type, {self.info.self_type.id: filled_with_typevars})
                if include_root_type and isinstance(expanded_type, Instance) and is_root_model(expanded_type.type):
                    # When a root model is used as a field, Pydantic allows both an instance of the root model
                    # as well as instances of the `root` field type:
                    root_type = expanded_type.type['root'].type
                    if root_type is None:
                        # Happens if the hint for 'root' has unsolved forward references
                        return expanded_type
                    expanded_root_type = expand_type_by_instance(root_type, expanded_type)
                    expanded_type = UnionType([expanded_type, expanded_root_type])
                return expanded_type
        return self.type

    def to_var(
        self,
        current_info: TypeInfo,
        api: SemanticAnalyzerPluginInterface,
        use_alias: bool,
        force_typevars_invariant: bool = False,
    ) -> Var:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.to_var."""
        if use_alias and self.alias is not None:
            name = self.alias
        else:
            name = self.name

        return Var(name, self.expand_type(current_info, api, force_typevars_invariant))

    def serialize(self) -> JsonDict:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.serialize."""
        assert self.type
        return {
            'name': self.name,
            'alias': self.alias,
            'is_frozen': self.is_frozen,
            'has_dynamic_alias': self.has_dynamic_alias,
            'has_default': self.has_default,
            'strict': self.strict,
            'line': self.line,
            'column': self.column,
            'type': self.type.serialize(),
        }

    @classmethod
    def deserialize(cls, info: TypeInfo, data: JsonDict, api: SemanticAnalyzerPluginInterface) -> PydanticModelField:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize."""
        data = data.copy()
        typ = deserialize_and_fixup_type(data.pop('type'), api)
        return cls(type=typ, info=info, **data)

    def expand_typevar_from_subtype(self, sub_type: TypeInfo, api: SemanticAnalyzerPluginInterface) -> None:
        """Expands type vars in the context of a subtype when an attribute is inherited
        from a generic super type.
        """
        if self.type is not None:
            with state.strict_optional_set(api.options.strict_optional):
                self.type = map_type_from_supertype(self.type, sub_type, self.info)

395 

396 

class PydanticModelClassVar:
    """Based on mypy.plugins.dataclasses.DataclassAttribute.

    ClassVars are ignored by subclasses.

    Attributes:
        name: the ClassVar name
    """

    def __init__(self, name: str):
        self.name = name

    @classmethod
    def deserialize(cls, data: JsonDict) -> PydanticModelClassVar:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize."""
        data = data.copy()
        return cls(**data)

    def serialize(self) -> JsonDict:
        """Based on mypy.plugins.dataclasses.DataclassAttribute.serialize."""
        return {
            'name': self.name,
        }

420 

421 

class PydanticModelTransformer:
    """Transform the BaseModel subclass according to the plugin settings.

    Attributes:
        tracked_config_fields: A set of field configs that the plugin has to track their value.
    """

    tracked_config_fields: set[str] = {
        'extra',
        'frozen',
        'from_attributes',
        'populate_by_name',
        'validate_by_alias',
        'validate_by_name',
        'alias_generator',
        'strict',
    }

    def __init__(
        self,
        cls: ClassDef,
        reason: Expression | Statement,
        api: SemanticAnalyzerPluginInterface,
        plugin_config: PydanticPluginConfig,
    ) -> None:
        self._cls = cls
        self._reason = reason
        self._api = api

        self.plugin_config = plugin_config

452 

453 def transform(self) -> bool: 1a

454 """Configures the BaseModel subclass according to the plugin settings. 

455 

456 In particular: 

457 

458 * determines the model config and fields, 

459 * adds a fields-aware signature for the initializer and construct methods 

460 * freezes the class if frozen = True 

461 * stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses 

462 """ 

463 info = self._cls.info 1a

464 is_a_root_model = is_root_model(info) 1a

465 config = self.collect_config() 1a

466 fields, class_vars = self.collect_fields_and_class_vars(config, is_a_root_model) 1a

467 if fields is None or class_vars is None: 467 ↛ 469line 467 didn't jump to line 469 because the condition on line 467 was never true1a

468 # Some definitions are not ready. We need another pass. 

469 return False 

470 for field in fields: 1a

471 if field.type is None: 1a

472 return False 1a

473 

474 is_settings = info.has_base(BASESETTINGS_FULLNAME) 1a

475 self.add_initializer(fields, config, is_settings, is_a_root_model) 1a

476 self.add_model_construct_method(fields, config, is_settings, is_a_root_model) 1a

477 self.set_frozen(fields, self._api, frozen=config.frozen is True) 1a

478 

479 self.adjust_decorator_signatures() 1a

480 

481 info.metadata[METADATA_KEY] = { 1a

482 'fields': {field.name: field.serialize() for field in fields}, 

483 'class_vars': {class_var.name: class_var.serialize() for class_var in class_vars}, 

484 'config': config.get_values_dict(), 

485 } 

486 

487 return True 1a

488 

489 def adjust_decorator_signatures(self) -> None: 1a

490 """When we decorate a function `f` with `pydantic.validator(...)`, `pydantic.field_validator` 

491 or `pydantic.serializer(...)`, mypy sees `f` as a regular method taking a `self` instance, 

492 even though pydantic internally wraps `f` with `classmethod` if necessary. 

493 

494 Teach mypy this by marking any function whose outermost decorator is a `validator()`, 

495 `field_validator()` or `serializer()` call as a `classmethod`. 

496 """ 

497 for sym in self._cls.info.names.values(): 1a

498 if isinstance(sym.node, Decorator): 1a

499 first_dec = sym.node.original_decorators[0] 1a

500 if ( 

501 isinstance(first_dec, CallExpr) 

502 and isinstance(first_dec.callee, NameExpr) 

503 and first_dec.callee.fullname in IMPLICIT_CLASSMETHOD_DECORATOR_FULLNAMES 

504 # @model_validator(mode="after") is an exception, it expects a regular method 

505 and not ( 

506 first_dec.callee.fullname == MODEL_VALIDATOR_FULLNAME 

507 and any( 

508 first_dec.arg_names[i] == 'mode' and isinstance(arg, StrExpr) and arg.value == 'after' 

509 for i, arg in enumerate(first_dec.args) 

510 ) 

511 ) 

512 ): 

513 # TODO: Only do this if the first argument of the decorated function is `cls` 

514 sym.node.func.is_class = True 1a

515 

516 def collect_config(self) -> ModelConfigData: # noqa: C901 (ignore complexity) 1a

517 """Collects the values of the config attributes that are used by the plugin, accounting for parent classes.""" 

518 cls = self._cls 1a

519 config = ModelConfigData() 1a

520 

521 has_config_kwargs = False 1a

522 has_config_from_namespace = False 1a

523 

524 # Handle `class MyModel(BaseModel, <name>=<expr>, ...):` 

525 for name, expr in cls.keywords.items(): 1a

526 config_data = self.get_config_update(name, expr) 1a

527 if config_data: 1a

528 has_config_kwargs = True 1a

529 config.update(config_data) 1a

530 

531 # Handle `model_config` 

532 stmt: Statement | None = None 1a

533 for stmt in cls.defs.body: 1a

534 if not isinstance(stmt, (AssignmentStmt, ClassDef)): 1a

535 continue 1a

536 

537 if isinstance(stmt, AssignmentStmt): 1a

538 lhs = stmt.lvalues[0] 1a

539 if not isinstance(lhs, NameExpr) or lhs.name != 'model_config': 1a

540 continue 1a

541 

542 if isinstance(stmt.rvalue, CallExpr): # calls to `dict` or `ConfigDict` 1a

543 for arg_name, arg in zip(stmt.rvalue.arg_names, stmt.rvalue.args): 1a

544 if arg_name is None: 544 ↛ 545line 544 didn't jump to line 545 because the condition on line 544 was never true1a

545 continue 

546 config.update(self.get_config_update(arg_name, arg, lax_extra=True)) 1a

547 elif isinstance(stmt.rvalue, DictExpr): # dict literals 547 ↛ 564line 547 didn't jump to line 564 because the condition on line 547 was always true1a

548 for key_expr, value_expr in stmt.rvalue.items: 1a

549 if not isinstance(key_expr, StrExpr): 549 ↛ 550line 549 didn't jump to line 550 because the condition on line 549 was never true1a

550 continue 

551 config.update(self.get_config_update(key_expr.value, value_expr)) 1a

552 

553 elif isinstance(stmt, ClassDef): 553 ↛ 564line 553 didn't jump to line 564 because the condition on line 553 was always true1a

554 if stmt.name != 'Config': # 'deprecated' Config-class 1a

555 continue 1a

556 for substmt in stmt.defs.body: 1a

557 if not isinstance(substmt, AssignmentStmt): 1a

558 continue 1a

559 lhs = substmt.lvalues[0] 1a

560 if not isinstance(lhs, NameExpr): 560 ↛ 561line 560 didn't jump to line 561 because the condition on line 560 was never true1a

561 continue 

562 config.update(self.get_config_update(lhs.name, substmt.rvalue)) 1a

563 

564 if has_config_kwargs: 564 ↛ 565line 564 didn't jump to line 565 because the condition on line 564 was never true1a

565 self._api.fail( 

566 'Specifying config in two places is ambiguous, use either Config attribute or class kwargs', 

567 cls, 

568 ) 

569 break 

570 

571 has_config_from_namespace = True 1a

572 

573 if has_config_kwargs or has_config_from_namespace: 1a

574 if ( 

575 stmt 

576 and config.has_alias_generator 

577 and not (config.validate_by_name or config.populate_by_name) 

578 and self.plugin_config.warn_required_dynamic_aliases 

579 ): 

580 error_required_dynamic_aliases(self._api, stmt) 1a

581 

582 for info in cls.info.mro[1:]: # 0 is the current class 1a

583 if METADATA_KEY not in info.metadata: 1a

584 continue 1a

585 

586 # Each class depends on the set of fields in its ancestors 

587 self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname)) 1a

588 for name, value in info.metadata[METADATA_KEY]['config'].items(): 1a

589 config.setdefault(name, value) 1a

590 return config 1a

591 

592 def collect_fields_and_class_vars( 1a

593 self, model_config: ModelConfigData, is_root_model: bool 

594 ) -> tuple[list[PydanticModelField] | None, list[PydanticModelClassVar] | None]: 

595 """Collects the fields for the model, accounting for parent classes.""" 

596 cls = self._cls 1a

597 

598 # First, collect fields and ClassVars belonging to any class in the MRO, ignoring duplicates. 

599 # 

600 # We iterate through the MRO in reverse because attrs defined in the parent must appear 

601 # earlier in the attributes list than attrs defined in the child. See: 

602 # https://docs.python.org/3/library/dataclasses.html#inheritance 

603 # 

604 # However, we also want fields defined in the subtype to override ones defined 

605 # in the parent. We can implement this via a dict without disrupting the attr order 

606 # because dicts preserve insertion order in Python 3.7+. 

607 found_fields: dict[str, PydanticModelField] = {} 1a

608 found_class_vars: dict[str, PydanticModelClassVar] = {} 1a

609 for info in reversed(cls.info.mro[1:-1]): # 0 is the current class, -2 is BaseModel, -1 is object 1a

610 # if BASEMODEL_METADATA_TAG_KEY in info.metadata and BASEMODEL_METADATA_KEY not in info.metadata: 

611 # # We haven't processed the base class yet. Need another pass. 

612 # return None, None 

613 if METADATA_KEY not in info.metadata: 1a

614 continue 1a

615 

616 # Each class depends on the set of attributes in its dataclass ancestors. 

617 self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname)) 1a

618 

619 for name, data in info.metadata[METADATA_KEY]['fields'].items(): 1a

620 field = PydanticModelField.deserialize(info, data, self._api) 1a

621 # (The following comment comes directly from the dataclasses plugin) 

622 # TODO: We shouldn't be performing type operations during the main 

623 # semantic analysis pass, since some TypeInfo attributes might 

624 # still be in flux. This should be performed in a later phase. 

625 field.expand_typevar_from_subtype(cls.info, self._api) 1a

626 found_fields[name] = field 1a

627 

628 sym_node = cls.info.names.get(name) 1a

629 if sym_node and sym_node.node and not isinstance(sym_node.node, Var): 629 ↛ 630line 629 didn't jump to line 630 because the condition on line 629 was never true1a

630 self._api.fail( 

631 'BaseModel field may only be overridden by another field', 

632 sym_node.node, 

633 ) 

634 # Collect ClassVars 

635 for name, data in info.metadata[METADATA_KEY]['class_vars'].items(): 635 ↛ 636line 635 didn't jump to line 636 because the loop on line 635 never started1a

636 found_class_vars[name] = PydanticModelClassVar.deserialize(data) 

637 

638 # Second, collect fields and ClassVars belonging to the current class. 

639 current_field_names: set[str] = set() 1a

640 current_class_vars_names: set[str] = set() 1a

641 for stmt in self._get_assignment_statements_from_block(cls.defs): 1a

642 maybe_field = self.collect_field_or_class_var_from_stmt(stmt, model_config, found_class_vars) 1a

643 if maybe_field is None: 1a

644 continue 1a

645 

646 lhs = stmt.lvalues[0] 1a

647 assert isinstance(lhs, NameExpr) # collect_field_or_class_var_from_stmt guarantees this 1a

648 if isinstance(maybe_field, PydanticModelField): 1a

649 if is_root_model and lhs.name != 'root': 1a

650 error_extra_fields_on_root_model(self._api, stmt) 1a

651 else: 

652 current_field_names.add(lhs.name) 1a

653 found_fields[lhs.name] = maybe_field 1a

654 elif isinstance(maybe_field, PydanticModelClassVar): 654 ↛ 641line 654 didn't jump to line 641 because the condition on line 654 was always true1a

655 current_class_vars_names.add(lhs.name) 1a

656 found_class_vars[lhs.name] = maybe_field 1a

657 

658 return list(found_fields.values()), list(found_class_vars.values()) 1a

659 

660 def _get_assignment_statements_from_if_statement(self, stmt: IfStmt) -> Iterator[AssignmentStmt]: 1a

661 for body in stmt.body: 1a

662 if not body.is_unreachable: 662 ↛ 661line 662 didn't jump to line 661 because the condition on line 662 was always true1a

663 yield from self._get_assignment_statements_from_block(body) 1a

664 if stmt.else_body is not None and not stmt.else_body.is_unreachable: 664 ↛ 665line 664 didn't jump to line 665 because the condition on line 664 was never true1a

665 yield from self._get_assignment_statements_from_block(stmt.else_body) 

666 

667 def _get_assignment_statements_from_block(self, block: Block) -> Iterator[AssignmentStmt]: 1a

668 for stmt in block.body: 1a

669 if isinstance(stmt, AssignmentStmt): 1a

670 yield stmt 1a

671 elif isinstance(stmt, IfStmt): 1a

672 yield from self._get_assignment_statements_from_if_statement(stmt) 1a

673 

674 def collect_field_or_class_var_from_stmt( # noqa C901 1a

675 self, stmt: AssignmentStmt, model_config: ModelConfigData, class_vars: dict[str, PydanticModelClassVar] 

676 ) -> PydanticModelField | PydanticModelClassVar | None: 

677 """Get pydantic model field from statement. 

678 

679 Args: 

680 stmt: The statement. 

681 model_config: Configuration settings for the model. 

682 class_vars: ClassVars already known to be defined on the model. 

683 

684 Returns: 

685 A pydantic model field if it could find the field in statement. Otherwise, `None`. 

686 """ 

687 cls = self._cls 1a

688 

689 lhs = stmt.lvalues[0] 1a

690 if not isinstance(lhs, NameExpr) or not _fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config': 1a

691 return None 1a

692 

693 if not stmt.new_syntax: 1a

694 if ( 

695 isinstance(stmt.rvalue, CallExpr) 

696 and isinstance(stmt.rvalue.callee, CallExpr) 

697 and isinstance(stmt.rvalue.callee.callee, NameExpr) 

698 and stmt.rvalue.callee.callee.fullname in DECORATOR_FULLNAMES 

699 ): 

700 # This is a (possibly-reused) validator or serializer, not a field 

701 # In particular, it looks something like: my_validator = validator('my_field')(f) 

702 # Eventually, we may want to attempt to respect model_config['ignored_types'] 

703 return None 1a

704 

705 if lhs.name in class_vars: 705 ↛ 707line 705 didn't jump to line 707 because the condition on line 705 was never true1a

706 # Class vars are not fields and are not required to be annotated 

707 return None 

708 

709 # The assignment does not have an annotation, and it's not anything else we recognize 

710 error_untyped_fields(self._api, stmt) 1a

711 return None 1a

712 

713 lhs = stmt.lvalues[0] 1a

714 if not isinstance(lhs, NameExpr): 714 ↛ 715line 714 didn't jump to line 715 because the condition on line 714 was never true1a

715 return None 

716 

717 if not _fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config': 717 ↛ 718line 717 didn't jump to line 718 because the condition on line 717 was never true1a

718 return None 

719 

720 sym = cls.info.names.get(lhs.name) 1a

721 if sym is None: # pragma: no cover 1a

722 # This is likely due to a star import (see the dataclasses plugin for a more detailed explanation) 

723 # This is the same logic used in the dataclasses plugin 

724 return None 

725 

726 node = sym.node 1a

727 if isinstance(node, PlaceholderNode): # pragma: no cover 1a

728 # See the PlaceholderNode docstring for more detail about how this can occur 

729 # Basically, it is an edge case when dealing with complex import logic 

730 

731 # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does.. 

732 return None 

733 

734 if isinstance(node, TypeAlias): 1a

735 self._api.fail( 

736 'Type aliases inside BaseModel definitions are not supported at runtime', 

737 node, 

738 ) 

739 # Skip processing this node. This doesn't match the runtime behaviour, 

740 # but the only alternative would be to modify the SymbolTable, 

741 # and it's a little hairy to do that in a plugin. 

742 return None 

743 

744 if not isinstance(node, Var): # pragma: no cover 1a

745 # Don't know if this edge case still happens with the `is_valid_field` check above 

746 # but better safe than sorry 

747 

748 # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does.. 

749 return None 

750 

751 # x: ClassVar[int] is not a field 

752 if node.is_classvar: 1a

753 return PydanticModelClassVar(lhs.name) 1a

754 

755 # x: InitVar[int] is not supported in BaseModel 

756 node_type = get_proper_type(node.type) 1a

757 if isinstance(node_type, Instance) and node_type.type.fullname == 'dataclasses.InitVar': 757 ↛ 758line 757 didn't jump to line 758 because the condition on line 757 was never true1a

758 self._api.fail( 

759 'InitVar is not supported in BaseModel', 

760 node, 

761 ) 

762 

763 has_default = self.get_has_default(stmt) 1a

764 strict = self.get_strict(stmt) 1a

765 

766 if sym.type is None and node.is_final and node.is_inferred: 766 ↛ 774line 766 didn't jump to line 774 because the condition on line 766 was never true1a

767 # This follows the logic from the dataclasses plugin. The following comment is taken verbatim: 

768 # 

769 # This is a special case, assignment like x: Final = 42 is classified 

770 # annotated above, but mypy strips the `Final` turning it into x = 42. 

771 # We do not support inferred types in dataclasses, so we can try inferring 

772 # type for simple literals, and otherwise require an explicit type 

773 # argument for Final[...]. 

774 typ = self._api.analyze_simple_literal_type(stmt.rvalue, is_final=True) 

775 if typ: 

776 node.type = typ 

777 else: 

778 self._api.fail( 

779 'Need type argument for Final[...] with non-literal default in BaseModel', 

780 stmt, 

781 ) 

782 node.type = AnyType(TypeOfAny.from_error) 

783 

784 if node.is_final and has_default: 1a

785 # TODO this path should be removed (see https://github.com/pydantic/pydantic/issues/11119) 

786 return PydanticModelClassVar(lhs.name) 1a

787 

788 alias, has_dynamic_alias = self.get_alias_info(stmt) 1a

789 if ( 

790 has_dynamic_alias 

791 and not (model_config.validate_by_name or model_config.populate_by_name) 

792 and self.plugin_config.warn_required_dynamic_aliases 

793 ): 

794 error_required_dynamic_aliases(self._api, stmt) 1a

795 is_frozen = self.is_field_frozen(stmt) 1a

796 

797 init_type = self._infer_dataclass_attr_init_type(sym, lhs.name, stmt) 1a

798 return PydanticModelField( 1a

799 name=lhs.name, 

800 has_dynamic_alias=has_dynamic_alias, 

801 has_default=has_default, 

802 strict=strict, 

803 alias=alias, 

804 is_frozen=is_frozen, 

805 line=stmt.line, 

806 column=stmt.column, 

807 type=init_type, 

808 info=cls.info, 

809 ) 

810 

    def _infer_dataclass_attr_init_type(self, sym: SymbolTableNode, name: str, context: Context) -> Type | None:
        """Infer __init__ argument type for an attribute.

        In particular, possibly use the signature of __set__.

        Args:
            sym: The symbol table entry for the attribute being processed.
            name: The attribute's name (not referenced in this implementation).
            context: The context used when reporting failures.

        Returns:
            The type to use for the corresponding `__init__` argument, or `None` if it cannot be determined.
        """
        default = sym.type
        if sym.implicit:
            return default
        t = get_proper_type(sym.type)

        # Perform a simple-minded inference from the signature of __set__, if present.
        # We can't use mypy.checkmember here, since this plugin runs before type checking.
        # We only support some basic scenarios here, which is hopefully sufficient for
        # the vast majority of use cases.
        if not isinstance(t, Instance):
            return default
        setter = t.type.get('__set__')
        if setter:
            if isinstance(setter.node, FuncDef):
                super_info = t.type.get_containing_type_info('__set__')
                assert super_info
                if setter.type:
                    # Map the setter signature onto this instance's type variables before inspecting it.
                    setter_type = get_proper_type(map_type_from_supertype(setter.type, t.type, super_info))
                else:
                    return AnyType(TypeOfAny.unannotated)
                # Only the plain `__set__(self, instance, value)` shape (three positional args) is supported;
                # the init type is then the declared type of the `value` parameter.
                if isinstance(setter_type, CallableType) and setter_type.arg_kinds == [
                    ARG_POS,
                    ARG_POS,
                    ARG_POS,
                ]:
                    return expand_type_by_instance(setter_type.arg_types[2], t)
                else:
                    self._api.fail(f'Unsupported signature for "__set__" in "{t.type.name}"', context)
            else:
                # `__set__` exists but is not a plain function (e.g. an overload or decorated callable).
                self._api.fail(f'Unsupported "__set__" in "{t.type.name}"', context)

        return default

848 

    def add_initializer(
        self, fields: list[PydanticModelField], config: ModelConfigData, is_settings: bool, is_root_model: bool
    ) -> None:
        """Adds a fields-aware `__init__` method to the class.

        The added `__init__` will be annotated with types vs. all `Any` depending on the plugin settings.

        Args:
            fields: The collected model fields used to build the signature.
            config: The resolved model config data (extra/strict/alias settings).
            is_settings: Whether the class is a pydantic-settings `BaseSettings` subclass.
            is_root_model: Whether the class is a `RootModel` subclass.
        """
        if '__init__' in self._cls.info.names and not self._cls.info.names['__init__'].plugin_generated:
            return  # Don't generate an __init__ if one already exists

        typed = self.plugin_config.init_typed
        model_strict = bool(config.strict)
        # Arguments use alias names only when by-name population is disabled and by-alias validation isn't off.
        use_alias = not (config.validate_by_name or config.populate_by_name) and config.validate_by_alias is not False
        # An alias generator without by-name population means aliases exist but can't be known statically.
        requires_dynamic_aliases = bool(config.has_alias_generator and not config.validate_by_name)
        args = self.get_field_arguments(
            fields,
            typed=typed,
            model_strict=model_strict,
            requires_dynamic_aliases=requires_dynamic_aliases,
            use_alias=use_alias,
            is_settings=is_settings,
            is_root_model=is_root_model,
            force_typevars_invariant=True,
        )

        if is_settings:
            # Forward the single-underscore keyword arguments (e.g. `_env_file`) declared on
            # `BaseSettings.__init__` so they remain accepted by the generated signature.
            base_settings_node = self._api.lookup_fully_qualified(BASESETTINGS_FULLNAME).node
            assert isinstance(base_settings_node, TypeInfo)
            if '__init__' in base_settings_node.names:
                base_settings_init_node = base_settings_node.names['__init__'].node
                assert isinstance(base_settings_init_node, FuncDef)
                if base_settings_init_node is not None and base_settings_init_node.type is not None:
                    func_type = base_settings_init_node.type
                    assert isinstance(func_type, CallableType)
                    for arg_idx, arg_name in enumerate(func_type.arg_names):
                        # Keep only names that start with exactly one underscore.
                        if arg_name is None or arg_name.startswith('__') or not arg_name.startswith('_'):
                            continue
                        analyzed_variable_type = self._api.anal_type(func_type.arg_types[arg_idx])
                        if analyzed_variable_type is not None and arg_name == '_cli_settings_source':
                            # _cli_settings_source is defined as CliSettingsSource[Any], and as such
                            # the Any causes issues with --disallow-any-explicit. As a workaround, change
                            # the Any type (as if CliSettingsSource was left unparameterized):
                            analyzed_variable_type = analyzed_variable_type.accept(
                                ChangeExplicitTypeOfAny(TypeOfAny.from_omitted_generics)
                            )
                        variable = Var(arg_name, analyzed_variable_type)
                        args.append(Argument(variable, analyzed_variable_type, None, ARG_OPT))

        if not self.should_init_forbid_extra(fields, config):
            # Extra fields are allowed: accept arbitrary keyword arguments.
            var = Var('kwargs')
            args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2))

        add_method(self._api, self._cls, '__init__', args=args, return_type=NoneType())

902 

    def add_model_construct_method(
        self,
        fields: list[PydanticModelField],
        config: ModelConfigData,
        is_settings: bool,
        is_root_model: bool,
    ) -> None:
        """Adds a fully typed `model_construct` classmethod to the class.

        Similar to the fields-aware __init__ method, but always uses the field names (not aliases),
        and does not treat settings fields as optional.
        """
        # Build the `_fields_set: set[str] | None = ...` argument accepted by `model_construct`.
        set_str = self._api.named_type(f'{BUILTINS_NAME}.set', [self._api.named_type(f'{BUILTINS_NAME}.str')])
        optional_set_str = UnionType([set_str, NoneType()])
        fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT)
        # Field argument types must be computed under the same strict-optional setting mypy is using.
        with state.strict_optional_set(self._api.options.strict_optional):
            args = self.get_field_arguments(
                fields,
                typed=True,
                model_strict=bool(config.strict),
                requires_dynamic_aliases=False,
                use_alias=False,
                is_settings=is_settings,
                is_root_model=is_root_model,
            )
        if not self.should_init_forbid_extra(fields, config):
            # Extra fields are allowed: accept arbitrary keyword arguments.
            var = Var('kwargs')
            args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2))

        # For root models, `_fields_set` goes last so `root` can be passed positionally.
        args = args + [fields_set_argument] if is_root_model else [fields_set_argument] + args

        add_method(
            self._api,
            self._cls,
            'model_construct',
            args=args,
            return_type=fill_typevars(self._cls.info),
            is_classmethod=True,
        )

942 

    def set_frozen(self, fields: list[PydanticModelField], api: SemanticAnalyzerPluginInterface, frozen: bool) -> None:
        """Marks all fields as properties so that attempts to set them trigger mypy errors.

        This is the same approach used by the attrs and dataclasses plugins.

        Args:
            fields: The collected model fields.
            api: The semantic analyzer plugin interface (used to synthesize missing `Var` nodes).
            frozen: Whether the whole model is frozen; individual fields may also be frozen via `Field(frozen=True)`.
        """
        info = self._cls.info
        for field in fields:
            sym_node = info.names.get(field.name)
            if sym_node is not None:
                var = sym_node.node
                if isinstance(var, Var):
                    # Read-only if the whole model is frozen, or this particular field is.
                    var.is_property = frozen or field.is_frozen
                elif isinstance(var, PlaceholderNode) and not self._api.final_iteration:
                    # See https://github.com/pydantic/pydantic/issues/5191 to hit this branch for test coverage
                    self._api.defer()
                else:  # pragma: no cover
                    # I don't know whether it's possible to hit this branch, but I've added it for safety
                    try:
                        var_str = str(var)
                    except TypeError:
                        # This happens for PlaceholderNode; perhaps it will happen for other types in the future..
                        var_str = repr(var)
                    detail = f'sym_node.node: {var_str} (of type {var.__class__})'
                    error_unexpected_behavior(detail, self._api, self._cls)
            else:
                # No symbol table entry for this field yet: synthesize one and register it.
                var = field.to_var(info, api, use_alias=False)
                var.info = info
                var.is_property = frozen
                var._fullname = info.fullname + '.' + var.name
                info.names[var.name] = SymbolTableNode(MDEF, var)

973 

974 def get_config_update(self, name: str, arg: Expression, lax_extra: bool = False) -> ModelConfigData | None: 1a

975 """Determines the config update due to a single kwarg in the ConfigDict definition. 

976 

977 Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int) 

978 """ 

979 if name not in self.tracked_config_fields: 1a

980 return None 1a

981 if name == 'extra': 1a

982 if isinstance(arg, StrExpr): 1a

983 forbid_extra = arg.value == 'forbid' 1a

984 elif isinstance(arg, MemberExpr): 1a

985 forbid_extra = arg.name == 'forbid' 1a

986 else: 

987 if not lax_extra: 1a

988 # Only emit an error for other types of `arg` (e.g., `NameExpr`, `ConditionalExpr`, etc.) when 

989 # reading from a config class, etc. If a ConfigDict is used, then we don't want to emit an error 

990 # because you'll get type checking from the ConfigDict itself. 

991 # 

992 # It would be nice if we could introspect the types better otherwise, but I don't know what the API 

993 # is to evaluate an expr into its type and then check if that type is compatible with the expected 

994 # type. Note that you can still get proper type checking via: `model_config = ConfigDict(...)`, just 

995 # if you don't use an explicit string, the plugin won't be able to infer whether extra is forbidden. 

996 error_invalid_config_value(name, self._api, arg) 1a

997 return None 1a

998 return ModelConfigData(forbid_extra=forbid_extra) 1a

999 if name == 'alias_generator': 1a

1000 has_alias_generator = True 1a

1001 if isinstance(arg, NameExpr) and arg.fullname == 'builtins.None': 1a

1002 has_alias_generator = False 1a

1003 return ModelConfigData(has_alias_generator=has_alias_generator) 1a

1004 if isinstance(arg, NameExpr) and arg.fullname in ('builtins.True', 'builtins.False'): 1a

1005 return ModelConfigData(**{name: arg.fullname == 'builtins.True'}) 1a

1006 error_invalid_config_value(name, self._api, arg) 1a

1007 return None 1a

1008 

1009 @staticmethod 1a

1010 def get_has_default(stmt: AssignmentStmt) -> bool: 1a

1011 """Returns a boolean indicating whether the field defined in `stmt` is a required field.""" 

1012 expr = stmt.rvalue 1a

1013 if isinstance(expr, TempNode): 1a

1014 # TempNode means annotation-only, so has no default 

1015 return False 1a

1016 if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME: 1a

1017 # The "default value" is a call to `Field`; at this point, the field has a default if and only if: 

1018 # * there is a positional argument that is not `...` 

1019 # * there is a keyword argument named "default" that is not `...` 

1020 # * there is a "default_factory" that is not `None` 

1021 for arg, name in zip(expr.args, expr.arg_names): 1a

1022 # If name is None, then this arg is the default because it is the only positional argument. 

1023 if name is None or name == 'default': 1a

1024 return arg.__class__ is not EllipsisExpr 1a

1025 if name == 'default_factory': 1a

1026 return not (isinstance(arg, NameExpr) and arg.fullname == 'builtins.None') 1a

1027 return False 1a

1028 # Has no default if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`) 

1029 return not isinstance(expr, EllipsisExpr) 1a

1030 

1031 @staticmethod 1a

1032 def get_strict(stmt: AssignmentStmt) -> bool | None: 1a

1033 """Returns a the `strict` value of a field if defined, otherwise `None`.""" 

1034 expr = stmt.rvalue 1a

1035 if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME: 1a

1036 for arg, name in zip(expr.args, expr.arg_names): 1a

1037 if name != 'strict': 1a

1038 continue 1a

1039 if isinstance(arg, NameExpr): 1039 ↛ 1044line 1039 didn't jump to line 1044 because the condition on line 1039 was always true1a

1040 if arg.fullname == 'builtins.True': 1a

1041 return True 1a

1042 elif arg.fullname == 'builtins.False': 1042 ↛ 1044line 1042 didn't jump to line 1044 because the condition on line 1042 was always true1a

1043 return False 1a

1044 return None 

1045 return None 1a

1046 

1047 @staticmethod 1a

1048 def get_alias_info(stmt: AssignmentStmt) -> tuple[str | None, bool]: 1a

1049 """Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`. 

1050 

1051 `has_dynamic_alias` is True if and only if an alias is provided, but not as a string literal. 

1052 If `has_dynamic_alias` is True, `alias` will be None. 

1053 """ 

1054 expr = stmt.rvalue 1a

1055 if isinstance(expr, TempNode): 1a

1056 # TempNode means annotation-only 

1057 return None, False 1a

1058 

1059 if not ( 

1060 isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME 

1061 ): 

1062 # Assigned value is not a call to pydantic.fields.Field 

1063 return None, False 1a

1064 

1065 if 'validation_alias' in expr.arg_names: 1a

1066 arg = expr.args[expr.arg_names.index('validation_alias')] 1a

1067 elif 'alias' in expr.arg_names: 1a

1068 arg = expr.args[expr.arg_names.index('alias')] 1a

1069 else: 

1070 return None, False 1a

1071 

1072 if isinstance(arg, StrExpr): 1a

1073 return arg.value, False 1a

1074 else: 

1075 return None, True 1a

1076 

1077 @staticmethod 1a

1078 def is_field_frozen(stmt: AssignmentStmt) -> bool: 1a

1079 """Returns whether the field is frozen, extracted from the declaration of the field defined in `stmt`. 

1080 

1081 Note that this is only whether the field was declared to be frozen in a `<field_name> = Field(frozen=True)` 

1082 sense; this does not determine whether the field is frozen because the entire model is frozen; that is 

1083 handled separately. 

1084 """ 

1085 expr = stmt.rvalue 1a

1086 if isinstance(expr, TempNode): 1a

1087 # TempNode means annotation-only 

1088 return False 1a

1089 

1090 if not ( 

1091 isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME 

1092 ): 

1093 # Assigned value is not a call to pydantic.fields.Field 

1094 return False 1a

1095 

1096 for i, arg_name in enumerate(expr.arg_names): 1a

1097 if arg_name == 'frozen': 1a

1098 arg = expr.args[i] 1a

1099 return isinstance(arg, NameExpr) and arg.fullname == 'builtins.True' 1a

1100 return False 1a

1101 

1102 def get_field_arguments( 1a

1103 self, 

1104 fields: list[PydanticModelField], 

1105 typed: bool, 

1106 model_strict: bool, 

1107 use_alias: bool, 

1108 requires_dynamic_aliases: bool, 

1109 is_settings: bool, 

1110 is_root_model: bool, 

1111 force_typevars_invariant: bool = False, 

1112 ) -> list[Argument]: 

1113 """Helper function used during the construction of the `__init__` and `model_construct` method signatures. 

1114 

1115 Returns a list of mypy Argument instances for use in the generated signatures. 

1116 """ 

1117 info = self._cls.info 1a

1118 arguments = [ 1a

1119 field.to_argument( 

1120 info, 

1121 typed=typed, 

1122 model_strict=model_strict, 

1123 force_optional=requires_dynamic_aliases or is_settings, 

1124 use_alias=use_alias, 

1125 api=self._api, 

1126 force_typevars_invariant=force_typevars_invariant, 

1127 is_root_model_root=is_root_model and field.name == 'root', 

1128 ) 

1129 for field in fields 

1130 if not (use_alias and field.has_dynamic_alias) 

1131 ] 

1132 return arguments 1a

1133 

1134 def should_init_forbid_extra(self, fields: list[PydanticModelField], config: ModelConfigData) -> bool: 1a

1135 """Indicates whether the generated `__init__` should get a `**kwargs` at the end of its signature. 

1136 

1137 We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to, 

1138 *unless* a required dynamic alias is present (since then we can't determine a valid signature). 

1139 """ 

1140 if not (config.validate_by_name or config.populate_by_name): 1a

1141 if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)): 1a

1142 return False 1a

1143 if config.forbid_extra: 1a

1144 return True 1a

1145 return self.plugin_config.init_forbid_extra 1a

1146 

1147 @staticmethod 1a

1148 def is_dynamic_alias_present(fields: list[PydanticModelField], has_alias_generator: bool) -> bool: 1a

1149 """Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be 

1150 determined during static analysis. 

1151 """ 

1152 for field in fields: 1a

1153 if field.has_dynamic_alias: 1a

1154 return True 1a

1155 if has_alias_generator: 1a

1156 for field in fields: 1a

1157 if field.alias is None: 1a

1158 return True 1a

1159 return False 1a

1160 

1161 

class ChangeExplicitTypeOfAny(TypeTranslator):
    """A type translator used to change type of Any's, if explicit."""

    def __init__(self, type_of_any: int) -> None:
        super().__init__()
        # The `TypeOfAny` value that explicit Any's should be rewritten to.
        self._type_of_any = type_of_any

    def visit_any(self, t: AnyType) -> Type:  # noqa: D102
        if t.type_of_any != TypeOfAny.explicit:
            return t
        return t.copy_modified(type_of_any=self._type_of_any)

1174 

1175 

class ModelConfigData:
    """Pydantic mypy plugin model config class.

    Holds the subset of model config values the plugin tracks; `None` means "not set".
    """

    def __init__(
        self,
        forbid_extra: bool | None = None,
        frozen: bool | None = None,
        from_attributes: bool | None = None,
        populate_by_name: bool | None = None,
        validate_by_alias: bool | None = None,
        validate_by_name: bool | None = None,
        has_alias_generator: bool | None = None,
        strict: bool | None = None,
    ):
        self.forbid_extra = forbid_extra
        self.frozen = frozen
        self.from_attributes = from_attributes
        self.populate_by_name = populate_by_name
        self.validate_by_alias = validate_by_alias
        self.validate_by_name = validate_by_name
        self.has_alias_generator = has_alias_generator
        self.strict = strict

    def get_values_dict(self) -> dict[str, Any]:
        """Returns a dict of Pydantic model config names to their values.

        It includes the config if config value is not `None`.
        """
        return {attr: value for attr, value in vars(self).items() if value is not None}

    def update(self, config: ModelConfigData | None) -> None:
        """Update Pydantic model config values with the non-`None` values from `config`."""
        if config is not None:
            for attr, value in config.get_values_dict().items():
                setattr(self, attr, value)

    def setdefault(self, key: str, value: Any) -> None:
        """Set default value for Pydantic model config if config value is `None`."""
        if getattr(self, key) is not None:
            return
        setattr(self, key, value)

1217 

1218 

def is_root_model(info: TypeInfo) -> bool:
    """Return whether the type info is a root model subclass (or the `RootModel` class itself)."""
    # `TypeInfo.has_base` matches the class itself as well as any ancestor.
    return info.has_base(ROOT_MODEL_FULLNAME)

1222 

1223 

# Error codes for the pydantic-specific errors reported by this plugin, allowing users to
# silence them selectively (mypy error-code mechanism). Note that ERROR_UNTYPED,
# ERROR_FIELD_DEFAULTS and ERROR_EXTRA_FIELD_ROOT_MODEL share the 'pydantic-field' code string.
ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_attributes call', 'Pydantic')
ERROR_CONFIG = ErrorCode('pydantic-config', 'Invalid config value', 'Pydantic')
ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed', 'Pydantic')
ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior', 'Pydantic')
ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed', 'Pydantic')
ERROR_FIELD_DEFAULTS = ErrorCode('pydantic-field', 'Invalid Field defaults', 'Pydantic')
ERROR_EXTRA_FIELD_ROOT_MODEL = ErrorCode('pydantic-field', 'Extra field on RootModel subclass', 'Pydantic')

1231 

1232 

def error_from_attributes(model_name: str, api: CheckerPluginInterface, context: Context) -> None:
    """Emit a `pydantic-orm` error: the model was not configured with `from_attributes=True`."""
    message = f'"{model_name}" does not have from_attributes=True'
    api.fail(message, context, code=ERROR_ORM)

1236 

1237 

def error_invalid_config_value(name: str, api: SemanticAnalyzerPluginInterface, context: Context) -> None:
    """Emit a `pydantic-config` error: the named config key was set to an uninterpretable value."""
    message = f'Invalid value for "Config.{name}"'
    api.fail(message, context, code=ERROR_CONFIG)

1241 

1242 

def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
    """Emit a `pydantic-alias` error for a required field whose alias cannot be resolved statically.

    This will be called when `warn_required_dynamic_aliases=True`.
    """
    api.fail('Required dynamic aliases disallowed', context, code=ERROR_ALIAS)

1249 

1250 

def error_unexpected_behavior(
    detail: str, api: CheckerPluginInterface | SemanticAnalyzerPluginInterface, context: Context
) -> None:  # pragma: no cover
    """Emit a `pydantic-unexpected` error, asking the user to report the plugin bug upstream."""
    # Can't think of a good way to test this, but I confirmed it renders as desired by adding to a non-error path
    issues_link = 'https://github.com/pydantic/pydantic/issues/new/choose'
    api.fail(
        f'The pydantic mypy plugin ran into unexpected behavior: {detail}\n'
        f'Please consider reporting this bug at {issues_link} so we can try to fix it!',
        context,
        code=ERROR_UNEXPECTED,
    )

1260 

1261 

def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
    """Emit a `pydantic-field` error for a model field declared without a type annotation."""
    message = 'Untyped fields disallowed'
    api.fail(message, context, code=ERROR_UNTYPED)

1265 

1266 

def error_extra_fields_on_root_model(api: CheckerPluginInterface, context: Context) -> None:
    """Emit a `pydantic-field` error when a `RootModel` subclass declares fields besides `root`."""
    message = 'Only `root` is allowed as a field of a `RootModel`'
    api.fail(message, context, code=ERROR_EXTRA_FIELD_ROOT_MODEL)

1270 

1271 

def add_method(
    api: SemanticAnalyzerPluginInterface | CheckerPluginInterface,
    cls: ClassDef,
    name: str,
    args: list[Argument],
    return_type: Type,
    self_type: Type | None = None,
    tvar_def: TypeVarType | None = None,
    is_classmethod: bool = False,
) -> None:
    """Very closely related to `mypy.plugins.common.add_method_to_class`, with a few pydantic-specific changes.

    Synthesizes a method `name` on `cls` with signature `(args) -> return_type`, registers it in the
    class symbol table as plugin-generated, and appends the generated node to the class body.

    Args:
        api: The plugin interface; determines how `builtins.function` is looked up.
        cls: The class definition to attach the method to.
        name: The name of the generated method.
        args: The method arguments, excluding `self`/`cls` (added here).
        return_type: The method's return type.
        self_type: Optional explicit type for the `self`/`cls` argument.
        tvar_def: Optional type variable to scope over the generated signature.
        is_classmethod: Whether to generate a classmethod (wrapped in a `classmethod` decorator).
    """
    info = cls.info

    # First remove any previously generated methods with the same name
    # to avoid clashes and problems in the semantic analyzer.
    if name in info.names:
        sym = info.names[name]
        if sym.plugin_generated and isinstance(sym.node, FuncDef):
            cls.defs.body.remove(sym.node)  # pragma: no cover

    # The two plugin interfaces expose different lookup APIs for the same type.
    if isinstance(api, SemanticAnalyzerPluginInterface):
        function_type = api.named_type('builtins.function')
    else:
        function_type = api.named_generic_type('builtins.function', [])

    if is_classmethod:
        self_type = self_type or TypeType(fill_typevars(info))
        first = [Argument(Var('_cls'), self_type, None, ARG_POS, True)]
    else:
        self_type = self_type or fill_typevars(info)
        # `self` is positional *ONLY* here, but this can't be expressed
        # fully in the mypy internal API. ARG_POS is the closest we can get.
        # Using ARG_POS will, however, give mypy errors if a `self` field
        # is present on a model:
        #
        #     Name "self" already defined (possibly by an import) [no-redef]
        #
        # As a workaround, we give this argument a name that will
        # never conflict. By its positional nature, this name will not
        # be used or exposed to users.
        first = [Argument(Var('__pydantic_self__'), self_type, None, ARG_POS)]
    args = first + args

    # Split the Argument objects into the parallel lists CallableType expects.
    arg_types, arg_names, arg_kinds = [], [], []
    for arg in args:
        assert arg.type_annotation, 'All arguments must be fully typed.'
        arg_types.append(arg.type_annotation)
        arg_names.append(arg.variable.name)
        arg_kinds.append(arg.kind)

    signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type)
    if tvar_def:
        signature.variables = [tvar_def]

    # The generated body is a single `pass`; only the signature matters for type checking.
    func = FuncDef(name, args, Block([PassStmt()]))
    func.info = info
    func.type = set_callable_name(signature, func)
    func.is_class = is_classmethod
    func._fullname = info.fullname + '.' + name
    func.line = info.line

    # NOTE: we would like the plugin generated node to dominate, but we still
    # need to keep any existing definitions so they get semantically analyzed.
    if name in info.names:
        # Get a nice unique name instead.
        r_name = get_unique_redefinition_name(name, info.names)
        info.names[r_name] = info.names[name]

    # Add decorator for is_classmethod
    # The dataclasses plugin claims this is unnecessary for classmethods, but not including it results in a
    # signature incompatible with the superclass, which causes mypy errors to occur for every subclass of BaseModel.
    if is_classmethod:
        func.is_decorated = True
        v = Var(name, func.type)
        v.info = info
        v._fullname = func._fullname
        v.is_classmethod = True
        dec = Decorator(func, [NameExpr('classmethod')], v)
        dec.line = info.line
        sym = SymbolTableNode(MDEF, dec)
    else:
        sym = SymbolTableNode(MDEF, func)
    sym.plugin_generated = True
    info.names[name] = sym

    info.defn.defs.body.append(func)

1358 

1359 

def parse_toml(config_file: str) -> dict[str, Any] | None:
    """Parse `config_file` as TOML and return its contents as a dict.

    Returns `None` when the path does not end in `.toml`, or (on Python < 3.11)
    when no TOML parser is installed.
    """
    if not config_file.endswith('.toml'):
        return None

    # tomllib is stdlib from 3.11; fall back to the third-party `tomli` backport before that.
    if sys.version_info >= (3, 11):
        import tomllib as toml_parser
    else:
        try:
            import tomli as toml_parser
        except ImportError:  # pragma: no cover
            import warnings

            warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.')
            return None

    # TOML parsers require a binary stream.
    with open(config_file, 'rb') as stream:
        return toml_parser.load(stream)