# dataclasses.py
  1. """Provide an enhanced dataclass that performs validation."""
  2. from __future__ import annotations as _annotations
  3. import dataclasses
  4. import sys
  5. import types
  6. from typing import TYPE_CHECKING, Any, Callable, Generic, NoReturn, TypeVar, overload
  7. from typing_extensions import Literal, TypeGuard, dataclass_transform
  8. from ._internal import _config, _decorators, _typing_extra
  9. from ._internal import _dataclasses as _pydantic_dataclasses
  10. from ._migration import getattr_migration
  11. from .config import ConfigDict
  12. from .fields import Field
  13. if TYPE_CHECKING:
  14. from ._internal._dataclasses import PydanticDataclass
  15. __all__ = 'dataclass', 'rebuild_dataclass'
  16. _T = TypeVar('_T')
if sys.version_info >= (3, 10):
    # On 3.10+ the stdlib `dataclasses.dataclass` accepts `kw_only` and `slots`,
    # so the overloads expose them; on older versions they are omitted entirely.

    # Overload: used as `@dataclass(...)` with keyword arguments — returns a decorator.
    @dataclass_transform(field_specifiers=(dataclasses.field, Field))
    @overload
    def dataclass(
        *,
        init: Literal[False] = False,
        repr: bool = True,
        eq: bool = True,
        order: bool = False,
        unsafe_hash: bool = False,
        frozen: bool = False,
        config: ConfigDict | type[object] | None = None,
        validate_on_init: bool | None = None,
        kw_only: bool = ...,
        slots: bool = ...,
    ) -> Callable[[type[_T]], type[PydanticDataclass]]:  # type: ignore
        ...

    # Overload: used as bare `@dataclass` directly on a class — returns the new class.
    @dataclass_transform(field_specifiers=(dataclasses.field, Field))
    @overload
    def dataclass(
        _cls: type[_T],  # type: ignore
        *,
        init: Literal[False] = False,
        repr: bool = True,
        eq: bool = True,
        order: bool = False,
        unsafe_hash: bool = False,
        frozen: bool = False,
        config: ConfigDict | type[object] | None = None,
        validate_on_init: bool | None = None,
        kw_only: bool = ...,
        slots: bool = ...,
    ) -> type[PydanticDataclass]:
        ...

else:
    # Pre-3.10 overloads: identical except `kw_only`/`slots` are unavailable.

    # Overload: used as `@dataclass(...)` with keyword arguments — returns a decorator.
    @dataclass_transform(field_specifiers=(dataclasses.field, Field))
    @overload
    def dataclass(
        *,
        init: Literal[False] = False,
        repr: bool = True,
        eq: bool = True,
        order: bool = False,
        unsafe_hash: bool = False,
        frozen: bool = False,
        config: ConfigDict | type[object] | None = None,
        validate_on_init: bool | None = None,
    ) -> Callable[[type[_T]], type[PydanticDataclass]]:  # type: ignore
        ...

    # Overload: used as bare `@dataclass` directly on a class — returns the new class.
    @dataclass_transform(field_specifiers=(dataclasses.field, Field))
    @overload
    def dataclass(
        _cls: type[_T],  # type: ignore
        *,
        init: Literal[False] = False,
        repr: bool = True,
        eq: bool = True,
        order: bool = False,
        unsafe_hash: bool = False,
        frozen: bool = False,
        config: ConfigDict | type[object] | None = None,
        validate_on_init: bool | None = None,
    ) -> type[PydanticDataclass]:
        ...
@dataclass_transform(field_specifiers=(dataclasses.field, Field))
def dataclass(
    _cls: type[_T] | None = None,
    *,
    init: Literal[False] = False,
    repr: bool = True,
    eq: bool = True,
    order: bool = False,
    unsafe_hash: bool = False,
    frozen: bool = False,
    config: ConfigDict | type[object] | None = None,
    validate_on_init: bool | None = None,
    kw_only: bool = False,
    slots: bool = False,
) -> Callable[[type[_T]], type[PydanticDataclass]] | type[PydanticDataclass]:
    """Usage docs: https://docs.pydantic.dev/2.4/concepts/dataclasses/

    A decorator used to create a Pydantic-enhanced dataclass, similar to the standard Python `dataclass`,
    but with added validation.

    This function should be used similarly to `dataclasses.dataclass`.

    Args:
        _cls: The target `dataclass`.
        init: Included for signature compatibility with `dataclasses.dataclass`, and is passed through to
            `dataclasses.dataclass` when appropriate. If specified, must be set to `False`, as pydantic inserts its
            own `__init__` function.
        repr: A boolean indicating whether or not to include the field in the `__repr__` output.
        eq: Determines if a `__eq__` should be generated for the class.
        order: Determines if comparison magic methods should be generated, such as `__lt__`, but not `__eq__`.
        unsafe_hash: Determines if an unsafe hashing function should be included in the class.
        frozen: Determines if the generated class should be a 'frozen' `dataclass`, which does not allow its
            attributes to be modified from its constructor.
        config: A configuration for the `dataclass` generation.
        validate_on_init: A deprecated parameter included for backwards compatibility; in V2, all Pydantic dataclasses
            are validated on init.
        kw_only: Determines if `__init__` method parameters must be specified by keyword only. Defaults to `False`.
        slots: Determines if the generated class should be a 'slots' `dataclass`, which does not allow the addition of
            new attributes after instantiation.

    Returns:
        A decorator that accepts a class as its argument and returns a Pydantic `dataclass`.

    Raises:
        AssertionError: Raised if `init` is not `False` or `validate_on_init` is `False`.
    """
    assert init is False, 'pydantic.dataclasses.dataclass only supports init=False'
    assert validate_on_init is not False, 'validate_on_init=False is no longer supported'

    # `kw_only` / `slots` only exist on `dataclasses.dataclass` from 3.10 onwards,
    # so they are forwarded conditionally via **kwargs below.
    if sys.version_info >= (3, 10):
        kwargs = dict(kw_only=kw_only, slots=slots)
    else:
        kwargs = {}

    def create_dataclass(cls: type[Any]) -> type[PydanticDataclass]:
        """Create a Pydantic dataclass from a regular dataclass.

        Args:
            cls: The class to create the Pydantic dataclass from.

        Returns:
            A Pydantic dataclass.
        """
        original_cls = cls

        # Resolve config: an explicit `config` argument wins; otherwise fall back to a
        # `__pydantic_config__` attribute defined on the class (if any).
        config_dict = config
        if config_dict is None:
            # if not explicitly provided, read from the type
            cls_config = getattr(cls, '__pydantic_config__', None)
            if cls_config is not None:
                config_dict = cls_config
        config_wrapper = _config.ConfigWrapper(config_dict)
        decorators = _decorators.DecoratorInfos.build(cls)

        # Keep track of the original __doc__ so that we can restore it after applying the dataclasses decorator
        # Otherwise, classes with no __doc__ will have their signature added into the JSON schema description,
        # since dataclasses.dataclass will set this as the __doc__
        original_doc = cls.__doc__

        if _pydantic_dataclasses.is_builtin_dataclass(cls):
            # Don't preserve the docstring for vanilla dataclasses, as it may include the signature
            # This matches v1 behavior, and there was an explicit test for it
            original_doc = None

            # We don't want to add validation to the existing std lib dataclass, so we will subclass it
            # If the class is generic, we need to make sure the subclass also inherits from Generic
            # with all the same parameters.
            bases = (cls,)
            if issubclass(cls, Generic):
                generic_base = Generic[cls.__parameters__]  # type: ignore
                bases = bases + (generic_base,)
            cls = types.new_class(cls.__name__, bases)

        cls = dataclasses.dataclass(  # type: ignore[call-overload]
            cls,
            # the value of init here doesn't affect anything except that it makes it easier to generate a signature
            init=True,
            repr=repr,
            eq=eq,
            order=order,
            unsafe_hash=unsafe_hash,
            frozen=frozen,
            **kwargs,
        )

        cls.__pydantic_decorators__ = decorators  # type: ignore
        cls.__doc__ = original_doc
        # Restore identity metadata clobbered by `types.new_class` / `dataclasses.dataclass`.
        cls.__module__ = original_cls.__module__
        cls.__qualname__ = original_cls.__qualname__
        # Build the pydantic-core schema; `raise_errors=False` so unresolved forward refs
        # leave the class incomplete (rebuildable later) instead of failing at decoration time.
        pydantic_complete = _pydantic_dataclasses.complete_dataclass(
            cls, config_wrapper, raise_errors=False, types_namespace=None
        )
        cls.__pydantic_complete__ = pydantic_complete  # type: ignore
        return cls

    if _cls is None:
        # Called with arguments, e.g. `@dataclass(...)` — return the decorator.
        return create_dataclass

    # Called bare, e.g. `@dataclass` — decorate immediately.
    return create_dataclass(_cls)
# Module-level __getattr__ hook (PEP 562): routes lookups of moved/removed V1 names
# through the migration shim so old import paths keep working (or raise helpful errors).
__getattr__ = getattr_migration(__name__)
if (3, 8) <= sys.version_info < (3, 11):
    # Monkeypatch dataclasses.InitVar so that typing doesn't error if it occurs as a type when evaluating type hints
    # Starting in 3.11, typing.get_type_hints will not raise an error if the retrieved type hints are not callable.

    def _call_initvar(*args: Any, **kwargs: Any) -> NoReturn:
        """This function does nothing but raise an error that is as similar as possible to what you'd get
        if you were to try calling `InitVar[int]()` without this monkeypatch. The whole purpose is just
        to ensure typing._type_check does not error if the type hint evaluates to `InitVar[<parameter>]`.
        """
        raise TypeError("'InitVar' object is not callable")

    # Applied at import time; affects the stdlib `dataclasses.InitVar` process-wide.
    dataclasses.InitVar.__call__ = _call_initvar
  194. def rebuild_dataclass(
  195. cls: type[PydanticDataclass],
  196. *,
  197. force: bool = False,
  198. raise_errors: bool = True,
  199. _parent_namespace_depth: int = 2,
  200. _types_namespace: dict[str, Any] | None = None,
  201. ) -> bool | None:
  202. """Try to rebuild the pydantic-core schema for the dataclass.
  203. This may be necessary when one of the annotations is a ForwardRef which could not be resolved during
  204. the initial attempt to build the schema, and automatic rebuilding fails.
  205. This is analogous to `BaseModel.model_rebuild`.
  206. Args:
  207. cls: The class to build the dataclass core schema for.
  208. force: Whether to force the rebuilding of the model schema, defaults to `False`.
  209. raise_errors: Whether to raise errors, defaults to `True`.
  210. _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
  211. _types_namespace: The types namespace, defaults to `None`.
  212. Returns:
  213. Returns `None` if the schema is already "complete" and rebuilding was not required.
  214. If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`.
  215. """
  216. if not force and cls.__pydantic_complete__:
  217. return None
  218. else:
  219. if _types_namespace is not None:
  220. types_namespace: dict[str, Any] | None = _types_namespace.copy()
  221. else:
  222. if _parent_namespace_depth > 0:
  223. frame_parent_ns = _typing_extra.parent_frame_namespace(parent_depth=_parent_namespace_depth) or {}
  224. # Note: we may need to add something similar to cls.__pydantic_parent_namespace__ from BaseModel
  225. # here when implementing handling of recursive generics. See BaseModel.model_rebuild for reference.
  226. types_namespace = frame_parent_ns
  227. else:
  228. types_namespace = {}
  229. types_namespace = _typing_extra.get_cls_types_namespace(cls, types_namespace)
  230. return _pydantic_dataclasses.complete_dataclass(
  231. cls,
  232. _config.ConfigWrapper(cls.__pydantic_config__, check=False),
  233. raise_errors=raise_errors,
  234. types_namespace=types_namespace,
  235. )
  236. def is_pydantic_dataclass(__cls: type[Any]) -> TypeGuard[type[PydanticDataclass]]:
  237. """Whether a class is a pydantic dataclass.
  238. Args:
  239. __cls: The class.
  240. Returns:
  241. `True` if the class is a pydantic dataclass, `False` otherwise.
  242. """
  243. return dataclasses.is_dataclass(__cls) and '__pydantic_validator__' in __cls.__dict__