diff --git a/CITATION.cff b/CITATION.cff index 6ba15db207e..de1294d0666 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -44,5 +44,5 @@ keywords: - hints - typing license: MIT -version: v2.10.1 -date-released: 2024-11-21 +version: v2.10.3 +date-released: 2024-12-03 diff --git a/HISTORY.md b/HISTORY.md index e114f0f90e0..3ca97b58c09 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,3 +1,33 @@ +## v2.10.3 (2024-12-03) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.10.3) + +### What's Changed + +#### Fixes + +* Set fields when `defer_build` is set on Pydantic dataclasses by @Viicos in [#10984](https://github.com/pydantic/pydantic/pull/10984) +* Do not resolve the JSON Schema reference for `dict` core schema keys by @Viicos in [#10989](https://github.com/pydantic/pydantic/pull/10989) +* Use the globals of the function when evaluating the return type for `PlainSerializer` and `WrapSerializer` functions by @Viicos in [#11008](https://github.com/pydantic/pydantic/pull/11008) +* Fix host required enforcement for urls to be compatible with v2.9 behavior by @sydney-runkle in [#11027](https://github.com/pydantic/pydantic/pull/11027) +* Add a `default_factory_takes_validated_data` property to `FieldInfo` by @Viicos in [#11034](https://github.com/pydantic/pydantic/pull/11034) +* Fix url json schema in `serialization` mode by @sydney-runkle in [#11035](https://github.com/pydantic/pydantic/pull/11035) + +## v2.10.2 (2024-11-25) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.10.2) + +### What's Changed + +#### Fixes + +* Only evaluate FieldInfo annotations if required during schema building by @Viicos in [#10769](https://github.com/pydantic/pydantic/pull/10769) +* Do not evaluate annotations for private fields by @Viicos in [#10962](https://github.com/pydantic/pydantic/pull/10962) +* Support serialization as any for `Secret` types and `Url` types by @sydney-runkle in [#10947](https://github.com/pydantic/pydantic/pull/10947) +* Fix type hint of `Field.default` to be compatible with Python 3.8 and 3.9 by @Viicos in [#10972](https://github.com/pydantic/pydantic/pull/10972) +* Add hashing support for URL types by @sydney-runkle in [#10975](https://github.com/pydantic/pydantic/pull/10975) +* Hide `BaseModel.__replace__` definition from type checkers by @Viicos in [#10979](https://github.com/pydantic/pydantic/pull/10979) ## v2.10.1 (2024-11-21) [GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.10.1) diff --git a/pydantic/_internal/_dataclasses.py b/pydantic/_internal/_dataclasses.py index 208220ccbad..f2e6f22fa05 100644 --- a/pydantic/_internal/_dataclasses.py +++ b/pydantic/_internal/_dataclasses.py @@ -125,6 +125,8 @@ def __init__(__dataclass_self__: PydanticDataclass, *args: Any, **kwargs: Any) - cls.__init__ = __init__ # type: ignore cls.__pydantic_config__ = config_wrapper.config_dict # type: ignore + set_dataclass_fields(cls, ns_resolver, config_wrapper=config_wrapper) + if not _force_build and config_wrapper.defer_build: set_dataclass_mocks(cls, cls.__name__) return False @@ -134,8 +136,6 @@ def __init__(__dataclass_self__: PydanticDataclass, *args: Any, **kwargs: Any) - 'Support for `__post_init_post_parse__` has been dropped, the method will not be called', DeprecationWarning ) - set_dataclass_fields(cls, ns_resolver, config_wrapper=config_wrapper) - typevars_map = get_standard_typevars_map(cls) gen_schema = GenerateSchema( config_wrapper,
Any) - typevars_map=typevars_map, ) - # set __signature__ attr only for model class, but not for its instances + # set __signature__ attr only for the class, but not for its instances # (because instances can define `__call__`, and `inspect.signature` shouldn't # use the `__signature__` attribute and instead generate from `__call__`). cls.__signature__ = LazyClassAttribute( '__signature__', partial( generate_pydantic_signature, - # It's' important that we reference the original_init here + # It's important that we reference the `original_init` here, + # as it is the one synthesized by the stdlib `dataclass` module: init=original_init, fields=cls.__pydantic_fields__, # type: ignore populate_by_name=config_wrapper.populate_by_name, diff --git a/pydantic/_internal/_fields.py b/pydantic/_internal/_fields.py index 688b69d65b1..5c760abc292 100644 --- a/pydantic/_internal/_fields.py +++ b/pydantic/_internal/_fields.py @@ -109,7 +109,7 @@ def collect_model_fields( # noqa: C901 if model_fields := getattr(base, '__pydantic_fields__', None): parent_fields_lookup.update(model_fields) - type_hints = _typing_extra.get_cls_type_hints(cls, ns_resolver=ns_resolver, lenient=True) + type_hints = _typing_extra.get_model_type_hints(cls, ns_resolver=ns_resolver) # https://docs.python.org/3/howto/annotations.html#accessing-the-annotations-dict-of-an-object-in-python-3-9-and-older # annotations is only used for finding fields in parent classes @@ -117,7 +117,7 @@ def collect_model_fields( # noqa: C901 fields: dict[str, FieldInfo] = {} class_vars: set[str] = set() - for ann_name, ann_type in type_hints.items(): + for ann_name, (ann_type, evaluated) in type_hints.items(): if ann_name == 'model_config': # We never want to treat `model_config` as a field # Note: we may need to change this logic if/when we introduce a `BareModel` class with no @@ -202,6 +202,7 @@ def collect_model_fields( # noqa: C901 except AttributeError: if ann_name in annotations: field_info = FieldInfo_.from_annotation(ann_type) + field_info.evaluated = evaluated else: # if field has no default value and is not in __annotations__ this means that it is # defined in a base class and we can take it from there @@ -214,6 +215,7 @@ def collect_model_fields( # noqa: C901 # generated thanks to models not being fully defined while initializing recursive models. # Nothing stops us from just creating a new FieldInfo for this type hint, so we do this. field_info = FieldInfo_.from_annotation(ann_type) + field_info.evaluated = evaluated else: _warn_on_nested_alias_in_annotation(ann_type, ann_name) if isinstance(default, FieldInfo_) and ismethoddescriptor(default.default): @@ -224,6 +226,7 @@ def collect_model_fields( # noqa: C901 default.default = default.default.__get__(None, cls) field_info = FieldInfo_.from_annotated_attribute(ann_type, default) + field_info.evaluated = evaluated # attributes which are fields are removed from the class namespace: # 1. To match the behaviour of annotation-only fields # 2. 
To avoid false positives in the NameError check above @@ -316,7 +319,7 @@ def collect_dataclass_fields( continue globalns, localns = ns_resolver.types_namespace - ann_type = _typing_extra.eval_type(dataclass_field.type, globalns, localns, lenient=True) + ann_type, _ = _typing_extra.try_eval_type(dataclass_field.type, globalns, localns) if _typing_extra.is_classvar_annotation(ann_type): continue diff --git a/pydantic/_internal/_generate_schema.py b/pydantic/_internal/_generate_schema.py index 31537516d55..ee26d080f62 100644 --- a/pydantic/_internal/_generate_schema.py +++ b/pydantic/_internal/_generate_schema.py @@ -1221,12 +1221,16 @@ def _common_field_schema( # C901 ) -> _CommonField: # Update FieldInfo annotation if appropriate: FieldInfo = import_cached_field_info() - if has_instance_in_type(field_info.annotation, (ForwardRef, str)): - # TODO Can we use field_info.apply_typevars_map here? Shouldn't we use lenient=False? - evaluated = _typing_extra.eval_type(field_info.annotation, *self._types_namespace, lenient=True) - evaluated = replace_types(evaluated, self._typevars_map) - if evaluated is not field_info.annotation and not has_instance_in_type(evaluated, PydanticRecursiveRef): - new_field_info = FieldInfo.from_annotation(evaluated) + if not field_info.evaluated: + # TODO Can we use field_info.apply_typevars_map here? + try: + evaluated_type = _typing_extra.eval_type(field_info.annotation, *self._types_namespace) + except NameError as e: + raise PydanticUndefinedAnnotation.from_name_error(e) from e + evaluated_type = replace_types(evaluated_type, self._typevars_map) + field_info.evaluated = True + if not has_instance_in_type(evaluated_type, PydanticRecursiveRef): + new_field_info = FieldInfo.from_annotation(evaluated_type) field_info.annotation = new_field_info.annotation # Handle any field info attributes that may have been obtained from now-resolved annotations @@ -1344,12 +1348,13 @@ def _type_alias_type_schema(self, obj: TypeAliasType) -> CoreSchema: return maybe_schema origin: TypeAliasType = get_origin(obj) or obj - - annotation = origin.__value__ typevars_map = get_standard_typevars_map(obj) with self._ns_resolver.push(origin): - annotation = _typing_extra.eval_type(annotation, *self._types_namespace, lenient=True) + try: + annotation = _typing_extra.eval_type(origin.__value__, *self._types_namespace) + except NameError as e: + raise PydanticUndefinedAnnotation.from_name_error(e) from e annotation = replace_types(annotation, typevars_map) schema = self.generate_schema(annotation) assert schema['type'] != 'definitions' @@ -1427,9 +1432,7 @@ def _typed_dict_schema(self, typed_dict_cls: Any, origin: Any) -> core_schema.Co field_docstrings = None try: - annotations = _typing_extra.get_cls_type_hints( - typed_dict_cls, ns_resolver=self._ns_resolver, lenient=False - ) + annotations = _typing_extra.get_cls_type_hints(typed_dict_cls, ns_resolver=self._ns_resolver) except NameError as e: raise PydanticUndefinedAnnotation.from_name_error(e) from e @@ -1491,9 +1494,7 @@ def _namedtuple_schema(self, namedtuple_cls: Any, origin: Any) -> core_schema.Co namedtuple_cls = origin try: - annotations = _typing_extra.get_cls_type_hints( - namedtuple_cls, ns_resolver=self._ns_resolver, lenient=False - ) + annotations = _typing_extra.get_cls_type_hints(namedtuple_cls, ns_resolver=self._ns_resolver) except NameError as e: raise PydanticUndefinedAnnotation.from_name_error(e) from e if not annotations: diff --git a/pydantic/_internal/_typing_extra.py b/pydantic/_internal/_typing_extra.py index 
65dec2d9351..399c8c46114 100644 --- a/pydantic/_internal/_typing_extra.py +++ b/pydantic/_internal/_typing_extra.py @@ -9,7 +9,7 @@ import typing import warnings from functools import lru_cache, partial -from typing import Any, Callable +from typing import TYPE_CHECKING, Any, Callable import typing_extensions from typing_extensions import TypeIs, deprecated, get_args, get_origin @@ -23,6 +23,8 @@ from types import EllipsisType as EllipsisType from types import NoneType as NoneType +if TYPE_CHECKING: + from pydantic import BaseModel # See https://typing-extensions.readthedocs.io/en/latest/#runtime-use-of-types: @@ -449,17 +451,77 @@ def parent_frame_namespace(*, parent_depth: int = 2, force: bool = False) -> dic return frame.f_locals +def _type_convert(arg: Any) -> Any: + """Convert `None` to `NoneType` and strings to `ForwardRef` instances. + + This is a backport of the private `typing._type_convert` function. When + evaluating a type, `ForwardRef._evaluate` ends up being called, and is + responsible for making this conversion. However, we still have to apply + it for the first argument passed to our type evaluation functions, similarly + to the `typing.get_type_hints` function. + """ + if arg is None: + return NoneType + if isinstance(arg, str): + # Like `typing.get_type_hints`, assume the arg can be in any context, + # hence the proper `is_argument` and `is_class` args: + return _make_forward_ref(arg, is_argument=False, is_class=True) + return arg + + +def get_model_type_hints( + obj: type[BaseModel], + *, + ns_resolver: NsResolver | None = None, +) -> dict[str, tuple[Any, bool]]: + """Collect annotations from a Pydantic model class, including those from parent classes. + + Args: + obj: The Pydantic model to inspect. + ns_resolver: A namespace resolver instance to use. Defaults to an empty instance. + + Returns: + A dictionary mapping annotation names to a two-tuple: the first element is the evaluated + type or the original annotation if a `NameError` occurred; the second element is a boolean + indicating whether the evaluation succeeded. + """ + hints: dict[str, Any] | dict[str, tuple[Any, bool]] = {} + ns_resolver = ns_resolver or NsResolver() + + for base in reversed(obj.__mro__): + ann: dict[str, Any] | None = base.__dict__.get('__annotations__') + if not ann or isinstance(ann, types.GetSetDescriptorType): + continue + with ns_resolver.push(base): + globalns, localns = ns_resolver.types_namespace + for name, value in ann.items(): + if name.startswith('_'): + # For private attributes, we only need the annotation to detect the `ClassVar` special form. + # For this reason, we still try to evaluate it, but we also catch any possible exception (on + # top of the `NameError`s caught in `try_eval_type`) that could happen so that users are free + # to use any kind of forward annotation for private fields (e.g. circular imports, new typing + # syntax, etc). + try: + hints[name] = try_eval_type(value, globalns, localns) + except Exception: + hints[name] = (value, False) + else: + hints[name] = try_eval_type(value, globalns, localns) + return hints + + def get_cls_type_hints( - obj: type[Any], *, ns_resolver: NsResolver | None = None, lenient: bool = False + obj: type[Any], + *, + ns_resolver: NsResolver | None = None, ) -> dict[str, Any]: """Collect annotations from a class, including those from parent classes. Args: obj: The class to inspect. ns_resolver: A namespace resolver instance to use. Defaults to an empty instance. 
- lenient: Whether to keep unresolvable annotations as is or re-raise the `NameError` exception. Default: re-raise. """ - hints = {} + hints: dict[str, Any] | dict[str, tuple[Any, bool]] = {} ns_resolver = ns_resolver or NsResolver() for base in reversed(obj.__mro__): @@ -469,42 +531,54 @@ def get_cls_type_hints( with ns_resolver.push(base): globalns, localns = ns_resolver.types_namespace for name, value in ann.items(): - hints[name] = eval_type(value, globalns, localns, lenient=lenient) + hints[name] = eval_type(value, globalns, localns) return hints -def eval_type( +def try_eval_type( value: Any, globalns: GlobalsNamespace | None = None, localns: MappingNamespace | None = None, - *, - lenient: bool = False, -) -> Any: - """Evaluate the annotation using the provided namespaces. +) -> tuple[Any, bool]: + """Try evaluating the annotation using the provided namespaces. Args: value: The value to evaluate. If `None`, it will be replaced by `type[None]`. If an instance of `str`, it will be converted to a `ForwardRef`. localns: The local namespace to use during annotation evaluation. globalns: The global namespace to use during annotation evaluation. - lenient: Whether to keep unresolvable annotations as is or re-raise the `NameError` exception. Default: re-raise. + + Returns: + A two-tuple containing the possibly evaluated type and a boolean indicating + whether the evaluation succeeded or not. """ - if value is None: - value = NoneType - elif isinstance(value, str): - value = _make_forward_ref(value, is_argument=False, is_class=True) + value = _type_convert(value) try: - return eval_type_backport(value, globalns, localns) + return eval_type_backport(value, globalns, localns), True except NameError: - if not lenient: - raise - # the point of this function is to be tolerant to this case - return value + return value, False + + +def eval_type( + value: Any, + globalns: GlobalsNamespace | None = None, + localns: MappingNamespace | None = None, +) -> Any: + """Evaluate the annotation using the provided namespaces. + + Args: + value: The value to evaluate. If `None`, it will be replaced by `type[None]`. If an instance + of `str`, it will be converted to a `ForwardRef`. + localns: The local namespace to use during annotation evaluation. + globalns: The global namespace to use during annotation evaluation. 
+ """ + value = _type_convert(value) + return eval_type_backport(value, globalns, localns) @deprecated( - '`eval_type_lenient` is deprecated, use `eval_type` with `lenient=True` instead.', + '`eval_type_lenient` is deprecated, use `try_eval_type` instead.', category=None, ) def eval_type_lenient( @@ -512,7 +586,8 @@ def eval_type_lenient( globalns: GlobalsNamespace | None = None, localns: MappingNamespace | None = None, ) -> Any: - return eval_type(value, globalns, localns, lenient=True) + ev, _ = try_eval_type(value, globalns, localns) + return ev def eval_type_backport( diff --git a/pydantic/fields.py b/pydantic/fields.py index 0605e9e27a2..9582685cb0f 100644 --- a/pydantic/fields.py +++ b/pydantic/fields.py @@ -154,6 +154,7 @@ class FieldInfo(_repr.Representation): __slots__ = ( 'annotation', + 'evaluated', 'default', 'default_factory', 'alias', @@ -207,6 +208,7 @@ def __init__(self, **kwargs: Unpack[_FieldInfoInputs]) -> None: self._attributes_set = {k: v for k, v in kwargs.items() if v is not _Unset} kwargs = {k: _DefaultValues.get(k) if v is _Unset else v for k, v in kwargs.items()} # type: ignore self.annotation, annotation_metadata = self._extract_metadata(kwargs.get('annotation')) + self.evaluated = False default = kwargs.pop('default', PydanticUndefined) if default is Ellipsis: @@ -569,6 +571,15 @@ def deprecation_message(self) -> str | None: return 'deprecated' if self.deprecated else None return self.deprecated if isinstance(self.deprecated, str) else self.deprecated.message + @property + def default_factory_takes_validated_data(self) -> bool | None: + """Whether the provided default factory callable has a validated data parameter. + + Returns `None` if no default factory is set. + """ + if self.default_factory is not None: + return _fields.takes_validated_data_argument(self.default_factory) + @overload def get_default( self, *, call_default_factory: Literal[True], validated_data: dict[str, Any] | None = None @@ -594,14 +605,15 @@ def get_default(self, *, call_default_factory: bool = False, validated_data: dic if self.default_factory is None: return _utils.smart_deepcopy(self.default) elif call_default_factory: - if _fields.takes_validated_data_argument(self.default_factory): + if self.default_factory_takes_validated_data: + fac = cast('Callable[[dict[str, Any]], Any]', self.default_factory) if validated_data is None: raise ValueError( "The default factory requires the 'validated_data' argument, which was not provided when calling 'get_default'." ) - return self.default_factory(validated_data) + return fac(validated_data) else: - fac = cast(Callable[[], Any], self.default_factory) # Pyright doesn't narrow correctly + fac = cast('Callable[[], Any]', self.default_factory) return fac() else: return None @@ -654,7 +666,7 @@ def apply_typevars_map( pydantic._internal._generics.replace_types is used for replacing the typevars with their concrete types. 
""" - annotation = _typing_extra.eval_type(self.annotation, globalns, localns, lenient=True) + annotation, _ = _typing_extra.try_eval_type(self.annotation, globalns, localns) self.annotation = _generics.replace_types(annotation, typevars_map) def __repr_args__(self) -> ReprArgs: @@ -662,9 +674,9 @@ def __repr_args__(self) -> ReprArgs: yield 'required', self.is_required() for s in self.__slots__: - if s == '_attributes_set': - continue - if s == 'annotation': + # TODO: properly make use of the protocol (https://rich.readthedocs.io/en/stable/pretty.html#rich-repr-protocol) + # By yielding a three-tuple: + if s in ('_attributes_set', 'annotation', 'evaluated'): continue elif s == 'metadata' and not self.metadata: continue @@ -732,7 +744,7 @@ class _EmptyKwargs(typing_extensions.TypedDict): # to understand the magic that happens at runtime with the following overloads: @overload # type hint the return value as `Any` to avoid type checking regressions when using `...`. def Field( - default: _typing_extra.EllipsisType, + default: ellipsis, # noqa: F821 # TODO: use `_typing_extra.EllipsisType` when we drop Py3.9 *, alias: str | None = _Unset, alias_priority: int | None = _Unset, diff --git a/pydantic/functional_serializers.py b/pydantic/functional_serializers.py index b653c8a6313..9f850295f99 100644 --- a/pydantic/functional_serializers.py +++ b/pydantic/functional_serializers.py @@ -63,13 +63,14 @@ def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaH The Pydantic core schema. """ schema = handler(source_type) - globalns, localns = handler._get_types_namespace() try: + # Do not pass in globals as the function could be defined in a different module. + # Instead, let `get_function_return_type` infer the globals to use, but still pass + # in locals that may contain a parent/rebuild namespace: return_type = _decorators.get_function_return_type( self.func, self.return_type, - globalns=globalns, - localns=localns, + localns=handler._get_types_namespace().locals, ) except NameError as e: raise PydanticUndefinedAnnotation.from_name_error(e) from e @@ -166,11 +167,13 @@ def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaH schema = handler(source_type) globalns, localns = handler._get_types_namespace() try: + # Do not pass in globals as the function could be defined in a different module. 
+ # Instead, let `get_function_return_type` infer the globals to use, but still pass + # in locals that may contain a parent/rebuild namespace: return_type = _decorators.get_function_return_type( self.func, self.return_type, - globalns=globalns, - localns=localns, + localns=handler._get_types_namespace().locals, ) except NameError as e: raise PydanticUndefinedAnnotation.from_name_error(e) from e diff --git a/pydantic/json_schema.py b/pydantic/json_schema.py index afbc1831c69..301332e9ba7 100644 --- a/pydantic/json_schema.py +++ b/pydantic/json_schema.py @@ -1009,24 +1009,36 @@ def dict_schema(self, schema: core_schema.DictSchema) -> JsonSchemaValue: """ json_schema: JsonSchemaValue = {'type': 'object'} - keys_schema = self.resolve_ref_schema( - self.generate_inner(schema['keys_schema']).copy() if 'keys_schema' in schema else {} - ) - keys_pattern = keys_schema.pop('pattern', None) + keys_schema = self.generate_inner(schema['keys_schema']).copy() if 'keys_schema' in schema else {} + if '$ref' not in keys_schema: + keys_pattern = keys_schema.pop('pattern', None) + # Don't give a title to patternProperties/propertyNames: + keys_schema.pop('title', None) + else: + # Here, we assume that if the keys schema is a definition reference, + # it can't be a simple string core schema (and thus no pattern can exist). + # However, this only holds in practice (in theory, a definition reference core + # schema could be generated for a simple string schema). + # Note that we avoid calling `self.resolve_ref_schema`, as the referenced definition might not exist yet. + keys_pattern = None values_schema = self.generate_inner(schema['values_schema']).copy() if 'values_schema' in schema else {} - # don't give a title to additionalProperties, patternProperties and propertyNames + # don't give a title to additionalProperties: values_schema.pop('title', None) - keys_schema.pop('title', None) + if values_schema or keys_pattern is not None: # don't add additionalProperties if it's empty if keys_pattern is None: json_schema['additionalProperties'] = values_schema else: json_schema['patternProperties'] = {keys_pattern: values_schema} - # The len check indicates that constraints are probably present: - if keys_schema.get('type') == 'string' and len(keys_schema) > 1: - keys_schema.pop('type') + if ( + # The len check indicates that constraints are probably present: + (keys_schema.get('type') == 'string' and len(keys_schema) > 1) + # If this is a definition reference schema, it most likely has constraints: + or '$ref' in keys_schema + ): + keys_schema.pop('type', None) json_schema['propertyNames'] = keys_schema self.update_with_validations(json_schema, schema, self.ValidationsMapping.object) @@ -1559,6 +1571,9 @@ def resolve_ref_schema(self, json_schema: JsonSchemaValue) -> JsonSchemaValue: Returns: The resolved schema. + + Raises: + RuntimeError: If the schema reference can't be found in definitions. """ if '$ref' not in json_schema: return json_schema diff --git a/pydantic/main.py b/pydantic/main.py index 47959b8f235..ca5a4529e9e 100644 --- a/pydantic/main.py +++ b/pydantic/main.py @@ -855,10 +855,6 @@ def __deepcopy__(self, memo: dict[int, Any] | None = None) -> Self: return m - def __replace__(self, **changes: Any) -> Self: - """Creates a new instance of the model, replacing fields with values from changes. 
Relevant for v3.13+.""" - return self.model_copy(update=changes) - if not TYPE_CHECKING: # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access # The same goes for __setattr__ and __delattr__, see: https://github.com/pydantic/pydantic/issues/8643 @@ -970,6 +966,11 @@ def __delattr__(self, item: str) -> Any: except AttributeError: raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') + # Because we make use of `@dataclass_transform()`, `__replace__` is already synthesized by + # type checkers, so we define the implementation in this `if not TYPE_CHECKING:` block: + def __replace__(self, **changes: Any) -> Self: + return self.model_copy(update=changes) + def _check_frozen(self, name: str, value: Any) -> None: if self.model_config.get('frozen', None): typ = 'frozen_instance' diff --git a/pydantic/networks.py b/pydantic/networks.py index ab590eaa18b..755835199ce 100644 --- a/pydantic/networks.py +++ b/pydantic/networks.py @@ -10,7 +10,7 @@ from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network from typing import TYPE_CHECKING, Any, ClassVar -from pydantic_core import MultiHostHost, PydanticCustomError, core_schema +from pydantic_core import MultiHostHost, PydanticCustomError, SchemaSerializer, core_schema from pydantic_core import MultiHostUrl as _CoreMultiHostUrl from pydantic_core import Url as _CoreUrl from typing_extensions import Annotated, Self, TypeAlias @@ -224,6 +224,9 @@ def __deepcopy__(self, memo: dict) -> Self: def __eq__(self, other: Any) -> bool: return self.__class__ is other.__class__ and self._url == other._url + def __hash__(self) -> int: + return hash(self._url) + @classmethod def build( cls, @@ -285,6 +288,17 @@ def wrap_val(v, h): serialization=core_schema.to_string_ser_schema(), ) + @classmethod + def __get_pydantic_json_schema__( + cls, core_schema: core_schema.CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler + ) -> JsonSchemaValue: + # we use the url schema for json schema generation, but we might have to extract it from + # the function-wrap schema we use as a tool for validation on initialization + inner_schema = core_schema['schema'] if core_schema['type'] == 'function-wrap' else core_schema + return handler(inner_schema) + + __pydantic_serializer__ = SchemaSerializer(core_schema.any_schema(serialization=core_schema.to_string_ser_schema())) + class _BaseMultiHostUrl: _constraints: ClassVar[UrlConstraints] = UrlConstraints() @@ -368,6 +382,9 @@ def __deepcopy__(self, memo: dict) -> Self: def __eq__(self, other: Any) -> bool: return self.__class__ is other.__class__ and self._url == other._url + def __hash__(self) -> int: + return hash(self._url) + @classmethod def build( cls, @@ -435,6 +452,17 @@ def wrap_val(v, h): serialization=core_schema.to_string_ser_schema(), ) + @classmethod + def __get_pydantic_json_schema__( + cls, core_schema: core_schema.CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler + ) -> JsonSchemaValue: + # we use the url schema for json schema generation, but we might have to extract it from + # the function-wrap schema we use as a tool for validation on initialization + inner_schema = core_schema['schema'] if core_schema['type'] == 'function-wrap' else core_schema + return handler(inner_schema) + + __pydantic_serializer__ = SchemaSerializer(core_schema.any_schema(serialization=core_schema.to_string_ser_schema())) + @lru_cache def _build_type_adapter(cls: type[_BaseUrl | 
_BaseMultiHostUrl]) -> TypeAdapter: @@ -446,13 +474,13 @@ class AnyUrl(_BaseUrl): * Any scheme allowed * Top-level domain (TLD) not required - * Host required + * Host not required Assuming an input URL of `http://samuel:pass@example.com:8000/the/path/?query=here#fragment=is;this=bit`, the types export the following properties: - `scheme`: the URL scheme (`http`), always set. - - `host`: the URL host (`example.com`), always set. + - `host`: the URL host (`example.com`). - `username`: optional username if included (`samuel`). - `password`: optional password if included (`pass`). - `port`: optional port (`8000`). @@ -461,13 +489,6 @@ class AnyUrl(_BaseUrl): - `fragment`: optional fragment (`fragment=is;this=bit`). """ - _constraints = UrlConstraints(host_required=True) - - @property - def host(self) -> str: - """The required URL host.""" - return self._url.host # pyright: ignore[reportReturnType] - # Note: all single host urls inherit from `AnyUrl` to preserve compatibility with pre-v2.10 code # Where urls were annotated variants of `AnyUrl`, which was an alias to `pydantic_core.Url` @@ -477,17 +498,17 @@ class AnyHttpUrl(AnyUrl): """A type that will accept any http or https URL. * TLD not required - * Host required + * Host not required """ - _constraints = UrlConstraints(host_required=True, allowed_schemes=['http', 'https']) + _constraints = UrlConstraints(allowed_schemes=['http', 'https']) class HttpUrl(AnyUrl): """A type that will accept any http or https URL. * TLD not required - * Host required + * Host not required * Max length 2083 ```python @@ -561,33 +582,28 @@ class MyModel(BaseModel): (or at least big) company. """ - _constraints = UrlConstraints(max_length=2083, allowed_schemes=['http', 'https'], host_required=True) + _constraints = UrlConstraints(max_length=2083, allowed_schemes=['http', 'https']) class AnyWebsocketUrl(AnyUrl): """A type that will accept any ws or wss URL. * TLD not required - * Host required + * Host not required """ - _constraints = UrlConstraints(allowed_schemes=['ws', 'wss'], host_required=True) + _constraints = UrlConstraints(allowed_schemes=['ws', 'wss']) class WebsocketUrl(AnyUrl): """A type that will accept any ws or wss URL. * TLD not required - * Host required + * Host not required * Max length 2083 """ - _constraints = UrlConstraints(max_length=2083, allowed_schemes=['ws', 'wss'], host_required=True) - - @property - def host(self) -> str: - """The required URL host.""" - return self._url.host # type: ignore + _constraints = UrlConstraints(max_length=2083, allowed_schemes=['ws', 'wss']) class FileUrl(AnyUrl): @@ -598,25 +614,15 @@ class FileUrl(AnyUrl): _constraints = UrlConstraints(allowed_schemes=['file']) - @property - def host(self) -> str | None: # pyright: ignore[reportIncompatibleMethodOverride] - """The host part of the URL, or `None`.""" - return self._url.host - class FtpUrl(AnyUrl): """A type that will accept ftp URL. 
* TLD not required - * Host required + * Host not required """ - _constraints = UrlConstraints(allowed_schemes=['ftp'], host_required=True) - - @property - def host(self) -> str | None: # pyright: ignore[reportIncompatibleMethodOverride] - """The host part of the URL, or `None`.""" - return self._url.host + _constraints = UrlConstraints(allowed_schemes=['ftp']) class PostgresDsn(_BaseMultiHostUrl): @@ -717,6 +723,11 @@ class CockroachDsn(AnyUrl): ], ) + @property + def host(self) -> str: + """The required URL host.""" + return self._url.host # pyright: ignore[reportReturnType] + class AmqpDsn(AnyUrl): """A type that will accept any AMQP DSN. @@ -728,11 +739,6 @@ class AmqpDsn(AnyUrl): _constraints = UrlConstraints(allowed_schemes=['amqp', 'amqps']) - @property - def host(self) -> str | None: # pyright: ignore[reportIncompatibleMethodOverride] - """The host part of the URL, or `None`.""" - return self._url.host - class RedisDsn(AnyUrl): """A type that will accept any Redis DSN. @@ -750,6 +756,11 @@ class RedisDsn(AnyUrl): host_required=True, ) + @property + def host(self) -> str: + """The required URL host.""" + return self._url.host # pyright: ignore[reportReturnType] + class MongoDsn(_BaseMultiHostUrl): """A type that will accept any MongoDB DSN. @@ -768,12 +779,10 @@ class KafkaDsn(AnyUrl): * User info required * TLD not required - * Host required + * Host not required """ - _constraints = UrlConstraints( - allowed_schemes=['kafka'], default_host='localhost', default_port=9092, host_required=True - ) + _constraints = UrlConstraints(allowed_schemes=['kafka'], default_host='localhost', default_port=9092) class NatsDsn(_BaseMultiHostUrl): @@ -795,7 +804,7 @@ class MySQLDsn(AnyUrl): * User info required * TLD not required - * Host required + * Host not required """ _constraints = UrlConstraints( @@ -819,13 +828,12 @@ class MariaDBDsn(AnyUrl): * User info required * TLD not required - * Host required + * Host not required """ _constraints = UrlConstraints( allowed_schemes=['mariadb', 'mariadb+mariadbconnector', 'mariadb+pymysql'], default_port=3306, - host_required=True, ) @@ -834,14 +842,13 @@ class ClickHouseDsn(AnyUrl): * User info required * TLD not required - * Host required + * Host not required """ _constraints = UrlConstraints( allowed_schemes=['clickhouse+native', 'clickhouse+asynch'], default_host='localhost', default_port=9000, - host_required=True, ) @@ -858,6 +865,11 @@ class SnowflakeDsn(AnyUrl): host_required=True, ) + @property + def host(self) -> str: + """The required URL host.""" + return self._url.host # pyright: ignore[reportReturnType] + def import_email_validator() -> None: global email_validator diff --git a/pydantic/types.py b/pydantic/types.py index 2cbbb6567b4..5ba60605e54 100644 --- a/pydantic/types.py +++ b/pydantic/types.py @@ -33,7 +33,7 @@ import annotated_types from annotated_types import BaseMetadata, MaxLen, MinLen -from pydantic_core import CoreSchema, PydanticCustomError, core_schema +from pydantic_core import CoreSchema, PydanticCustomError, SchemaSerializer, core_schema from typing_extensions import Annotated, Literal, Protocol, TypeAlias, TypeAliasType, deprecated from ._internal import _core_utils, _fields, _internal_dataclass, _typing_extra, _utils, _validators @@ -1524,6 +1524,13 @@ def _display(self) -> str | bytes: raise NotImplementedError +def _serialize_secret(value: Secret[SecretType], info: core_schema.SerializationInfo) -> str | Secret[SecretType]: + if info.mode == 'json': + return str(value) + else: + return value + + class 
Secret(_SecretBase[SecretType]): """A generic base class used for defining a field with sensitive information that you do not want to be visible in logging or tracebacks. @@ -1660,12 +1667,6 @@ def validate_secret_value(value, handler) -> Secret[SecretType]: validated_inner = handler(value) return cls(validated_inner) - def serialize(value: Secret[SecretType], info: core_schema.SerializationInfo) -> str | Secret[SecretType]: - if info.mode == 'json': - return str(value) - else: - return value - return core_schema.json_or_python_schema( python_schema=core_schema.no_info_wrap_validator_function( validate_secret_value, @@ -1673,33 +1674,44 @@ def serialize(value: Secret[SecretType], info: core_schema.SerializationInfo) -> ), json_schema=core_schema.no_info_after_validator_function(lambda x: cls(x), inner_schema), serialization=core_schema.plain_serializer_function_ser_schema( - serialize, + _serialize_secret, info_arg=True, when_used='always', ), ) + __pydantic_serializer__ = SchemaSerializer( + core_schema.any_schema( + serialization=core_schema.plain_serializer_function_ser_schema( + _serialize_secret, + info_arg=True, + when_used='always', + ) + ) + ) + def _secret_display(value: SecretType) -> str: # type: ignore return '**********' if value else '' +def _serialize_secret_field( + value: _SecretField[SecretType], info: core_schema.SerializationInfo +) -> str | _SecretField[SecretType]: + if info.mode == 'json': + # we want the output to always be string without the `b'` prefix for bytes, + # hence we just use `secret_display` + return _secret_display(value.get_secret_value()) + else: + return value + + class _SecretField(_SecretBase[SecretType]): _inner_schema: ClassVar[CoreSchema] _error_kind: ClassVar[str] @classmethod def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: - def serialize( - value: _SecretField[SecretType], info: core_schema.SerializationInfo - ) -> str | _SecretField[SecretType]: - if info.mode == 'json': - # we want the output to always be string without the `b'` prefix for bytes, - # hence we just use `secret_display` - return _secret_display(value.get_secret_value()) - else: - return value - def get_json_schema(_core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue: json_schema = handler(cls._inner_schema) _utils.update_not_none( @@ -1727,10 +1739,9 @@ def get_secret_schema(strict: bool) -> CoreSchema: ), json_schema=json_schema, serialization=core_schema.plain_serializer_function_ser_schema( - serialize, + _serialize_secret_field, info_arg=True, - return_schema=core_schema.str_schema(), - when_used='json', + when_used='always', ), ) @@ -1740,6 +1751,16 @@ def get_secret_schema(strict: bool) -> CoreSchema: metadata={'pydantic_js_functions': [get_json_schema]}, ) + __pydantic_serializer__ = SchemaSerializer( + core_schema.any_schema( + serialization=core_schema.plain_serializer_function_ser_schema( + _serialize_secret_field, + info_arg=True, + when_used='always', + ) + ) + ) + class SecretStr(_SecretField[str]): """A string used for storing sensitive information that you do not want to be visible in logging or tracebacks. 
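The `__pydantic_serializer__` class attributes added to `Secret` and `_SecretField` above provide a fallback serializer that is picked up when a secret is serialized under an `Any` (or otherwise unknown) annotation, per the "Support serialization as any for `Secret` types" changelog entry (#10947). A minimal sketch of the resulting behavior, mirroring the new tests in this diff and assuming pydantic v2.10.3 (the secret value is made up):

```python
from typing import Any

from pydantic import SecretStr, TypeAdapter

ta_any = TypeAdapter(Any)
secret = SecretStr('hunter2')  # hypothetical secret value

# Python mode passes the Secret instance through unchanged:
assert ta_any.dump_python(secret) == SecretStr('hunter2')
# JSON mode always emits the masked display string:
assert ta_any.dump_python(secret, mode='json') == '**********'
assert ta_any.dump_json(secret) == b'"**********"'
```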
diff --git a/pydantic/version.py b/pydantic/version.py index 9f16617f9be..9176145c64f 100644 --- a/pydantic/version.py +++ b/pydantic/version.py @@ -4,7 +4,7 @@ __all__ = 'VERSION', 'version_info' -VERSION = '2.10.1' +VERSION = '2.10.3' """The version of Pydantic.""" diff --git a/tests/test_dataclasses.py b/tests/test_dataclasses.py index 91c1f7857bf..7ba743c0f83 100644 --- a/tests/test_dataclasses.py +++ b/tests/test_dataclasses.py @@ -3065,3 +3065,12 @@ class Model(BaseModel): model_config = ConfigDict(arbitrary_types_allowed=True) dc: DC + + +def test_deferred_dataclass_fields_available() -> None: + # This aligns with deferred Pydantic models: + @pydantic.dataclasses.dataclass(config={'defer_build': True}) + class A: + a: int + + assert 'a' in A.__pydantic_fields__ # pyright: ignore[reportAttributeAccessIssue] diff --git a/tests/test_forward_ref.py b/tests/test_forward_ref.py index eb2247d4f0c..7395114e1be 100644 --- a/tests/test_forward_ref.py +++ b/tests/test_forward_ref.py @@ -598,6 +598,13 @@ class Model(BaseModel): assert module.Model.__private_attributes__ == {} +def test_private_attr_annotation_not_evaluated() -> None: + class Model(BaseModel): + _a: 'UnknownAnnotation' + + assert '_a' in Model.__private_attributes__ + + def test_json_encoder_str(create_module): module = create_module( # language=Python @@ -1307,12 +1314,22 @@ def test_uses_the_correct_globals_to_resolve_forward_refs_on_serializers(create_ # we use the globals of the underlying func to resolve the return type. @create_module def module_1(): - from pydantic import BaseModel, field_serializer # or model_serializer, computed_field + from typing_extensions import Annotated + + from pydantic import ( + BaseModel, + PlainSerializer, # or WrapSerializer + field_serializer, # or model_serializer, computed_field + ) MyStr = str + def ser_func(value) -> 'MyStr': + return str(value) + class Model(BaseModel): a: int + b: Annotated[int, PlainSerializer(ser_func)] @field_serializer('a') def ser(self, value) -> 'MyStr': diff --git a/tests/test_json_schema.py b/tests/test_json_schema.py index 6555d07bac4..c64e7f813b0 100644 --- a/tests/test_json_schema.py +++ b/tests/test_json_schema.py @@ -1743,6 +1743,9 @@ class MyModel(BaseModel): enum_dict: Dict[MyEnum, str] assert MyModel.model_json_schema() == { + '$defs': { + 'MyEnum': {'enum': ['foo', 'bar'], 'title': 'MyEnum', 'type': 'string'}, + }, 'title': 'MyModel', 'type': 'object', 'properties': { @@ -1750,7 +1753,7 @@ class MyModel(BaseModel): 'title': 'Enum Dict', 'type': 'object', 'additionalProperties': {'type': 'string'}, - 'propertyNames': {'enum': ['foo', 'bar']}, + 'propertyNames': {'$ref': '#/$defs/MyEnum'}, } }, 'required': ['enum_dict'], diff --git a/tests/test_main.py b/tests/test_main.py index cfcece87bb1..5ea3b8da468 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -1784,6 +1784,9 @@ class Model(BaseModel): model = Model() assert model.b == 1 + model = Model.model_construct(a=1) + assert model.b == 1 + class InvalidModel(BaseModel): a: int = Field(default_factory=lambda data: data['b']) b: int diff --git a/tests/test_networks.py b/tests/test_networks.py index f4a8111a6cc..b70296b9714 100644 --- a/tests/test_networks.py +++ b/tests/test_networks.py @@ -1,5 +1,5 @@ import json -from typing import Union +from typing import Any, Union import pytest from pydantic_core import PydanticCustomError, Url @@ -1105,3 +1105,42 @@ def remove_trailing_slash(url: AnyUrl) -> str: ] ta = TypeAdapter(HttpUrl) assert ta.validate_python('https://example.com/') == 
'https://example.com' + + +def test_serialize_as_any() -> None: + ta = TypeAdapter(Any) + assert ta.dump_python(HttpUrl('https://example.com')) == HttpUrl('https://example.com/') + assert ta.dump_json('https://example.com') == b'"https://example.com"' + + +def test_any_url_hashable() -> None: + example_url_1a = AnyUrl('https://example1.com') + example_url_1b = AnyUrl('https://example1.com') + example_url_2 = AnyUrl('https://example2.com') + + assert hash(example_url_1a) == hash(example_url_1b) + assert hash(example_url_1a) != hash(example_url_2) + assert len({example_url_1a, example_url_1b, example_url_2}) == 2 + + example_multi_host_url_1a = PostgresDsn('postgres://user:pass@host1:5432,host2:5432/app') + example_multi_host_url_1b = PostgresDsn('postgres://user:pass@host1:5432,host2:5432/app') + example_multi_host_url_2 = PostgresDsn('postgres://user:pass@host1:5432,host3:5432/app') + + assert hash(example_multi_host_url_1a) == hash(example_multi_host_url_1b) + assert hash(example_multi_host_url_1a) != hash(example_multi_host_url_2) + assert len({example_multi_host_url_1a, example_multi_host_url_1b, example_multi_host_url_2}) == 2 + + +def test_host_not_required_for_2_9_compatibility() -> None: + data_uri = 'file:///path/to/data' + url = AnyUrl(data_uri) + assert url.host is None + + +def test_json_schema() -> None: + ta = TypeAdapter(HttpUrl) + val_json_schema = ta.json_schema(mode='validation') + assert val_json_schema == {'type': 'string', 'format': 'uri', 'minLength': 1, 'maxLength': 2083} + + ser_json_schema = ta.json_schema(mode='serialization') + assert ser_json_schema == {'type': 'string', 'format': 'uri', 'minLength': 1, 'maxLength': 2083} diff --git a/tests/test_types.py b/tests/test_types.py index f9d6c521604..426be140e6c 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -7060,3 +7060,37 @@ class Model(BaseModel): with pytest.raises(ValidationError): Model(**{'base64_value': b'123456'}) + + +def test_serialize_as_any_secret_types() -> None: + ta_secret_str = TypeAdapter(SecretStr) + secret_str = ta_secret_str.validate_python('secret') + + ta_any = TypeAdapter(Any) + + assert ta_any.dump_python(secret_str) == secret_str + assert ta_any.dump_python(secret_str, mode='json') == '**********' + assert ta_any.dump_json(secret_str) == b'"**********"' + + ta_secret_bytes = TypeAdapter(SecretBytes) + secret_bytes = ta_secret_bytes.validate_python(b'secret') + + assert ta_any.dump_python(secret_bytes) == secret_bytes + assert ta_any.dump_python(secret_bytes, mode='json') == '**********' + assert ta_any.dump_json(secret_bytes) == b'"**********"' + + ta_secret_date = TypeAdapter(SecretDate) + secret_date = ta_secret_date.validate_python('2024-01-01') + + assert ta_any.dump_python(secret_date) == secret_date + assert ta_any.dump_python(secret_date, mode='json') == '****/**/**' + assert ta_any.dump_json(secret_date) == b'"****/**/**"' + + +def test_custom_serializer_override_secret_str() -> None: + class User(BaseModel): + name: str + password: Annotated[SecretStr, PlainSerializer(lambda x: f'secret: {str(x)}')] + + u = User(name='sam', password='hi') + assert u.model_dump()['password'] == 'secret: **********' diff --git a/uv.lock b/uv.lock index c6f9af14b48..a7eba385e11 100644 --- a/uv.lock +++ b/uv.lock @@ -1324,7 +1324,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.10.1" +version = "2.10.3" source = { editable = "." } dependencies = [ { name = "annotated-types" },
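Beyond the test assertions above, the two user-facing URL changes in this release can be demonstrated together: URL types gained `__hash__` (#10975), and single-host URL types no longer require a host, restoring v2.9 compatibility (#11027). A short sketch assuming pydantic v2.10.3:

```python
from pydantic import AnyUrl

# The host is optional again for plain `AnyUrl`, e.g. for file URIs:
assert AnyUrl('file:///path/to/data').host is None

# URL types are now hashable, so equal URLs deduplicate in sets:
urls = {
    AnyUrl('https://example1.com'),
    AnyUrl('https://example1.com'),
    AnyUrl('https://example2.com'),
}
assert len(urls) == 2
```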