From 4611613f927e86a36599e333a0856ee9a45e4ded Mon Sep 17 00:00:00 2001
From: Jukka Lehtosalo
Date: Mon, 22 Nov 2021 11:03:54 +0000
Subject: [PATCH] Revert "lru_cache preserves signature of wrapped function
 (#6221)"

This reverts commit 8bda66a73725ff98919b8b57e600178591e948c2.

The change causes issues with ParamSpec implementations in type checkers,
at least pyright and my work-in-progress support for ParamSpec in mypy.
It's not yet clear how to fix the issues, so I think that it's best to
revert this, at least temporarily until we've found a good solution.

See #6347 for context.
---
 stdlib/functools.pyi | 17 ++++++++---------
 1 file changed, 8 insertions(+), 9 deletions(-)

diff --git a/stdlib/functools.pyi b/stdlib/functools.pyi
index 9b761ffcca2f..b5e52bf59920 100644
--- a/stdlib/functools.pyi
+++ b/stdlib/functools.pyi
@@ -2,7 +2,7 @@ import sys
 import types
 from _typeshed import SupportsItems, SupportsLessThan
 from typing import Any, Callable, Generic, Hashable, Iterable, NamedTuple, Sequence, Sized, Tuple, Type, TypeVar, overload
-from typing_extensions import ParamSpec, final
+from typing_extensions import final
 
 if sys.version_info >= (3, 9):
     from types import GenericAlias
@@ -11,7 +11,6 @@ _AnyCallable = Callable[..., Any]
 
 _T = TypeVar("_T")
 _S = TypeVar("_S")
-_P = ParamSpec("_P")
 
 @overload
 def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ...
@@ -25,20 +24,20 @@ class _CacheInfo(NamedTuple):
     currsize: int
 
 @final
-class _lru_cache_wrapper(Generic[_P, _T]):  # type: ignore
-    __wrapped__: Callable[_P, _T]  # type: ignore
-    def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _T: ...  # type: ignore
+class _lru_cache_wrapper(Generic[_T]):
+    __wrapped__: Callable[..., _T]
+    def __call__(self, *args: Hashable, **kwargs: Hashable) -> _T: ...
     def cache_info(self) -> _CacheInfo: ...
     def cache_clear(self) -> None: ...
 
 if sys.version_info >= (3, 8):
     @overload
-    def lru_cache(maxsize: int | None = ..., typed: bool = ...) -> Callable[[Callable[_P, _T]], _lru_cache_wrapper[_P, _T]]: ...  # type: ignore
+    def lru_cache(maxsize: int | None = ..., typed: bool = ...) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ...
     @overload
-    def lru_cache(maxsize: Callable[_P, _T], typed: bool = ...) -> _lru_cache_wrapper[_P, _T]: ...  # type: ignore
+    def lru_cache(maxsize: Callable[..., _T], typed: bool = ...) -> _lru_cache_wrapper[_T]: ...
 
 else:
-    def lru_cache(maxsize: int | None = ..., typed: bool = ...) -> Callable[[Callable[_P, _T]], _lru_cache_wrapper[_P, _T]]: ...  # type: ignore
+    def lru_cache(maxsize: int | None = ..., typed: bool = ...) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ...
 
 WRAPPER_ASSIGNMENTS: Sequence[str]
 WRAPPER_UPDATES: Sequence[str]
@@ -118,7 +117,7 @@ if sys.version_info >= (3, 8):
         def __class_getitem__(cls, item: Any) -> GenericAlias: ...
 
 if sys.version_info >= (3, 9):
-    def cache(__user_function: Callable[_P, _T]) -> _lru_cache_wrapper[_P, _T]: ...  # type: ignore
+    def cache(__user_function: Callable[..., _T]) -> _lru_cache_wrapper[_T]: ...
 
 def _make_key(
     args: Tuple[Hashable, ...],