From 010da0b2f48dc92be2f79495fd4551c92351868f Mon Sep 17 00:00:00 2001 From: Ilya Priven Date: Mon, 28 Aug 2023 04:03:50 -0400 Subject: [PATCH 001/144] attrs, dataclasses: don't enforce slots when base doesn't (#15976) Doing the same thing we do for regular classes. Fixes #15975 --- mypy/plugins/attrs.py | 5 +++++ mypy/plugins/dataclasses.py | 6 ++++++ test-data/unit/check-dataclasses.test | 16 ++++++++++++++++ test-data/unit/check-plugin-attrs.test | 15 +++++++++++++++ 4 files changed, 42 insertions(+) diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py index d444c18852dd..3d326a5f4e80 100644 --- a/mypy/plugins/attrs.py +++ b/mypy/plugins/attrs.py @@ -893,6 +893,11 @@ def _add_attrs_magic_attribute( def _add_slots(ctx: mypy.plugin.ClassDefContext, attributes: list[Attribute]) -> None: + if any(p.slots is None for p in ctx.cls.info.mro[1:-1]): + # At least one type in mro (excluding `self` and `object`) + # does not have concrete `__slots__` defined. Ignoring. + return + # Unlike `@dataclasses.dataclass`, `__slots__` is rewritten here. ctx.cls.info.slots = {attr.name for attr in attributes} diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index d782acf50af5..39b597491e9e 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -443,6 +443,12 @@ def add_slots( self._cls, ) return + + if any(p.slots is None for p in info.mro[1:-1]): + # At least one type in mro (excluding `self` and `object`) + # does not have concrete `__slots__` defined. Ignoring. 
+ return + info.slots = generated_slots # Now, insert `.__slots__` attribute to class namespace: diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 7881dfbcf1bb..91c409807497 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -1519,6 +1519,22 @@ class Some: self.y = 1 # E: Trying to assign name "y" that is not in "__slots__" of type "__main__.Some" [builtins fixtures/dataclasses.pyi] +[case testDataclassWithSlotsDerivedFromNonSlot] +# flags: --python-version 3.10 +from dataclasses import dataclass + +class A: + pass + +@dataclass(slots=True) +class B(A): + x: int + + def __post_init__(self) -> None: + self.y = 42 + +[builtins fixtures/dataclasses.pyi] + [case testDataclassWithSlotsConflict] # flags: --python-version 3.10 from dataclasses import dataclass diff --git a/test-data/unit/check-plugin-attrs.test b/test-data/unit/check-plugin-attrs.test index 7580531bebc9..e8598132c50e 100644 --- a/test-data/unit/check-plugin-attrs.test +++ b/test-data/unit/check-plugin-attrs.test @@ -1677,6 +1677,21 @@ class C: self.c = 2 # E: Trying to assign name "c" that is not in "__slots__" of type "__main__.C" [builtins fixtures/plugin_attrs.pyi] +[case testAttrsClassWithSlotsDerivedFromNonSlots] +import attrs + +class A: + pass + +@attrs.define(slots=True) +class B(A): + x: int + + def __attrs_post_init__(self) -> None: + self.y = 42 + +[builtins fixtures/plugin_attrs.pyi] + [case testRuntimeSlotsAttr] from attr import dataclass From 171402834faece2e20760f0d02e96aa3714324c2 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Tue, 29 Aug 2023 10:17:52 +0100 Subject: [PATCH 002/144] Optimize Unpack for failures (#15967) This is a small but possibly important PR. Wherever possible we should represent user error and/or failed type inference as `*tuple[Any, ...]`/`*tuple[, ...]`, rather than `Unpack[Any]`/`Unpack[]` or plain `Any`/``. 
This way we will not need any special casing for failure conditions in various places without risking a crash instead of a graceful failure (error message). --- mypy/expandtype.py | 23 ++++++----------------- mypy/semanal_main.py | 2 ++ mypy/semanal_typeargs.py | 21 ++++++++++++++------- test-data/unit/check-typevar-tuple.test | 5 ++--- 4 files changed, 24 insertions(+), 27 deletions(-) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index ef8ebe1a9128..26353c043cb7 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -273,7 +273,7 @@ def visit_unpack_type(self, t: UnpackType) -> Type: # example is non-normalized types when called from semanal.py. return UnpackType(t.type.accept(self)) - def expand_unpack(self, t: UnpackType) -> list[Type] | AnyType | UninhabitedType: + def expand_unpack(self, t: UnpackType) -> list[Type]: assert isinstance(t.type, TypeVarTupleType) repl = get_proper_type(self.variables.get(t.type.id, t.type)) if isinstance(repl, TupleType): @@ -285,9 +285,9 @@ def expand_unpack(self, t: UnpackType) -> list[Type] | AnyType | UninhabitedType ): return [UnpackType(typ=repl)] elif isinstance(repl, (AnyType, UninhabitedType)): - # tuple[Any, ...] for Any would be better, but we don't have - # the type info to construct that type here. - return repl + # Replace *Ts = Any with *Ts = *tuple[Any, ...] and some for . + # These types may appear here as a result of user error or failed inference. 
+ return [UnpackType(t.type.tuple_fallback.copy_modified(args=[repl]))] else: raise RuntimeError(f"Invalid type replacement to expand: {repl}") @@ -310,12 +310,7 @@ def interpolate_args_for_unpack(self, t: CallableType, var_arg: UnpackType) -> l # We have plain Unpack[Ts] assert isinstance(var_arg_type, TypeVarTupleType) fallback = var_arg_type.tuple_fallback - expanded_items_res = self.expand_unpack(var_arg) - if isinstance(expanded_items_res, list): - expanded_items = expanded_items_res - else: - # We got Any or - return prefix + [expanded_items_res] + suffix + expanded_items = self.expand_unpack(var_arg) new_unpack = UnpackType(TupleType(expanded_items, fallback)) return prefix + [new_unpack] + suffix @@ -394,14 +389,8 @@ def expand_types_with_unpack( items: list[Type] = [] for item in typs: if isinstance(item, UnpackType) and isinstance(item.type, TypeVarTupleType): - unpacked_items = self.expand_unpack(item) - if isinstance(unpacked_items, (AnyType, UninhabitedType)): - # TODO: better error for , something like tuple of unknown? - return unpacked_items - else: - items.extend(unpacked_items) + items.extend(self.expand_unpack(item)) else: - # Must preserve original aliases when possible. 
items.append(item.accept(self)) return items diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py index 51a7014fac1a..ec09deb0952f 100644 --- a/mypy/semanal_main.py +++ b/mypy/semanal_main.py @@ -381,6 +381,7 @@ def check_type_arguments(graph: Graph, scc: list[str], errors: Errors) -> None: errors, state.options, is_typeshed_file(state.options.abs_custom_typeshed_dir, state.path or ""), + state.manager.semantic_analyzer.named_type, ) with state.wrap_context(): with mypy.state.state.strict_optional_set(state.options.strict_optional): @@ -399,6 +400,7 @@ def check_type_arguments_in_targets( errors, state.options, is_typeshed_file(state.options.abs_custom_typeshed_dir, state.path or ""), + state.manager.semantic_analyzer.named_type, ) with state.wrap_context(): with mypy.state.state.strict_optional_set(state.options.strict_optional): diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py index 1ae6fada8f38..749b02391e06 100644 --- a/mypy/semanal_typeargs.py +++ b/mypy/semanal_typeargs.py @@ -7,7 +7,7 @@ from __future__ import annotations -from typing import Sequence +from typing import Callable, Sequence from mypy import errorcodes as codes, message_registry from mypy.errorcodes import ErrorCode @@ -42,11 +42,18 @@ class TypeArgumentAnalyzer(MixedTraverserVisitor): - def __init__(self, errors: Errors, options: Options, is_typeshed_file: bool) -> None: + def __init__( + self, + errors: Errors, + options: Options, + is_typeshed_file: bool, + named_type: Callable[[str, list[Type]], Instance], + ) -> None: super().__init__() self.errors = errors self.options = options self.is_typeshed_file = is_typeshed_file + self.named_type = named_type self.scope = Scope() # Should we also analyze function definitions, or only module top-levels? 
self.recurse_into_functions = True @@ -243,16 +250,16 @@ def visit_unpack_type(self, typ: UnpackType) -> None: return if isinstance(proper_type, TypeVarTupleType): return + # TODO: this should probably be .has_base("builtins.tuple"), also elsewhere. if isinstance(proper_type, Instance) and proper_type.type.fullname == "builtins.tuple": return - if isinstance(proper_type, AnyType) and proper_type.type_of_any == TypeOfAny.from_error: - return - if not isinstance(proper_type, UnboundType): - # Avoid extra errors if there were some errors already. + if not isinstance(proper_type, (UnboundType, AnyType)): + # Avoid extra errors if there were some errors already. Also interpret plain Any + # as tuple[Any, ...] (this is better for the code in type checker). self.fail( message_registry.INVALID_UNPACK.format(format_type(proper_type, self.options)), typ ) - typ.type = AnyType(TypeOfAny.from_error) + typ.type = self.named_type("builtins.tuple", [AnyType(TypeOfAny.from_error)]) def check_type_var_values( self, name: str, actuals: list[Type], arg_name: str, valids: list[Type], context: Context diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index a36c4d4d6741..c8b33ec96b06 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -17,8 +17,7 @@ reveal_type(f(args)) # N: Revealed type is "Tuple[builtins.int, builtins.str]" reveal_type(f(varargs)) # N: Revealed type is "builtins.tuple[builtins.int, ...]" -if object(): - f(0) # E: Argument 1 to "f" has incompatible type "int"; expected +f(0) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[, ...]" def g(a: Tuple[Unpack[Ts]], b: Tuple[Unpack[Ts]]) -> Tuple[Unpack[Ts]]: return a @@ -26,7 +25,7 @@ def g(a: Tuple[Unpack[Ts]], b: Tuple[Unpack[Ts]]) -> Tuple[Unpack[Ts]]: reveal_type(g(args, args)) # N: Revealed type is "Tuple[builtins.int, builtins.str]" reveal_type(g(args, args2)) # N: Revealed type is "Tuple[builtins.int, 
builtins.str]" reveal_type(g(args, args3)) # N: Revealed type is "builtins.tuple[builtins.object, ...]" -reveal_type(g(any, any)) # N: Revealed type is "Any" +reveal_type(g(any, any)) # N: Revealed type is "builtins.tuple[Any, ...]" [builtins fixtures/tuple.pyi] [case testTypeVarTupleMixed] From 6c16143c3a68c99f6e4c99974c44cf3abf867103 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Tue, 29 Aug 2023 23:12:02 +0300 Subject: [PATCH 003/144] Improve GitHub Actions specs (#15965) Two main changes: 1. Always use secure permissions, when some workflow does not do anything, it has to be `contents: read` only 2. Be more consistent with canceling workflows --- .github/workflows/build_wheels.yml | 3 +++ .github/workflows/docs.yml | 7 +++++++ .github/workflows/mypy_primer.yml | 3 +++ .github/workflows/test.yml | 3 +++ .github/workflows/test_stubgenc.yml | 7 +++++++ 5 files changed, 23 insertions(+) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index e728d741d90d..3f4ea5e42f9b 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -5,6 +5,9 @@ on: branches: [main, master, 'release*'] tags: ['*'] +permissions: + contents: write + jobs: build-wheels: if: github.repository == 'python/mypy' diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 5dc86a1159f4..9f3a6121ae16 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -14,6 +14,13 @@ on: - CREDITS - LICENSE +permissions: + contents: read + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + jobs: docs: runs-on: ubuntu-latest diff --git a/.github/workflows/mypy_primer.yml b/.github/workflows/mypy_primer.yml index e7e4af1f07b7..2958b8fc325b 100644 --- a/.github/workflows/mypy_primer.yml +++ b/.github/workflows/mypy_primer.yml @@ -15,6 +15,9 @@ on: - 'mypy/test/**' - 'test-data/**' +permissions: + contents: read + concurrency: group: ${{ 
github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f594353ed05a..0e335a59d1d0 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -15,6 +15,9 @@ on: - CREDITS - LICENSE +permissions: + contents: read + concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true diff --git a/.github/workflows/test_stubgenc.yml b/.github/workflows/test_stubgenc.yml index db9bf413faa3..33466b9870ff 100644 --- a/.github/workflows/test_stubgenc.yml +++ b/.github/workflows/test_stubgenc.yml @@ -12,6 +12,13 @@ on: - 'mypy/stubdoc.py' - 'test-data/stubgen/**' +permissions: + contents: read + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + jobs: stubgenc: # Check stub file generation for a small pybind11 project From d6df8e883e927920bbe50aab779e7591e31533c6 Mon Sep 17 00:00:00 2001 From: Ilya Priven Date: Tue, 29 Aug 2023 19:29:19 -0400 Subject: [PATCH 004/144] dataclasses.replace: fall through to typeshed sig (#15962) If the dataclasses plugin cannot determine a signature for `dataclasses.replace`, it should not report an error. 
The underlying typeshed signature will get a shot at verifying the type and reporting an error, and it would enable the following pattern (without typing `replace`'s kwargs, though) --- mypy/plugins/dataclasses.py | 25 +---------- test-data/unit/check-dataclass-transform.test | 2 +- test-data/unit/check-dataclasses.test | 44 +++++++++++++++---- test-data/unit/lib-stub/dataclasses.pyi | 20 ++++++++- 4 files changed, 55 insertions(+), 36 deletions(-) diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 39b597491e9e..8b34c28b6832 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -972,25 +972,6 @@ def _has_direct_dataclass_transform_metaclass(info: TypeInfo) -> bool: ) -def _fail_not_dataclass(ctx: FunctionSigContext, t: Type, parent_t: Type) -> None: - t_name = format_type_bare(t, ctx.api.options) - if parent_t is t: - msg = ( - f'Argument 1 to "replace" has a variable type "{t_name}" not bound to a dataclass' - if isinstance(t, TypeVarType) - else f'Argument 1 to "replace" has incompatible type "{t_name}"; expected a dataclass' - ) - else: - pt_name = format_type_bare(parent_t, ctx.api.options) - msg = ( - f'Argument 1 to "replace" has type "{pt_name}" whose item "{t_name}" is not bound to a dataclass' - if isinstance(t, TypeVarType) - else f'Argument 1 to "replace" has incompatible type "{pt_name}" whose item "{t_name}" is not a dataclass' - ) - - ctx.api.fail(msg, ctx.context) - - def _get_expanded_dataclasses_fields( ctx: FunctionSigContext, typ: ProperType, display_typ: ProperType, parent_typ: ProperType ) -> list[CallableType] | None: @@ -999,9 +980,7 @@ def _get_expanded_dataclasses_fields( For generic classes, the field types are expanded. If the type contains Any or a non-dataclass, returns None; in the latter case, also reports an error. 
""" - if isinstance(typ, AnyType): - return None - elif isinstance(typ, UnionType): + if isinstance(typ, UnionType): ret: list[CallableType] | None = [] for item in typ.relevant_items(): item = get_proper_type(item) @@ -1018,14 +997,12 @@ def _get_expanded_dataclasses_fields( elif isinstance(typ, Instance): replace_sym = typ.type.get_method(_INTERNAL_REPLACE_SYM_NAME) if replace_sym is None: - _fail_not_dataclass(ctx, display_typ, parent_typ) return None replace_sig = replace_sym.type assert isinstance(replace_sig, ProperType) assert isinstance(replace_sig, CallableType) return [expand_type_by_instance(replace_sig, typ)] else: - _fail_not_dataclass(ctx, display_typ, parent_typ) return None diff --git a/test-data/unit/check-dataclass-transform.test b/test-data/unit/check-dataclass-transform.test index 9029582ece82..58cd5e5a90f8 100644 --- a/test-data/unit/check-dataclass-transform.test +++ b/test-data/unit/check-dataclass-transform.test @@ -853,7 +853,7 @@ class Person: name: str p = Person('John') -y = replace(p, name='Bob') # E: Argument 1 to "replace" has incompatible type "Person"; expected a dataclass +y = replace(p, name='Bob') [typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 91c409807497..1f5f5635de4e 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -2122,6 +2122,8 @@ a2 = replace(a, x='42', q=42) # E: Argument "x" to "replace" of "A" has incompa a2 = replace(a, q='42') # E: Argument "q" to "replace" of "A" has incompatible type "str"; expected "int" reveal_type(a2) # N: Revealed type is "__main__.A" +[builtins fixtures/tuple.pyi] + [case testReplaceUnion] from typing import Generic, Union, TypeVar from dataclasses import dataclass, replace, InitVar @@ -2151,7 +2153,7 @@ _ = replace(a_or_b, x=42, y=True, z='42', init_var=42) # E: Argument "z" to "re _ = replace(a_or_b, x=42, y=True, w={}, 
init_var=42) # E: Argument "w" to "replace" of "Union[A[int], B]" has incompatible type "Dict[, ]"; expected _ = replace(a_or_b, y=42, init_var=42) # E: Argument "y" to "replace" of "Union[A[int], B]" has incompatible type "int"; expected "bool" -[builtins fixtures/dataclasses.pyi] +[builtins fixtures/tuple.pyi] [case testReplaceUnionOfTypeVar] from typing import Generic, Union, TypeVar @@ -2171,7 +2173,9 @@ TA = TypeVar('TA', bound=A) TB = TypeVar('TB', bound=B) def f(b_or_t: Union[TA, TB, int]) -> None: - a2 = replace(b_or_t) # E: Argument 1 to "replace" has type "Union[TA, TB, int]" whose item "TB" is not bound to a dataclass # E: Argument 1 to "replace" has incompatible type "Union[TA, TB, int]" whose item "int" is not a dataclass + a2 = replace(b_or_t) # E: Value of type variable "_DataclassT" of "replace" cannot be "Union[TA, TB, int]" + +[builtins fixtures/tuple.pyi] [case testReplaceTypeVarBoundNotDataclass] from dataclasses import dataclass, replace @@ -2183,16 +2187,18 @@ TNone = TypeVar('TNone', bound=None) TUnion = TypeVar('TUnion', bound=Union[str, int]) def f1(t: TInt) -> None: - _ = replace(t, x=42) # E: Argument 1 to "replace" has a variable type "TInt" not bound to a dataclass + _ = replace(t, x=42) # E: Value of type variable "_DataclassT" of "replace" cannot be "TInt" def f2(t: TAny) -> TAny: - return replace(t, x='spam') # E: Argument 1 to "replace" has a variable type "TAny" not bound to a dataclass + return replace(t, x='spam') # E: Value of type variable "_DataclassT" of "replace" cannot be "TAny" def f3(t: TNone) -> TNone: - return replace(t, x='spam') # E: Argument 1 to "replace" has a variable type "TNone" not bound to a dataclass + return replace(t, x='spam') # E: Value of type variable "_DataclassT" of "replace" cannot be "TNone" def f4(t: TUnion) -> TUnion: - return replace(t, x='spam') # E: Argument 1 to "replace" has incompatible type "TUnion" whose item "str" is not a dataclass # E: Argument 1 to "replace" has incompatible type 
"TUnion" whose item "int" is not a dataclass + return replace(t, x='spam') # E: Value of type variable "_DataclassT" of "replace" cannot be "TUnion" + +[builtins fixtures/tuple.pyi] [case testReplaceTypeVarBound] from dataclasses import dataclass, replace @@ -2217,6 +2223,8 @@ def f(t: TA) -> TA: f(A(x=42)) f(B(x=42)) +[builtins fixtures/tuple.pyi] + [case testReplaceAny] from dataclasses import replace from typing import Any @@ -2225,17 +2233,33 @@ a: Any a2 = replace(a) reveal_type(a2) # N: Revealed type is "Any" +[builtins fixtures/tuple.pyi] + [case testReplaceNotDataclass] from dataclasses import replace -replace(5) # E: Argument 1 to "replace" has incompatible type "int"; expected a dataclass +replace(5) # E: Value of type variable "_DataclassT" of "replace" cannot be "int" class C: pass -replace(C()) # E: Argument 1 to "replace" has incompatible type "C"; expected a dataclass +replace(C()) # E: Value of type variable "_DataclassT" of "replace" cannot be "C" -replace(None) # E: Argument 1 to "replace" has incompatible type "None"; expected a dataclass +replace(None) # E: Value of type variable "_DataclassT" of "replace" cannot be "None" + +[builtins fixtures/tuple.pyi] + +[case testReplaceIsDataclass] +from dataclasses import is_dataclass, replace + +def f(x: object) -> None: + _ = replace(x) # E: Value of type variable "_DataclassT" of "replace" cannot be "object" + if is_dataclass(x): + _ = replace(x) # E: Value of type variable "_DataclassT" of "replace" cannot be "Union[DataclassInstance, Type[DataclassInstance]]" + if not isinstance(x, type): + _ = replace(x) + +[builtins fixtures/tuple.pyi] [case testReplaceGeneric] from dataclasses import dataclass, replace, InitVar @@ -2254,6 +2278,8 @@ reveal_type(a2) # N: Revealed type is "__main__.A[builtins.int]" a2 = replace(a, x='42') # E: Argument "x" to "replace" of "A[int]" has incompatible type "str"; expected "int" reveal_type(a2) # N: Revealed type is "__main__.A[builtins.int]" +[builtins 
fixtures/tuple.pyi] + [case testPostInitCorrectSignature] from typing import Any, Generic, TypeVar, Callable, Self from dataclasses import dataclass, InitVar diff --git a/test-data/unit/lib-stub/dataclasses.pyi b/test-data/unit/lib-stub/dataclasses.pyi index b2b48c2ae486..cf43747757bd 100644 --- a/test-data/unit/lib-stub/dataclasses.pyi +++ b/test-data/unit/lib-stub/dataclasses.pyi @@ -1,6 +1,14 @@ -from typing import Any, Callable, Generic, Mapping, Optional, TypeVar, overload, Type +from typing import Any, Callable, Generic, Literal, Mapping, Optional, TypeVar, overload, Type, \ + Protocol, ClassVar +from typing_extensions import TypeGuard + +# DataclassInstance is in _typeshed.pyi normally, but alas we can't do the same for lib-stub +# due to test-data/unit/lib-stub/builtins.pyi not having 'tuple'. +class DataclassInstance(Protocol): + __dataclass_fields__: ClassVar[dict[str, Field[Any]]] _T = TypeVar('_T') +_DataclassT = TypeVar("_DataclassT", bound=DataclassInstance) class InitVar(Generic[_T]): ... @@ -33,4 +41,12 @@ def field(*, class Field(Generic[_T]): pass -def replace(__obj: _T, **changes: Any) -> _T: ... +@overload +def is_dataclass(obj: DataclassInstance) -> Literal[True]: ... +@overload +def is_dataclass(obj: type) -> TypeGuard[type[DataclassInstance]]: ... +@overload +def is_dataclass(obj: object) -> TypeGuard[DataclassInstance | type[DataclassInstance]]: ... + + +def replace(__obj: _DataclassT, **changes: Any) -> _DataclassT: ... 
From 379b52f2cfff4955589df714cb2dd904be482e76 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Tue, 29 Aug 2023 17:46:46 -0700 Subject: [PATCH 005/144] Try upgrading tox (#15992) Fixes #15990 --- .github/workflows/docs.yml | 2 +- .github/workflows/test.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 9f3a6121ae16..037738d4b3aa 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -34,7 +34,7 @@ jobs: with: python-version: '3.8' - name: Install tox - run: pip install --upgrade 'setuptools!=50' tox==4.4.4 + run: pip install --upgrade 'setuptools!=50' tox==4.11.0 - name: Setup tox environment run: tox run -e ${{ env.TOXENV }} --notest - name: Test diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 0e335a59d1d0..d2e7e7258500 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -133,7 +133,7 @@ jobs: ./misc/build-debug-python.sh $PYTHONVERSION $PYTHONDIR $VENV source $VENV/bin/activate - name: Install tox - run: pip install --upgrade 'setuptools!=50' tox==4.4.4 + run: pip install --upgrade 'setuptools!=50' tox==4.11.0 - name: Compiled with mypyc if: ${{ matrix.test_mypyc }} run: | @@ -185,7 +185,7 @@ jobs: default: 3.11.1 command: python -c "import platform; print(f'{platform.architecture()=} {platform.machine()=}');" - name: Install tox - run: pip install --upgrade 'setuptools!=50' tox==4.4.4 + run: pip install --upgrade 'setuptools!=50' tox==4.11.0 - name: Setup tox environment run: tox run -e py --notest - name: Test From 2298829ab3b7339427ec957ec5c21955d3657c6f Mon Sep 17 00:00:00 2001 From: Ilya Priven Date: Tue, 29 Aug 2023 20:49:24 -0400 Subject: [PATCH 006/144] attrs: remove fields type check (#15983) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Since https://github.com/python-attrs/attrs/pull/890 (≥ 22.1.0) 
`attrs.fields` is typed to accept a protocol. Since https://github.com/python-attrs/attrs/pull/997 (≥ 22.2.0) `attrs.has` is a type-guard. Support both by removing the explicit error reporting and letting it fall through to the type stub. Fixes #15980. --- mypy/plugins/attrs.py | 5 ----- test-data/unit/check-plugin-attrs.test | 16 ++++++++++------ test-data/unit/lib-stub/attrs/__init__.pyi | 13 ++++++++++--- 3 files changed, 20 insertions(+), 14 deletions(-) diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py index 3d326a5f4e80..6f5b6f35da07 100644 --- a/mypy/plugins/attrs.py +++ b/mypy/plugins/attrs.py @@ -1111,9 +1111,4 @@ def fields_function_sig_callback(ctx: mypy.plugin.FunctionSigContext) -> Callabl assert ret_type is not None return ctx.default_signature.copy_modified(arg_types=arg_types, ret_type=ret_type) - ctx.api.fail( - f'Argument 1 to "fields" has incompatible type "{format_type_bare(proper_type, ctx.api.options)}"; expected an attrs class', - ctx.context, - ) - return ctx.default_signature diff --git a/test-data/unit/check-plugin-attrs.test b/test-data/unit/check-plugin-attrs.test index e8598132c50e..1465bab2bb7b 100644 --- a/test-data/unit/check-plugin-attrs.test +++ b/test-data/unit/check-plugin-attrs.test @@ -1596,16 +1596,18 @@ def f(t: TA) -> None: [builtins fixtures/plugin_attrs.pyi] [case testNonattrsFields] -# flags: --no-strict-optional from typing import Any, cast, Type -from attrs import fields +from attrs import fields, has class A: b: int c: str -fields(A) # E: Argument 1 to "fields" has incompatible type "Type[A]"; expected an attrs class -fields(None) # E: Argument 1 to "fields" has incompatible type "None"; expected an attrs class +if has(A): + fields(A) +else: + fields(A) # E: Argument 1 to "fields" has incompatible type "Type[A]"; expected "Type[AttrsInstance]" +fields(None) # E: Argument 1 to "fields" has incompatible type "None"; expected "Type[AttrsInstance]" fields(cast(Any, 42)) fields(cast(Type[Any], 43)) @@ -2167,7 
+2169,8 @@ TA = TypeVar('TA', bound=A) TB = TypeVar('TB', bound=B) def f(b_or_t: TA | TB | int) -> None: - a2 = attrs.evolve(b_or_t) # E: Argument 1 to "evolve" has type "Union[TA, TB, int]" whose item "TB" is not bound to an attrs class # E: Argument 1 to "evolve" has incompatible type "Union[TA, TB, int]" whose item "int" is not an attrs class + a2 = attrs.evolve(b_or_t) # E: Argument 1 to "evolve" has type "Union[TA, TB, int]" whose item "TB" is not bound to an attrs class \ + # E: Argument 1 to "evolve" has incompatible type "Union[TA, TB, int]" whose item "int" is not an attrs class [builtins fixtures/plugin_attrs.pyi] @@ -2216,7 +2219,8 @@ def h(t: TNone) -> None: _ = attrs.evolve(t, x=42) # E: Argument 1 to "evolve" has a variable type "TNone" not bound to an attrs class def x(t: TUnion) -> None: - _ = attrs.evolve(t, x=42) # E: Argument 1 to "evolve" has incompatible type "TUnion" whose item "str" is not an attrs class # E: Argument 1 to "evolve" has incompatible type "TUnion" whose item "int" is not an attrs class + _ = attrs.evolve(t, x=42) # E: Argument 1 to "evolve" has incompatible type "TUnion" whose item "str" is not an attrs class \ + # E: Argument 1 to "evolve" has incompatible type "TUnion" whose item "int" is not an attrs class [builtins fixtures/plugin_attrs.pyi] diff --git a/test-data/unit/lib-stub/attrs/__init__.pyi b/test-data/unit/lib-stub/attrs/__init__.pyi index a575f97da9bc..7a88170d7271 100644 --- a/test-data/unit/lib-stub/attrs/__init__.pyi +++ b/test-data/unit/lib-stub/attrs/__init__.pyi @@ -1,7 +1,14 @@ -from typing import TypeVar, overload, Callable, Any, Optional, Union, Sequence, Mapping, Generic +from typing import TypeVar, overload, Callable, Any, Optional, Union, Sequence, Mapping, \ + Protocol, ClassVar, Type +from typing_extensions import TypeGuard from attr import Attribute as Attribute + +class AttrsInstance(Protocol): + __attrs_attrs__: ClassVar[Any] + + _T = TypeVar('_T') _C = TypeVar('_C', bound=type) @@ -131,5 +138,5 @@ 
def field( def evolve(inst: _T, **changes: Any) -> _T: ... def assoc(inst: _T, **changes: Any) -> _T: ... - -def fields(cls: type) -> Any: ... +def has(cls: type) -> TypeGuard[Type[AttrsInstance]]: ... +def fields(cls: Type[AttrsInstance]) -> Any: ... From 5783af495f22e2abc42b3c153b0bea2faa9b72e7 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Tue, 29 Aug 2023 18:50:20 -0700 Subject: [PATCH 007/144] Fix inference for properties with __call__ (#15926) Fixes #5858 --- mypy/checkmember.py | 21 +++++++++++++-------- test-data/unit/check-functions.test | 17 +++++++++++++++++ 2 files changed, 30 insertions(+), 8 deletions(-) diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 1bdc00a6eb59..f7d002f17eb9 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Callable, Sequence, cast +from typing import TYPE_CHECKING, Callable, Optional, Sequence, cast from mypy import meet, message_registry, subtypes from mypy.erasetype import erase_typevars @@ -776,12 +776,17 @@ def analyze_var( freeze_all_type_vars(t) result: Type = t typ = get_proper_type(typ) - if ( - var.is_initialized_in_class - and (not is_instance_var(var) or mx.is_operator) - and isinstance(typ, FunctionLike) - and not typ.is_type_obj() - ): + + call_type: Optional[ProperType] = None + if var.is_initialized_in_class and (not is_instance_var(var) or mx.is_operator): + if isinstance(typ, FunctionLike) and not typ.is_type_obj(): + call_type = typ + elif var.is_property: + call_type = get_proper_type(_analyze_member_access("__call__", typ, mx)) + else: + call_type = typ + + if isinstance(call_type, FunctionLike) and not call_type.is_type_obj(): if mx.is_lvalue: if var.is_property: if not var.is_settable_property: @@ -792,7 +797,7 @@ def analyze_var( if not var.is_staticmethod: # Class-level function objects and classmethods become bound methods: # the former to the instance, 
the latter to the class. - functype = typ + functype: FunctionLike = call_type # Use meet to narrow original_type to the dispatched type. # For example, assume # * A.f: Callable[[A1], None] where A1 <: A (maybe A1 == A) diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index f49541420cc0..4cc523a595d1 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -3158,3 +3158,20 @@ class C(A, B): class D(A, B): def f(self, z: int) -> str: pass # E: Method "f" is not using @override but is overriding a method in class "__main__.A" [typing fixtures/typing-override.pyi] + +[case testCallableProperty] +from typing import Callable + +class something_callable: + def __call__(self, fn) -> str: ... + +def decorator(fn: Callable[..., int]) -> something_callable: ... + +class A: + @property + @decorator + def f(self) -> int: ... + +reveal_type(A.f) # N: Revealed type is "__main__.something_callable" +reveal_type(A().f) # N: Revealed type is "builtins.str" +[builtins fixtures/property.pyi] From 0ae0c750b7c39c875f5ea536408143fe32d920d8 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 30 Aug 2023 03:08:05 +0100 Subject: [PATCH 008/144] Fix ParamSpec inference for callback protocols (#15986) Fixes https://github.com/python/mypy/issues/15984 Fix is straightforward, `ParamSpec` inference special-casing should put instances with `__call__` and callable types on same ground. --- mypy/checkexpr.py | 4 ++++ test-data/unit/check-parameter-specification.test | 15 +++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 4430d0773cfa..218568007b9e 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2185,6 +2185,10 @@ def get_arg_infer_passes( # run(test, 1, 2) # we will use `test` for inference, since it will allow to infer also # argument *names* for P <: [x: int, y: int]. 
+ if isinstance(p_actual, Instance): + call_method = find_member("__call__", p_actual, p_actual, is_operator=True) + if call_method is not None: + p_actual = get_proper_type(call_method) if ( isinstance(p_actual, CallableType) and not p_actual.variables diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index ed1d59b376d2..a98c92ce14e7 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1824,3 +1824,18 @@ class C(Generic[P]): ... c: C[int, [int, str], str] # E: Nested parameter specifications are not allowed reveal_type(c) # N: Revealed type is "__main__.C[Any]" [builtins fixtures/paramspec.pyi] + +[case testParamSpecInferenceWithCallbackProtocol] +from typing import Protocol, Callable, ParamSpec + +class CB(Protocol): + def __call__(self, x: str, y: int) -> None: ... + +P = ParamSpec('P') +def g(fn: Callable[P, None], *args: P.args, **kwargs: P.kwargs) -> None: ... + +cb: CB +g(cb, y=0, x='a') # OK +g(cb, y='a', x=0) # E: Argument "y" to "g" has incompatible type "str"; expected "int" \ + # E: Argument "x" to "g" has incompatible type "int"; expected "str" +[builtins fixtures/paramspec.pyi] From a7e0f6f8b0ec5de2fe7b804c9ac7160893ae5bf8 Mon Sep 17 00:00:00 2001 From: Ilya Priven Date: Tue, 29 Aug 2023 22:13:01 -0400 Subject: [PATCH 009/144] Add hint for AsyncIterator incompatible return type (#15883) For issue described in #5070 and documented in #14973, add a contextual link to the docs. 
--- mypy/messages.py | 16 ++++++++++++++++ test-data/unit/check-async-await.test | 16 ++++++++++++++++ 2 files changed, 32 insertions(+) diff --git a/mypy/messages.py b/mypy/messages.py index aab30ee29108..1933b74d27bd 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1310,6 +1310,22 @@ def return_type_incompatible_with_supertype( code=codes.OVERRIDE, ) + original = get_proper_type(original) + override = get_proper_type(override) + if ( + isinstance(original, Instance) + and isinstance(override, Instance) + and override.type.fullname == "typing.AsyncIterator" + and original.type.fullname == "typing.Coroutine" + and len(original.args) == 3 + and original.args[2] == override + ): + self.note(f'Consider declaring "{name}" in {target} without "async"', context) + self.note( + "See https://mypy.readthedocs.io/en/stable/more_types.html#asynchronous-iterators", + context, + ) + def override_target(self, name: str, name_in_super: str, supertype: str) -> str: target = f'supertype "{supertype}"' if name_in_super != name: diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test index 653025a0bb24..7afdbd687135 100644 --- a/test-data/unit/check-async-await.test +++ b/test-data/unit/check-async-await.test @@ -1021,3 +1021,19 @@ def coro() -> Generator[int, None, None]: reveal_type(coro) # N: Revealed type is "def () -> typing.AwaitableGenerator[builtins.int, None, None, typing.Generator[builtins.int, None, None]]" [builtins fixtures/async_await.pyi] [typing fixtures/typing-async.pyi] + +[case asyncIteratorInProtocol] +from typing import AsyncIterator, Protocol + +class P(Protocol): + async def launch(self) -> AsyncIterator[int]: + raise BaseException + +class Launcher(P): + def launch(self) -> AsyncIterator[int]: # E: Return type "AsyncIterator[int]" of "launch" incompatible with return type "Coroutine[Any, Any, AsyncIterator[int]]" in supertype "P" \ + # N: Consider declaring "launch" in supertype "P" without "async" \ + # N: See 
https://mypy.readthedocs.io/en/stable/more_types.html#asynchronous-iterators + raise BaseException + +[builtins fixtures/async_await.pyi] +[typing fixtures/typing-async.pyi] From 9a0aca14724eeef580eefcd3b340678313a1930a Mon Sep 17 00:00:00 2001 From: Max Murin Date: Wed, 30 Aug 2023 02:24:06 -0700 Subject: [PATCH 010/144] Update version number to 1.7.0+dev (#15989) We've cut the release branch for 1.6, so the dev version should now be 1.7. Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> --- mypy/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/version.py b/mypy/version.py index 512890ce7d2b..7cfc68d6e553 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -8,7 +8,7 @@ # - Release versions have the form "1.2.3". # - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440). # - Before 1.0 we had the form "0.NNN". -__version__ = "1.6.0+dev" +__version__ = "1.7.0+dev" base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) From df4717ee2cbbeb9e47fbd0e60edcaa6f81bbd7bb Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 31 Aug 2023 05:39:09 -0700 Subject: [PATCH 011/144] Represent bottom type as Never in messages (#15996) Fixes #15950 --- mypy/checker.py | 12 +++++----- mypy/checkexpr.py | 4 ++-- mypy/expandtype.py | 2 +- mypy/meet.py | 4 ++-- mypy/messages.py | 6 ++--- mypy/solve.py | 4 ++-- mypy/typeops.py | 2 +- mypy/types.py | 2 +- test-data/unit/check-classes.test | 16 ++++++------- test-data/unit/check-dataclass-transform.test | 12 +++++----- test-data/unit/check-dataclasses.test | 8 +++---- test-data/unit/check-generic-subtyping.test | 2 +- test-data/unit/check-generics.test | 12 +++++----- test-data/unit/check-inference-context.test | 16 ++++++------- test-data/unit/check-inference.test | 24 +++++++++---------- test-data/unit/check-isinstance.test | 4 ++-- test-data/unit/check-literal.test | 4 ++-- 
test-data/unit/check-narrowing.test | 2 +- test-data/unit/check-native-int.test | 8 +++---- test-data/unit/check-overloading.test | 12 +++++----- .../unit/check-parameter-specification.test | 6 ++--- test-data/unit/check-plugin-attrs.test | 10 ++++---- test-data/unit/check-protocols.test | 2 +- test-data/unit/check-python310.test | 2 +- test-data/unit/check-selftype.test | 4 ++-- test-data/unit/check-singledispatch.test | 2 +- test-data/unit/check-typeddict.test | 10 ++++---- test-data/unit/check-typevar-tuple.test | 2 +- test-data/unit/check-unreachable-code.test | 10 ++++---- test-data/unit/check-varargs.test | 10 ++++---- test-data/unit/pythoneval-asyncio.test | 2 +- test-data/unit/pythoneval.test | 6 ++--- 32 files changed, 111 insertions(+), 111 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index a44601b83e21..fffa87c4f634 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -3934,7 +3934,7 @@ def is_valid_defaultdict_partial_value_type(self, t: ProperType) -> bool: Examples: * t is 'int' --> True - * t is 'list[]' --> True + * t is 'list[Never]' --> True * t is 'dict[...]' --> False (only generic types with a single type argument supported) """ @@ -3980,7 +3980,7 @@ def set_inference_error_fallback_type(self, var: Var, lvalue: Lvalue, type: Type x = [] # type: ignore x.append(1) # Should be ok! - We implement this here by giving x a valid type (replacing inferred with Any). + We implement this here by giving x a valid type (replacing inferred Never with Any). """ fallback = self.inference_error_fallback_type(type) self.set_inferred_type(var, lvalue, fallback) @@ -7403,7 +7403,7 @@ def is_valid_inferred_type(typ: Type, is_lvalue_final: bool = False) -> bool: class InvalidInferredTypes(BoolTypeQuery): """Find type components that are not valid for an inferred type. - These include type, and any types resulting from failed + These include type, and any uninhabited types resulting from failed (ambiguous) type inference. 
""" @@ -7424,7 +7424,7 @@ def visit_type_var(self, t: TypeVarType) -> bool: class SetNothingToAny(TypeTranslator): - """Replace all ambiguous types with Any (to avoid spurious extra errors).""" + """Replace all ambiguous Uninhabited types with Any (to avoid spurious extra errors).""" def visit_uninhabited_type(self, t: UninhabitedType) -> Type: if t.ambiguous: @@ -7432,7 +7432,7 @@ def visit_uninhabited_type(self, t: UninhabitedType) -> Type: return t def visit_type_alias_type(self, t: TypeAliasType) -> Type: - # Target of the alias cannot be an ambiguous , so we just + # Target of the alias cannot be an ambiguous UninhabitedType, so we just # replace the arguments. return t.copy_modified(args=[a.accept(self) for a in t.args]) @@ -7774,7 +7774,7 @@ def is_subtype_no_promote(left: Type, right: Type) -> bool: def is_overlapping_types_no_promote_no_uninhabited_no_none(left: Type, right: Type) -> bool: - # For the purpose of unsafe overload checks we consider list[] and list[int] + # For the purpose of unsafe overload checks we consider list[Never] and list[int] # non-overlapping. This is consistent with how we treat list[int] and list[str] as # non-overlapping, despite [] belongs to both. Also this will prevent false positives # for failed type inference during unification. diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 218568007b9e..22a9852545b7 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2080,7 +2080,7 @@ def infer_function_type_arguments( ): freeze_all_type_vars(applied) return applied - # If it didn't work, erase free variables as , to avoid confusing errors. + # If it didn't work, erase free variables as uninhabited, to avoid confusing errors. 
unknown = UninhabitedType() unknown.ambiguous = True inferred_args = [ @@ -2444,7 +2444,7 @@ def check_argument_types( callee_arg_types = [orig_callee_arg_type] callee_arg_kinds = [ARG_STAR] else: - # TODO: Any and can appear in Unpack (as a result of user error), + # TODO: Any and Never can appear in Unpack (as a result of user error), # fail gracefully here and elsewhere (and/or normalize them away). assert isinstance(unpacked_type, Instance) assert unpacked_type.type.fullname == "builtins.tuple" diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 26353c043cb7..be8ecb9ccfd9 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -285,7 +285,7 @@ def expand_unpack(self, t: UnpackType) -> list[Type]: ): return [UnpackType(typ=repl)] elif isinstance(repl, (AnyType, UninhabitedType)): - # Replace *Ts = Any with *Ts = *tuple[Any, ...] and some for . + # Replace *Ts = Any with *Ts = *tuple[Any, ...] and some for Never. # These types may appear here as a result of user error or failed inference. return [UnpackType(t.type.tuple_fallback.copy_modified(args=[repl]))] else: diff --git a/mypy/meet.py b/mypy/meet.py index e3a22a226575..2efde4ac7588 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -968,11 +968,11 @@ def typed_dict_mapping_overlap( As usual empty, dictionaries lie in a gray area. In general, List[str] and List[str] are considered non-overlapping despite empty list belongs to both. However, List[int] - and List[] are considered overlapping. + and List[Never] are considered overlapping. So here we follow the same logic: a TypedDict with no required keys is considered non-overlapping with Mapping[str, ], but is considered overlapping with - Mapping[, ]. This way we avoid false positives for overloads, and also + Mapping[Never, Never]. This way we avoid false positives for overloads, and also avoid false positives for comparisons like SomeTypedDict == {} under --strict-equality. 
""" left, right = get_proper_types((left, right)) diff --git a/mypy/messages.py b/mypy/messages.py index 1933b74d27bd..cda4cda25ee4 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -2068,7 +2068,7 @@ def report_protocol_problems( if supertype.type.fullname in exclusions.get(type(subtype), []): return if any(isinstance(tp, UninhabitedType) for tp in get_proper_types(supertype.args)): - # We don't want to add notes for failed inference (e.g. Iterable[]). + # We don't want to add notes for failed inference (e.g. Iterable[Never]). # This will be only confusing a user even more. return @@ -2395,7 +2395,7 @@ def quote_type_string(type_string: str) -> str: """Quotes a type representation for use in messages.""" no_quote_regex = r"^<(tuple|union): \d+ items>$" if ( - type_string in ["Module", "overloaded function", "", ""] + type_string in ["Module", "overloaded function", "Never", ""] or type_string.startswith("Module ") or re.match(no_quote_regex, type_string) is not None or type_string.endswith("?") @@ -2597,7 +2597,7 @@ def format_literal_value(typ: LiteralType) -> str: if typ.is_noreturn: return "NoReturn" else: - return "" + return "Never" elif isinstance(typ, TypeType): type_name = "type" if options.use_lowercase_names() else "Type" return f"{type_name}[{format(typ.item)}]" diff --git a/mypy/solve.py b/mypy/solve.py index 5945d97ed85a..95377ea9f93e 100644 --- a/mypy/solve.py +++ b/mypy/solve.py @@ -300,8 +300,8 @@ def test(x: U) -> U: ... common_upper_bound_p = get_proper_type(common_upper_bound) # We include None for when strict-optional is disabled. if isinstance(common_upper_bound_p, (UninhabitedType, NoneType)): - # This will cause to infer , which is better than a free TypeVar - # that has an upper bound . + # This will cause to infer Never, which is better than a free TypeVar + # that has an upper bound Never. 
return None values: list[Type] = [] diff --git a/mypy/typeops.py b/mypy/typeops.py index 0e0bc348942e..f9c1914cc9a8 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -330,7 +330,7 @@ class B(A): pass ) # Update the method signature with the solutions found. - # Technically, some constraints might be unsolvable, make them . + # Technically, some constraints might be unsolvable, make them Never. to_apply = [t if t is not None else UninhabitedType() for t in typeargs] func = expand_type(func, {tv.id: arg for tv, arg in zip(self_vars, to_apply)}) variables = [v for v in func.variables if v not in self_vars] diff --git a/mypy/types.py b/mypy/types.py index fb360fb892f1..f974157ce84d 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -3103,7 +3103,7 @@ def visit_none_type(self, t: NoneType) -> str: return "None" def visit_uninhabited_type(self, t: UninhabitedType) -> str: - return "" + return "Never" def visit_erased_type(self, t: ErasedType) -> str: return "" diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 04b51bb603c5..4bc1e50f7be9 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -7718,13 +7718,13 @@ class D: def __init__(self) -> NoReturn: ... if object(): - reveal_type(A()) # N: Revealed type is "" + reveal_type(A()) # N: Revealed type is "Never" if object(): - reveal_type(B()) # N: Revealed type is "" + reveal_type(B()) # N: Revealed type is "Never" if object(): - reveal_type(C()) # N: Revealed type is "" + reveal_type(C()) # N: Revealed type is "Never" if object(): - reveal_type(D()) # N: Revealed type is "" + reveal_type(D()) # N: Revealed type is "Never" [case testOverloadedNewAndInitNoReturn] from typing import NoReturn, overload @@ -7764,19 +7764,19 @@ class D: def __init__(self, a: int = ...) -> None: ... 
if object(): - reveal_type(A()) # N: Revealed type is "" + reveal_type(A()) # N: Revealed type is "Never" reveal_type(A(1)) # N: Revealed type is "__main__.A" if object(): - reveal_type(B()) # N: Revealed type is "" + reveal_type(B()) # N: Revealed type is "Never" reveal_type(B(1)) # N: Revealed type is "__main__.B" if object(): - reveal_type(C()) # N: Revealed type is "" + reveal_type(C()) # N: Revealed type is "Never" reveal_type(C(1)) # N: Revealed type is "__main__.C" if object(): - reveal_type(D()) # N: Revealed type is "" + reveal_type(D()) # N: Revealed type is "Never" reveal_type(D(1)) # N: Revealed type is "__main__.D" [case testClassScopeImportWithWrapperAndError] diff --git a/test-data/unit/check-dataclass-transform.test b/test-data/unit/check-dataclass-transform.test index 58cd5e5a90f8..743c7fef8aa9 100644 --- a/test-data/unit/check-dataclass-transform.test +++ b/test-data/unit/check-dataclass-transform.test @@ -506,7 +506,7 @@ class FunctionModel: integer_: tuple FunctionModel(string_="abc", integer_=1) -FunctionModel(string_="abc", integer_=tuple()) # E: Argument "integer_" to "FunctionModel" has incompatible type "Tuple[, ...]"; expected "int" +FunctionModel(string_="abc", integer_=tuple()) # E: Argument "integer_" to "FunctionModel" has incompatible type "Tuple[Never, ...]"; expected "int" [typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] @@ -529,7 +529,7 @@ class FunctionModel: integer_: int FunctionModel(string_="abc", integer_=1) -FunctionModel(string_="abc", integer_=tuple()) # E: Argument "integer_" to "FunctionModel" has incompatible type "Tuple[, ...]"; expected "int" +FunctionModel(string_="abc", integer_=tuple()) # E: Argument "integer_" to "FunctionModel" has incompatible type "Tuple[Never, ...]"; expected "int" [typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] @@ -552,7 +552,7 @@ class BaseClassModel(ModelBase): integer_: tuple BaseClassModel(string_="abc", integer_=1) -BaseClassModel(string_="abc", 
integer_=tuple()) # E: Argument "integer_" to "BaseClassModel" has incompatible type "Tuple[, ...]"; expected "int" +BaseClassModel(string_="abc", integer_=tuple()) # E: Argument "integer_" to "BaseClassModel" has incompatible type "Tuple[Never, ...]"; expected "int" [typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] @@ -574,7 +574,7 @@ class BaseClassModel(ModelBase): integer_: int BaseClassModel(string_="abc", integer_=1) -BaseClassModel(string_="abc", integer_=tuple()) # E: Argument "integer_" to "BaseClassModel" has incompatible type "Tuple[, ...]"; expected "int" +BaseClassModel(string_="abc", integer_=tuple()) # E: Argument "integer_" to "BaseClassModel" has incompatible type "Tuple[Never, ...]"; expected "int" [typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] @@ -599,7 +599,7 @@ class MetaClassModel(ModelBaseWithMeta): integer_: tuple MetaClassModel(string_="abc", integer_=1) -MetaClassModel(string_="abc", integer_=tuple()) # E: Argument "integer_" to "MetaClassModel" has incompatible type "Tuple[, ...]"; expected "int" +MetaClassModel(string_="abc", integer_=tuple()) # E: Argument "integer_" to "MetaClassModel" has incompatible type "Tuple[Never, ...]"; expected "int" [typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] @@ -624,7 +624,7 @@ class MetaClassModel(ModelBaseWithMeta): integer_: int MetaClassModel(string_="abc", integer_=1) -MetaClassModel(string_="abc", integer_=tuple()) # E: Argument "integer_" to "MetaClassModel" has incompatible type "Tuple[, ...]"; expected "int" +MetaClassModel(string_="abc", integer_=tuple()) # E: Argument "integer_" to "MetaClassModel" has incompatible type "Tuple[Never, ...]"; expected "int" [typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 1f5f5635de4e..8a50e7124d05 100644 --- a/test-data/unit/check-dataclasses.test +++ 
b/test-data/unit/check-dataclasses.test @@ -2134,8 +2134,8 @@ T = TypeVar('T') class A(Generic[T]): x: T # exercises meet(T=int, int) = int y: bool # exercises meet(bool, int) = bool - z: str # exercises meet(str, bytes) = - w: dict # exercises meet(dict, ) = + z: str # exercises meet(str, bytes) = Never + w: dict # exercises meet(dict, Never) = Never init_var: InitVar[int] # exercises (non-optional, optional) = non-optional @dataclass @@ -2149,8 +2149,8 @@ class B: a_or_b: Union[A[int], B] _ = replace(a_or_b, x=42, y=True, init_var=42) _ = replace(a_or_b, x=42, y=True) # E: Missing named argument "init_var" for "replace" of "Union[A[int], B]" -_ = replace(a_or_b, x=42, y=True, z='42', init_var=42) # E: Argument "z" to "replace" of "Union[A[int], B]" has incompatible type "str"; expected -_ = replace(a_or_b, x=42, y=True, w={}, init_var=42) # E: Argument "w" to "replace" of "Union[A[int], B]" has incompatible type "Dict[, ]"; expected +_ = replace(a_or_b, x=42, y=True, z='42', init_var=42) # E: Argument "z" to "replace" of "Union[A[int], B]" has incompatible type "str"; expected Never +_ = replace(a_or_b, x=42, y=True, w={}, init_var=42) # E: Argument "w" to "replace" of "Union[A[int], B]" has incompatible type "Dict[Never, Never]"; expected Never _ = replace(a_or_b, y=42, init_var=42) # E: Argument "y" to "replace" of "Union[A[int], B]" has incompatible type "int"; expected "bool" [builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-generic-subtyping.test b/test-data/unit/check-generic-subtyping.test index 11c92d07021a..fd40f128ff4a 100644 --- a/test-data/unit/check-generic-subtyping.test +++ b/test-data/unit/check-generic-subtyping.test @@ -434,7 +434,7 @@ B(1) C(1) C('a') # E: Argument 1 to "C" has incompatible type "str"; expected "int" D(A(1)) -D(1) # E: Argument 1 to "D" has incompatible type "int"; expected "A[]" +D(1) # E: Argument 1 to "D" has incompatible type "int"; expected "A[Never]" [case testInheritedConstructor2] diff --git 
a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index 93674c0c2d5c..0781451e07ce 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -573,7 +573,7 @@ def func(x: IntNode[T]) -> IntNode[T]: return x reveal_type(func) # N: Revealed type is "def [T] (x: __main__.Node[builtins.int, T`-1]) -> __main__.Node[builtins.int, T`-1]" -func(1) # E: Argument 1 to "func" has incompatible type "int"; expected "Node[int, ]" +func(1) # E: Argument 1 to "func" has incompatible type "int"; expected "Node[int, Never]" func(Node('x', 1)) # E: Argument 1 to "Node" has incompatible type "str"; expected "int" reveal_type(func(Node(1, 'x'))) # N: Revealed type is "__main__.Node[builtins.int, builtins.str]" @@ -834,7 +834,7 @@ reveal_type(x) # N: Revealed type is "builtins.int" def f2(x: IntTP[T]) -> IntTP[T]: return x -f2((1, 2, 3)) # E: Argument 1 to "f2" has incompatible type "Tuple[int, int, int]"; expected "Tuple[int, ]" +f2((1, 2, 3)) # E: Argument 1 to "f2" has incompatible type "Tuple[int, int, int]"; expected "Tuple[int, Never]" reveal_type(f2((1, 'x'))) # N: Revealed type is "Tuple[builtins.int, builtins.str]" [builtins fixtures/for.pyi] @@ -904,7 +904,7 @@ n.y = 'x' # E: Incompatible types in assignment (expression has type "str", vari def f(x: Node[T, T]) -> TupledNode[T]: return Node(x.x, (x.x, x.x)) -f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Node[, ]" +f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Node[Never, Never]" f(Node(1, 'x')) # E: Cannot infer type argument 1 of "f" reveal_type(Node('x', 'x')) # N: Revealed type is "a.Node[builtins.str, builtins.str]" @@ -2279,7 +2279,7 @@ class Box(Generic[T]): class IteratorBox(Box[Iterator[T]]): ... 
-@IteratorBox.wrap # E: Argument 1 to "wrap" of "Box" has incompatible type "Callable[[], int]"; expected "Callable[[], Iterator[]]" +@IteratorBox.wrap # E: Argument 1 to "wrap" of "Box" has incompatible type "Callable[[], int]"; expected "Callable[[], Iterator[Never]]" def g() -> int: ... [builtins fixtures/classmethod.pyi] @@ -3034,8 +3034,8 @@ def id2(x: V) -> V: reveal_type(dec1(id1)) # N: Revealed type is "def [S <: __main__.B] (S`1) -> builtins.list[S`1]" reveal_type(dec1(id2)) # N: Revealed type is "def [S in (builtins.int, builtins.str)] (S`3) -> builtins.list[S`3]" reveal_type(dec2(id1)) # N: Revealed type is "def [UC <: __main__.C] (UC`5) -> builtins.list[UC`5]" -reveal_type(dec2(id2)) # N: Revealed type is "def () -> builtins.list[]" \ - # E: Argument 1 to "dec2" has incompatible type "Callable[[V], V]"; expected "Callable[[], ]" +reveal_type(dec2(id2)) # N: Revealed type is "def (Never) -> builtins.list[Never]" \ + # E: Argument 1 to "dec2" has incompatible type "Callable[[V], V]"; expected "Callable[[Never], Never]" [case testInferenceAgainstGenericLambdas] # flags: --new-type-inference diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test index 5f25b007dd47..169fee65f127 100644 --- a/test-data/unit/check-inference-context.test +++ b/test-data/unit/check-inference-context.test @@ -22,7 +22,7 @@ if int(): if int(): ab = f() if int(): - b = f() # E: Incompatible types in assignment (expression has type "A[]", variable has type "B") + b = f() # E: Incompatible types in assignment (expression has type "A[Never]", variable has type "B") [case testBasicContextInferenceForConstructor] from typing import TypeVar, Generic T = TypeVar('T') @@ -37,7 +37,7 @@ if int(): if int(): ab = A() if int(): - b = A() # E: Incompatible types in assignment (expression has type "A[]", variable has type "B") + b = A() # E: Incompatible types in assignment (expression has type "A[Never]", variable has type "B") [case 
testIncompatibleContextInference] from typing import TypeVar, Generic T = TypeVar('T') @@ -372,7 +372,7 @@ ao: List[object] a: A def f(): a, aa, ao # Prevent redefinition -a = [] # E: Incompatible types in assignment (expression has type "List[]", variable has type "A") +a = [] # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "A") aa = [] ao = [] @@ -842,7 +842,7 @@ T = TypeVar('T') def f(x: Union[List[T], str]) -> None: pass f([1]) f('') -f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Union[List[], str]" +f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Union[List[Never], str]" [builtins fixtures/isinstancelist.pyi] [case testIgnoringInferenceContext] @@ -911,7 +911,7 @@ from typing import TypeVar, Callable, Generic T = TypeVar('T') class A(Generic[T]): pass -reveal_type(A()) # N: Revealed type is "__main__.A[]" +reveal_type(A()) # N: Revealed type is "__main__.A[Never]" b = reveal_type(A()) # type: A[int] # N: Revealed type is "__main__.A[builtins.int]" [case testUnionWithGenericTypeItemContext] @@ -1311,7 +1311,7 @@ from typing import List, TypeVar T = TypeVar('T', bound=int) def f(x: List[T]) -> T: ... -# mypy infers List[] here, and is a subtype of str +# mypy infers List[Never] here, and Never is a subtype of str y: str = f([]) [builtins fixtures/list.pyi] @@ -1323,7 +1323,7 @@ def f(x: List[T]) -> List[T]: ... 
# TODO: improve error message for such cases, see #3283 and #5706 y: List[str] = f([]) \ - # E: Incompatible types in assignment (expression has type "List[]", variable has type "List[str]") \ + # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[str]") \ # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant [builtins fixtures/list.pyi] @@ -1343,7 +1343,7 @@ T = TypeVar('T', bound=int) def f(x: Optional[T] = None) -> List[T]: ... y: List[str] = f() \ - # E: Incompatible types in assignment (expression has type "List[]", variable has type "List[str]") \ + # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[str]") \ # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant [builtins fixtures/list.pyi] diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 56d3fe2b4ce7..36b028977591 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -876,10 +876,10 @@ g('a')() # E: "List[str]" not callable # The next line is a case where there are multiple ways to satisfy a constraint # involving a Union. Either T = List[str] or T = str would turn out to be valid, # but mypy doesn't know how to branch on these two options (and potentially have -# to backtrack later) and defaults to T = . The result is an +# to backtrack later) and defaults to T = Never. The result is an # awkward error message. Either a better error message, or simply accepting the # call, would be preferable here. 
-g(['a']) # E: Argument 1 to "g" has incompatible type "List[str]"; expected "List[]" +g(['a']) # E: Argument 1 to "g" has incompatible type "List[str]"; expected "List[Never]" h(g(['a'])) @@ -972,7 +972,7 @@ from typing import TypeVar, Union, List T = TypeVar('T') def f() -> List[T]: pass d1 = f() # type: Union[List[int], str] -d2 = f() # type: Union[int, str] # E: Incompatible types in assignment (expression has type "List[]", variable has type "Union[int, str]") +d2 = f() # type: Union[int, str] # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "Union[int, str]") def g(x: T) -> List[T]: pass d3 = g(1) # type: Union[List[int], List[str]] [builtins fixtures/list.pyi] @@ -3126,7 +3126,7 @@ T = TypeVar('T') def f() -> Callable[..., NoReturn]: ... x = f() -reveal_type(x) # N: Revealed type is "def (*Any, **Any) -> " +reveal_type(x) # N: Revealed type is "def (*Any, **Any) -> Never" [case testDeferralInNestedScopes] @@ -3635,8 +3635,8 @@ class Call(Protocol[T]): def f(x: Call[T]) -> Tuple[T, T]: ... def g(__x: str) -> None: pass -reveal_type(f(g)) # N: Revealed type is "Tuple[, ]" \ - # E: Argument 1 to "f" has incompatible type "Callable[[str], None]"; expected "Call[]" +reveal_type(f(g)) # N: Revealed type is "Tuple[Never, Never]" \ + # E: Argument 1 to "f" has incompatible type "Callable[[str], None]"; expected "Call[Never]" [builtins fixtures/list.pyi] [case testCallableInferenceAgainstCallableNamedVsPosOnly] @@ -3651,8 +3651,8 @@ class Call(Protocol[T]): def f(x: Call[T]) -> Tuple[T, T]: ... 
def g(*, x: str) -> None: pass -reveal_type(f(g)) # N: Revealed type is "Tuple[, ]" \ - # E: Argument 1 to "f" has incompatible type "Callable[[NamedArg(str, 'x')], None]"; expected "Call[]" +reveal_type(f(g)) # N: Revealed type is "Tuple[Never, Never]" \ + # E: Argument 1 to "f" has incompatible type "Callable[[NamedArg(str, 'x')], None]"; expected "Call[Never]" [builtins fixtures/list.pyi] [case testCallableInferenceAgainstCallablePosOnlyVsKwargs] @@ -3667,8 +3667,8 @@ class Call(Protocol[T]): def f(x: Call[T]) -> Tuple[T, T]: ... def g(**x: str) -> None: pass -reveal_type(f(g)) # N: Revealed type is "Tuple[, ]" \ - # E: Argument 1 to "f" has incompatible type "Callable[[KwArg(str)], None]"; expected "Call[]" +reveal_type(f(g)) # N: Revealed type is "Tuple[Never, Never]" \ + # E: Argument 1 to "f" has incompatible type "Callable[[KwArg(str)], None]"; expected "Call[Never]" [builtins fixtures/list.pyi] [case testCallableInferenceAgainstCallableNamedVsArgs] @@ -3683,6 +3683,6 @@ class Call(Protocol[T]): def f(x: Call[T]) -> Tuple[T, T]: ... 
def g(*args: str) -> None: pass -reveal_type(f(g)) # N: Revealed type is "Tuple[, ]" \ - # E: Argument 1 to "f" has incompatible type "Callable[[VarArg(str)], None]"; expected "Call[]" +reveal_type(f(g)) # N: Revealed type is "Tuple[Never, Never]" \ + # E: Argument 1 to "f" has incompatible type "Callable[[VarArg(str)], None]"; expected "Call[Never]" [builtins fixtures/list.pyi] diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test index 361d4db78752..b7ee38b69d00 100644 --- a/test-data/unit/check-isinstance.test +++ b/test-data/unit/check-isinstance.test @@ -1812,9 +1812,9 @@ reveal_type(fm) # N: Revealed type is "__main__.FooMetaclass" if issubclass(fm, Foo): reveal_type(fm) # N: Revealed type is "Type[__main__.Foo]" if issubclass(fm, Bar): - reveal_type(fm) # N: Revealed type is "" + reveal_type(fm) # N: Revealed type is "Never" if issubclass(fm, Baz): - reveal_type(fm) # N: Revealed type is "" + reveal_type(fm) # N: Revealed type is "Never" [builtins fixtures/isinstance.pyi] [case testIsinstanceAndNarrowTypeVariable] diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test index ecd4fc0a1f00..08c709c6b777 100644 --- a/test-data/unit/check-literal.test +++ b/test-data/unit/check-literal.test @@ -1794,7 +1794,7 @@ def f6(x: Optional[Literal[1]], y: Optional[Literal[2]]) -> None: pass reveal_type(unify(f1)) # N: Revealed type is "Literal[1]" if object(): - reveal_type(unify(f2)) # N: Revealed type is "" + reveal_type(unify(f2)) # N: Revealed type is "Never" reveal_type(unify(f3)) # N: Revealed type is "Literal[1]" reveal_type(unify(f4)) # N: Revealed type is "Literal[1]" reveal_type(unify(f5)) # N: Revealed type is "Literal[1]" @@ -1819,7 +1819,7 @@ T = TypeVar('T') def unify(func: Callable[[T, T], None]) -> T: pass def func(x: Literal[1], y: Literal[2]) -> None: pass -reveal_type(unify(func)) # N: Revealed type is "" +reveal_type(unify(func)) # N: Revealed type is "Never" [builtins fixtures/list.pyi] 
[out] diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test index 291f73a45230..c86cffd453df 100644 --- a/test-data/unit/check-narrowing.test +++ b/test-data/unit/check-narrowing.test @@ -1189,7 +1189,7 @@ def f(t: Type[T], a: A, b: B) -> None: reveal_type(a) # N: Revealed type is "__main__.A" if type(b) is t: - reveal_type(b) # N: Revealed type is "" + reveal_type(b) # N: Revealed type is "Never" else: reveal_type(b) # N: Revealed type is "__main__.B" diff --git a/test-data/unit/check-native-int.test b/test-data/unit/check-native-int.test index 30314eebcb31..2f852ca522c5 100644 --- a/test-data/unit/check-native-int.test +++ b/test-data/unit/check-native-int.test @@ -87,9 +87,9 @@ reveal_type(meet(f, f32)) # N: Revealed type is "mypy_extensions.i32" reveal_type(meet(f64, f)) # N: Revealed type is "mypy_extensions.i64" reveal_type(meet(f, f64)) # N: Revealed type is "mypy_extensions.i64" if object(): - reveal_type(meet(f32, f64)) # N: Revealed type is "" + reveal_type(meet(f32, f64)) # N: Revealed type is "Never" if object(): - reveal_type(meet(f64, f32)) # N: Revealed type is "" + reveal_type(meet(f64, f32)) # N: Revealed type is "Never" reveal_type(meet(f, fa)) # N: Revealed type is "builtins.int" reveal_type(meet(f32, fa)) # N: Revealed type is "mypy_extensions.i32" @@ -149,9 +149,9 @@ def ff(x: float) -> None: pass def fi32(x: i32) -> None: pass if object(): - reveal_type(meet(ff, fi32)) # N: Revealed type is "" + reveal_type(meet(ff, fi32)) # N: Revealed type is "Never" if object(): - reveal_type(meet(fi32, ff)) # N: Revealed type is "" + reveal_type(meet(fi32, ff)) # N: Revealed type is "Never" [builtins fixtures/dict.pyi] [case testNativeIntForLoopRange] diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index ede4a2e4cf62..4546c7171856 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -3397,11 +3397,11 @@ def wrapper() -> None: # Note: 
These should be fine, but mypy has an unrelated bug # that makes them error out? - a2_overload: A = SomeType().foo(obj1) # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "W1[]" - a2_union: A = SomeType().bar(obj1) # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "Union[W1[], W2[]]" + a2_overload: A = SomeType().foo(obj1) # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "W1[Never]" + a2_union: A = SomeType().bar(obj1) # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "Union[W1[Never], W2[Never]]" - SomeType().foo(obj1) # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "W1[]" - SomeType().bar(obj1) # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "Union[W1[], W2[]]" + SomeType().foo(obj1) # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "W1[Never]" + SomeType().bar(obj1) # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[A]]"; expected "Union[W1[Never], W2[Never]]" [case testOverloadingInferUnionReturnWithBadObjectTypevarReturn] from typing import overload, Union, TypeVar, Generic @@ -3425,8 +3425,8 @@ class SomeType(Generic[T]): def wrapper(mysterious: T) -> T: obj1: Union[W1[A], W2[B]] - SomeType().foo(obj1) # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "W1[]" - SomeType().bar(obj1) # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "Union[W1[], W2[]]" + SomeType().foo(obj1) # E: Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "W1[Never]" + SomeType().bar(obj1) # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "Union[W1[Never], W2[Never]]" SomeType[A]().foo(obj1) # E: 
Argument 1 to "foo" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "W1[A]" SomeType[A]().bar(obj1) # E: Argument 1 to "bar" of "SomeType" has incompatible type "Union[W1[A], W2[B]]"; expected "Union[W1[A], W2[A]]" diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index a98c92ce14e7..d80069644194 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1401,9 +1401,9 @@ def wrong_name_constructor(b: bool) -> SomeClass: return SomeClass("a") func(SomeClass, constructor) -reveal_type(func(SomeClass, wrong_constructor)) # N: Revealed type is "def (a: ) -> __main__.SomeClass" -reveal_type(func_regular(SomeClass, wrong_constructor)) # N: Revealed type is "def () -> __main__.SomeClass" -func(SomeClass, wrong_name_constructor) # E: Argument 1 to "func" has incompatible type "Type[SomeClass]"; expected "Callable[[], SomeClass]" +reveal_type(func(SomeClass, wrong_constructor)) # N: Revealed type is "def (a: Never) -> __main__.SomeClass" +reveal_type(func_regular(SomeClass, wrong_constructor)) # N: Revealed type is "def (Never) -> __main__.SomeClass" +func(SomeClass, wrong_name_constructor) # E: Argument 1 to "func" has incompatible type "Type[SomeClass]"; expected "Callable[[Never], SomeClass]" [builtins fixtures/paramspec.pyi] [case testParamSpecInTypeAliasBasic] diff --git a/test-data/unit/check-plugin-attrs.test b/test-data/unit/check-plugin-attrs.test index 1465bab2bb7b..fb5f1f9472c2 100644 --- a/test-data/unit/check-plugin-attrs.test +++ b/test-data/unit/check-plugin-attrs.test @@ -1181,7 +1181,7 @@ def my_factory() -> int: return 7 @attr.s class A: - x: int = attr.ib(factory=list) # E: Incompatible types in assignment (expression has type "List[]", variable has type "int") + x: int = attr.ib(factory=list) # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "int") y: str = 
attr.ib(factory=my_factory) # E: Incompatible types in assignment (expression has type "int", variable has type "str") [builtins fixtures/list.pyi] @@ -2131,8 +2131,8 @@ T = TypeVar('T') class A(Generic[T]): x: T # exercises meet(T=int, int) = int y: bool # exercises meet(bool, int) = bool - z: str # exercises meet(str, bytes) = - w: dict # exercises meet(dict, ) = + z: str # exercises meet(str, bytes) = Never + w: dict # exercises meet(dict, Never) = Never @attrs.define @@ -2144,8 +2144,8 @@ class B: a_or_b: A[int] | B a2 = attrs.evolve(a_or_b, x=42, y=True) -a2 = attrs.evolve(a_or_b, x=42, y=True, z='42') # E: Argument "z" to "evolve" of "Union[A[int], B]" has incompatible type "str"; expected -a2 = attrs.evolve(a_or_b, x=42, y=True, w={}) # E: Argument "w" to "evolve" of "Union[A[int], B]" has incompatible type "Dict[, ]"; expected +a2 = attrs.evolve(a_or_b, x=42, y=True, z='42') # E: Argument "z" to "evolve" of "Union[A[int], B]" has incompatible type "str"; expected Never +a2 = attrs.evolve(a_or_b, x=42, y=True, w={}) # E: Argument "w" to "evolve" of "Union[A[int], B]" has incompatible type "Dict[Never, Never]"; expected Never [builtins fixtures/plugin_attrs.pyi] diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index dba01be50fee..e73add454a67 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -928,7 +928,7 @@ class L: def last(seq: Linked[T]) -> T: pass -last(L()) # E: Argument 1 to "last" has incompatible type "L"; expected "Linked[]" +last(L()) # E: Argument 1 to "last" has incompatible type "L"; expected "Linked[Never]" [case testMutuallyRecursiveProtocols] from typing import Protocol, Sequence, List diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test index 0fe6a3d5a5cc..f81da23d148c 100644 --- a/test-data/unit/check-python310.test +++ b/test-data/unit/check-python310.test @@ -1144,7 +1144,7 @@ m: str match m: case a if a := 1: # E: 
Incompatible types in assignment (expression has type "int", variable has type "str") - reveal_type(a) # N: Revealed type is "" + reveal_type(a) # N: Revealed type is "Never" [case testMatchAssigningPatternGuard] m: str diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index d5024412ca97..bf7a928ff51d 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -1520,7 +1520,7 @@ from typing import Self, TypeVar, Tuple T = TypeVar("T") class C: def meth(self: T) -> Tuple[Self, T]: ... # E: Method cannot have explicit self annotation and Self type -reveal_type(C().meth()) # N: Revealed type is "Tuple[, __main__.C]" +reveal_type(C().meth()) # N: Revealed type is "Tuple[Never, __main__.C]" [builtins fixtures/property.pyi] [case testTypingSelfProperty] @@ -1558,7 +1558,7 @@ class C: class D(C): ... reveal_type(D.meth()) # N: Revealed type is "__main__.D" -reveal_type(D.bad()) # N: Revealed type is "" +reveal_type(D.bad()) # N: Revealed type is "Never" [builtins fixtures/classmethod.pyi] [case testTypingSelfOverload] diff --git a/test-data/unit/check-singledispatch.test b/test-data/unit/check-singledispatch.test index 1adec1575b7e..e63d4c073e86 100644 --- a/test-data/unit/check-singledispatch.test +++ b/test-data/unit/check-singledispatch.test @@ -300,7 +300,7 @@ h('a', 1) # E: Argument 2 to "h" has incompatible type "int"; expected "str" [case testDontCrashWhenRegisteringAfterError] import functools -a = functools.singledispatch('a') # E: Need type annotation for "a" # E: Argument 1 to "singledispatch" has incompatible type "str"; expected "Callable[..., ]" +a = functools.singledispatch('a') # E: Need type annotation for "a" # E: Argument 1 to "singledispatch" has incompatible type "str"; expected "Callable[..., Never]" @a.register(int) def default(val) -> int: diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index 7de8e6416f35..b8953f05b6a5 100644 --- 
a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -602,7 +602,7 @@ YbZ = TypedDict('YbZ', {'y': object, 'z': int}) T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: XYa, y: YbZ) -> None: pass -reveal_type(f(g)) # N: Revealed type is "" +reveal_type(f(g)) # N: Revealed type is "Never" [builtins fixtures/dict.pyi] [case testMeetOfTypedDictsWithNoCommonKeysHasAllKeysAndNewFallback] @@ -625,7 +625,7 @@ M = Mapping[str, int] T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: X, y: M) -> None: pass -reveal_type(f(g)) # N: Revealed type is "" +reveal_type(f(g)) # N: Revealed type is "Never" [builtins fixtures/dict.pyi] [case testMeetOfTypedDictWithIncompatibleMappingIsUninhabited] @@ -636,7 +636,7 @@ M = Mapping[str, str] T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: X, y: M) -> None: pass -reveal_type(f(g)) # N: Revealed type is "" +reveal_type(f(g)) # N: Revealed type is "Never" [builtins fixtures/dict.pyi] [case testMeetOfTypedDictWithCompatibleMappingSuperclassIsUninhabitedForNow] @@ -680,7 +680,7 @@ YZ = TypedDict('YZ', {'y': int, 'z': int}) T = TypeVar('T') def f(x: Callable[[T, T], None]) -> T: pass def g(x: XY, y: YZ) -> None: pass -reveal_type(f(g)) # N: Revealed type is "" +reveal_type(f(g)) # N: Revealed type is "Never" [builtins fixtures/dict.pyi] @@ -1856,7 +1856,7 @@ class Config(TypedDict): b: str x: Config -x == {} # E: Non-overlapping equality check (left operand type: "Config", right operand type: "Dict[, ]") +x == {} # E: Non-overlapping equality check (left operand type: "Config", right operand type: "Dict[Never, Never]") [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index c8b33ec96b06..f7faab4818c9 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -17,7 +17,7 @@ reveal_type(f(args)) # N: 
Revealed type is "Tuple[builtins.int, builtins.str]" reveal_type(f(varargs)) # N: Revealed type is "builtins.tuple[builtins.int, ...]" -f(0) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[, ...]" +f(0) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[Never, ...]" def g(a: Tuple[Unpack[Ts]], b: Tuple[Unpack[Ts]]) -> Tuple[Unpack[Ts]]: return a diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test index 20b5dea9fc87..acb5ca6ea609 100644 --- a/test-data/unit/check-unreachable-code.test +++ b/test-data/unit/check-unreachable-code.test @@ -900,15 +900,15 @@ from typing_extensions import NoReturn as TENoReturn from mypy_extensions import NoReturn as MENoReturn bottom1: Never -reveal_type(bottom1) # N: Revealed type is "" +reveal_type(bottom1) # N: Revealed type is "Never" bottom2: TENever -reveal_type(bottom2) # N: Revealed type is "" +reveal_type(bottom2) # N: Revealed type is "Never" bottom3: NoReturn -reveal_type(bottom3) # N: Revealed type is "" +reveal_type(bottom3) # N: Revealed type is "Never" bottom4: TENoReturn -reveal_type(bottom4) # N: Revealed type is "" +reveal_type(bottom4) # N: Revealed type is "Never" bottom5: MENoReturn -reveal_type(bottom5) # N: Revealed type is "" +reveal_type(bottom5) # N: Revealed type is "Never" [builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test index fe09fb43c97c..54546f3973b3 100644 --- a/test-data/unit/check-varargs.test +++ b/test-data/unit/check-varargs.test @@ -604,27 +604,27 @@ class B: pass if int(): a, aa = G().f(*[a]) \ # E: Incompatible types in assignment (expression has type "List[A]", variable has type "A") \ - # E: Incompatible types in assignment (expression has type "List[]", variable has type "List[A]") \ + # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[A]") \ # N: "List" is invariant -- see 
https://mypy.readthedocs.io/en/stable/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant if int(): - aa, a = G().f(*[a]) # E: Incompatible types in assignment (expression has type "List[]", variable has type "A") + aa, a = G().f(*[a]) # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "A") if int(): ab, aa = G().f(*[a]) \ - # E: Incompatible types in assignment (expression has type "List[]", variable has type "List[A]") \ + # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[A]") \ # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant \ # E: Argument 1 to "f" of "G" has incompatible type "*List[A]"; expected "B" if int(): ao, ao = G().f(*[a]) \ - # E: Incompatible types in assignment (expression has type "List[]", variable has type "List[object]") \ + # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[object]") \ # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant if int(): aa, aa = G().f(*[a]) \ - # E: Incompatible types in assignment (expression has type "List[]", variable has type "List[A]") \ + # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[A]") \ # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant [builtins fixtures/list.pyi] diff --git a/test-data/unit/pythoneval-asyncio.test b/test-data/unit/pythoneval-asyncio.test index 556414cf3252..069374b9635c 100644 --- a/test-data/unit/pythoneval-asyncio.test +++ b/test-data/unit/pythoneval-asyncio.test @@ -472,7 +472,7 @@ async def bad(arg: P) -> T: pass [out] _program.py:8: note: 
Revealed type is "def [T] (arg: P?) -> typing.Coroutine[Any, Any, T`-1]" -_program.py:9: error: Value of type "Coroutine[Any, Any, ]" must be used +_program.py:9: error: Value of type "Coroutine[Any, Any, Never]" must be used _program.py:9: note: Are you missing an await? _program.py:11: error: Variable "_testForwardRefToBadAsyncShouldNotCrash_newsemanal.P" is not valid as a type _program.py:11: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 58dfb172cf76..c5be30eac1b7 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -863,8 +863,8 @@ MyDDict(dict)[0] _program.py:7: error: Argument 1 to "defaultdict" has incompatible type "Type[List[Any]]"; expected "Optional[Callable[[], str]]" _program.py:10: error: Invalid index type "str" for "defaultdict[int, str]"; expected type "int" _program.py:10: error: Incompatible types in assignment (expression has type "int", target has type "str") -_program.py:20: error: Argument 1 to "tst" has incompatible type "defaultdict[str, List[]]"; expected "defaultdict[int, List[]]" -_program.py:24: error: Invalid index type "str" for "MyDDict[Dict[, ]]"; expected type "int" +_program.py:20: error: Argument 1 to "tst" has incompatible type "defaultdict[str, List[Never]]"; expected "defaultdict[int, List[Never]]" +_program.py:24: error: Invalid index type "str" for "MyDDict[Dict[Never, Never]]"; expected type "int" [case testNoSubcriptionOfStdlibCollections] # flags: --python-version 3.7 @@ -2004,7 +2004,7 @@ Foo().__dict__ = {} _testInferenceOfDunderDictOnClassObjects.py:2: note: Revealed type is "types.MappingProxyType[builtins.str, Any]" _testInferenceOfDunderDictOnClassObjects.py:3: note: Revealed type is "builtins.dict[builtins.str, Any]" _testInferenceOfDunderDictOnClassObjects.py:4: error: Property "__dict__" defined in "type" is read-only 
-_testInferenceOfDunderDictOnClassObjects.py:4: error: Incompatible types in assignment (expression has type "Dict[, ]", variable has type "MappingProxyType[str, Any]") +_testInferenceOfDunderDictOnClassObjects.py:4: error: Incompatible types in assignment (expression has type "Dict[Never, Never]", variable has type "MappingProxyType[str, Any]") [case testTypeVarTuple] # flags: --enable-incomplete-feature=TypeVarTuple --enable-incomplete-feature=Unpack --python-version=3.11 From 2a6d9cbc45eba360934ddee7b43c607b3edb3095 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 1 Sep 2023 00:37:41 -0700 Subject: [PATCH 012/144] Sync typeshed (#16009) Source commit: https://github.com/python/typeshed/commit/f28cb8b8562ccc382d018129ba4886f241c6db9c --- mypy/typeshed/stdlib/_ctypes.pyi | 6 +- mypy/typeshed/stdlib/asyncio/taskgroups.pyi | 7 +- mypy/typeshed/stdlib/asyncio/tasks.pyi | 30 +++++--- mypy/typeshed/stdlib/configparser.pyi | 81 ++++++++++++++------- mypy/typeshed/stdlib/csv.pyi | 2 +- mypy/typeshed/stdlib/enum.pyi | 6 +- mypy/typeshed/stdlib/genericpath.pyi | 8 +- mypy/typeshed/stdlib/gzip.pyi | 6 +- mypy/typeshed/stdlib/ntpath.pyi | 5 ++ mypy/typeshed/stdlib/os/__init__.pyi | 9 ++- mypy/typeshed/stdlib/posixpath.pyi | 9 +++ mypy/typeshed/stdlib/pydoc.pyi | 4 +- mypy/typeshed/stdlib/ssl.pyi | 10 ++- mypy/typeshed/stdlib/tempfile.pyi | 2 +- mypy/typeshed/stdlib/unittest/__init__.pyi | 4 +- mypy/typeshed/stdlib/unittest/loader.pyi | 7 +- mypy/typeshed/stdlib/unittest/main.pyi | 54 ++++++++++---- mypy/typeshed/stdlib/unittest/result.pyi | 7 ++ mypy/typeshed/stdlib/unittest/runner.pyi | 62 ++++++++++++---- 19 files changed, 233 insertions(+), 86 deletions(-) diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi index 756ee86d3342..165bb5337784 100644 --- a/mypy/typeshed/stdlib/_ctypes.pyi +++ b/mypy/typeshed/stdlib/_ctypes.pyi @@ -69,7 +69,7 @@ class 
_CData(metaclass=_CDataMeta): def __buffer__(self, __flags: int) -> memoryview: ... def __release_buffer__(self, __buffer: memoryview) -> None: ... -class _SimpleCData(Generic[_T], _CData): +class _SimpleCData(_CData, Generic[_T]): value: _T # The TypeVar can be unsolved here, # but we can't use overloads without creating many, many mypy false-positive errors @@ -78,7 +78,7 @@ class _SimpleCData(Generic[_T], _CData): class _CanCastTo(_CData): ... class _PointerLike(_CanCastTo): ... -class _Pointer(Generic[_CT], _PointerLike, _CData): +class _Pointer(_PointerLike, _CData, Generic[_CT]): _type_: type[_CT] contents: _CT @overload @@ -140,7 +140,7 @@ class _StructUnionBase(_CData, metaclass=_StructUnionMeta): class Union(_StructUnionBase): ... class Structure(_StructUnionBase): ... -class Array(Generic[_CT], _CData): +class Array(_CData, Generic[_CT]): @property @abstractmethod def _length_(self) -> int: ... diff --git a/mypy/typeshed/stdlib/asyncio/taskgroups.pyi b/mypy/typeshed/stdlib/asyncio/taskgroups.pyi index 47d9bb2f699e..aec3f1127f15 100644 --- a/mypy/typeshed/stdlib/asyncio/taskgroups.pyi +++ b/mypy/typeshed/stdlib/asyncio/taskgroups.pyi @@ -1,10 +1,11 @@ import sys from contextvars import Context from types import TracebackType -from typing import TypeVar +from typing import Any, TypeVar from typing_extensions import Self from . import _CoroutineLike +from .events import AbstractEventLoop from .tasks import Task if sys.version_info >= (3, 12): @@ -15,6 +16,10 @@ else: _T = TypeVar("_T") class TaskGroup: + _loop: AbstractEventLoop | None + _tasks: set[Task[Any]] + async def __aenter__(self) -> Self: ... async def __aexit__(self, et: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... def create_task(self, coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: ... + def _on_task_done(self, task: Task[object]) -> None: ... 
diff --git a/mypy/typeshed/stdlib/asyncio/tasks.pyi b/mypy/typeshed/stdlib/asyncio/tasks.pyi index 5ea30d3791de..3bc65e3703c5 100644 --- a/mypy/typeshed/stdlib/asyncio/tasks.pyi +++ b/mypy/typeshed/stdlib/asyncio/tasks.pyi @@ -243,12 +243,6 @@ if sys.version_info >= (3, 10): async def sleep(delay: float) -> None: ... @overload async def sleep(delay: float, result: _T) -> _T: ... - @overload - async def wait(fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED") -> tuple[set[_FT], set[_FT]]: ... # type: ignore[misc] - @overload - async def wait( - fs: Iterable[Awaitable[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" - ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... async def wait_for(fut: _FutureLike[_T], timeout: float | None) -> _T: ... else: @@ -257,6 +251,25 @@ else: async def sleep(delay: float, *, loop: AbstractEventLoop | None = None) -> None: ... @overload async def sleep(delay: float, result: _T, *, loop: AbstractEventLoop | None = None) -> _T: ... + async def wait_for(fut: _FutureLike[_T], timeout: float | None, *, loop: AbstractEventLoop | None = None) -> _T: ... + +if sys.version_info >= (3, 11): + @overload + async def wait(fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED") -> tuple[set[_FT], set[_FT]]: ... # type: ignore[misc] + @overload + async def wait( + fs: Iterable[Task[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" + ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... + +elif sys.version_info >= (3, 10): + @overload + async def wait(fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED") -> tuple[set[_FT], set[_FT]]: ... # type: ignore[misc] + @overload + async def wait( + fs: Iterable[Awaitable[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" + ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... 
+ +else: @overload async def wait( # type: ignore[misc] fs: Iterable[_FT], @@ -273,7 +286,6 @@ else: timeout: float | None = None, return_when: str = "ALL_COMPLETED", ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... - async def wait_for(fut: _FutureLike[_T], timeout: float | None, *, loop: AbstractEventLoop | None = None) -> _T: ... if sys.version_info >= (3, 12): _TaskCompatibleCoro: TypeAlias = Coroutine[Any, Any, _T_co] @@ -291,7 +303,7 @@ class Task(Future[_T_co], Generic[_T_co]): # type: ignore[type-var] # pyright: coro: _TaskCompatibleCoro[_T_co], *, loop: AbstractEventLoop = ..., - name: str | None, + name: str | None = ..., context: Context | None = None, eager_start: bool = False, ) -> None: ... @@ -301,7 +313,7 @@ class Task(Future[_T_co], Generic[_T_co]): # type: ignore[type-var] # pyright: coro: _TaskCompatibleCoro[_T_co], *, loop: AbstractEventLoop = ..., - name: str | None, + name: str | None = ..., context: Context | None = None, ) -> None: ... elif sys.version_info >= (3, 8): diff --git a/mypy/typeshed/stdlib/configparser.pyi b/mypy/typeshed/stdlib/configparser.pyi index 6f9f788310d1..e6fedb0328c2 100644 --- a/mypy/typeshed/stdlib/configparser.pyi +++ b/mypy/typeshed/stdlib/configparser.pyi @@ -5,31 +5,53 @@ from re import Pattern from typing import Any, ClassVar, TypeVar, overload from typing_extensions import Literal, TypeAlias -__all__ = [ - "NoSectionError", - "DuplicateOptionError", - "DuplicateSectionError", - "NoOptionError", - "InterpolationError", - "InterpolationDepthError", - "InterpolationMissingOptionError", - "InterpolationSyntaxError", - "ParsingError", - "MissingSectionHeaderError", - "ConfigParser", - "RawConfigParser", - "Interpolation", - "BasicInterpolation", - "ExtendedInterpolation", - "LegacyInterpolation", - "SectionProxy", - "ConverterMapping", - "DEFAULTSECT", - "MAX_INTERPOLATION_DEPTH", -] - -if sys.version_info < (3, 12): - __all__ += ["SafeConfigParser"] +if sys.version_info >= (3, 12): + __all__ = ( + "NoSectionError", + 
"DuplicateOptionError", + "DuplicateSectionError", + "NoOptionError", + "InterpolationError", + "InterpolationDepthError", + "InterpolationMissingOptionError", + "InterpolationSyntaxError", + "ParsingError", + "MissingSectionHeaderError", + "ConfigParser", + "RawConfigParser", + "Interpolation", + "BasicInterpolation", + "ExtendedInterpolation", + "LegacyInterpolation", + "SectionProxy", + "ConverterMapping", + "DEFAULTSECT", + "MAX_INTERPOLATION_DEPTH", + ) +else: + __all__ = [ + "NoSectionError", + "DuplicateOptionError", + "DuplicateSectionError", + "NoOptionError", + "InterpolationError", + "InterpolationDepthError", + "InterpolationMissingOptionError", + "InterpolationSyntaxError", + "ParsingError", + "MissingSectionHeaderError", + "ConfigParser", + "SafeConfigParser", + "RawConfigParser", + "Interpolation", + "BasicInterpolation", + "ExtendedInterpolation", + "LegacyInterpolation", + "SectionProxy", + "ConverterMapping", + "DEFAULTSECT", + "MAX_INTERPOLATION_DEPTH", + ] _Section: TypeAlias = Mapping[str, str] _Parser: TypeAlias = MutableMapping[str, _Section] @@ -128,7 +150,8 @@ class RawConfigParser(_Parser): def read_file(self, f: Iterable[str], source: str | None = None) -> None: ... def read_string(self, string: str, source: str = "") -> None: ... def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]], source: str = "") -> None: ... - def readfp(self, fp: Iterable[str], filename: str | None = None) -> None: ... + if sys.version_info < (3, 12): + def readfp(self, fp: Iterable[str], filename: str | None = None) -> None: ... # These get* methods are partially applied (with the same names) in # SectionProxy; the stubs should be kept updated together @overload @@ -277,7 +300,11 @@ class InterpolationSyntaxError(InterpolationError): ... class ParsingError(Error): source: str errors: list[tuple[int, str]] - def __init__(self, source: str | None = None, filename: str | None = None) -> None: ... 
+ if sys.version_info >= (3, 12): + def __init__(self, source: str) -> None: ... + else: + def __init__(self, source: str | None = None, filename: str | None = None) -> None: ... + def append(self, lineno: int, line: str) -> None: ... class MissingSectionHeaderError(ParsingError): diff --git a/mypy/typeshed/stdlib/csv.pyi b/mypy/typeshed/stdlib/csv.pyi index 139ba7af2208..a9c7fe0492c8 100644 --- a/mypy/typeshed/stdlib/csv.pyi +++ b/mypy/typeshed/stdlib/csv.pyi @@ -69,7 +69,7 @@ class excel(Dialect): ... class excel_tab(excel): ... class unix_dialect(Dialect): ... -class DictReader(Generic[_T], Iterator[_DictReadMapping[_T | Any, str | Any]]): +class DictReader(Iterator[_DictReadMapping[_T | Any, str | Any]], Generic[_T]): fieldnames: Sequence[_T] | None restkey: str | None restval: str | None diff --git a/mypy/typeshed/stdlib/enum.pyi b/mypy/typeshed/stdlib/enum.pyi index a8ba7bf157c2..e6eaf6c413dc 100644 --- a/mypy/typeshed/stdlib/enum.pyi +++ b/mypy/typeshed/stdlib/enum.pyi @@ -33,7 +33,7 @@ if sys.version_info >= (3, 11): "verify", ] -if sys.version_info >= (3, 12): +if sys.version_info >= (3, 11): __all__ += ["pickle_by_enum_name", "pickle_by_global_name"] _EnumMemberT = TypeVar("_EnumMemberT") @@ -188,7 +188,7 @@ class Enum(metaclass=EnumMeta): def __hash__(self) -> int: ... def __format__(self, format_spec: str) -> str: ... def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ... - if sys.version_info >= (3, 12): + if sys.version_info >= (3, 11): def __copy__(self) -> Self: ... def __deepcopy__(self, memo: Any) -> Self: ... @@ -294,6 +294,6 @@ class auto(IntFlag): def value(self) -> Any: ... def __new__(cls) -> Self: ... -if sys.version_info >= (3, 12): +if sys.version_info >= (3, 11): def pickle_by_global_name(self: Enum, proto: int) -> str: ... def pickle_by_enum_name(self: _EnumMemberT, proto: int) -> tuple[Callable[..., Any], tuple[type[_EnumMemberT], str]]: ... 
diff --git a/mypy/typeshed/stdlib/genericpath.pyi b/mypy/typeshed/stdlib/genericpath.pyi index 46426b63c852..be08f7a3cb79 100644 --- a/mypy/typeshed/stdlib/genericpath.pyi +++ b/mypy/typeshed/stdlib/genericpath.pyi @@ -1,5 +1,6 @@ import os -from _typeshed import BytesPath, FileDescriptorOrPath, StrPath, SupportsRichComparisonT +import sys +from _typeshed import BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath, SupportsRichComparisonT from collections.abc import Sequence from typing import overload from typing_extensions import Literal, LiteralString @@ -17,6 +18,8 @@ __all__ = [ "sameopenfile", "samestat", ] +if sys.version_info >= (3, 12): + __all__ += ["islink"] # All overloads can return empty string. Ideally, Literal[""] would be a valid # Iterable[T], so that list[T] | Literal[""] could be used as a return @@ -36,6 +39,9 @@ def getsize(filename: FileDescriptorOrPath) -> int: ... def isfile(path: FileDescriptorOrPath) -> bool: ... def isdir(s: FileDescriptorOrPath) -> bool: ... +if sys.version_info >= (3, 12): + def islink(path: StrOrBytesPath) -> bool: ... + # These return float if os.stat_float_times() == True, # but int is a subclass of float. def getatime(filename: FileDescriptorOrPath) -> float: ... diff --git a/mypy/typeshed/stdlib/gzip.pyi b/mypy/typeshed/stdlib/gzip.pyi index 1ec8b4b8ca7c..d001849e609c 100644 --- a/mypy/typeshed/stdlib/gzip.pyi +++ b/mypy/typeshed/stdlib/gzip.pyi @@ -139,8 +139,10 @@ class GzipFile(_compression.BaseStream): fileobj: _ReadableFileobj | _WritableFileobj | None = None, mtime: float | None = None, ) -> None: ... - @property - def filename(self) -> str: ... + if sys.version_info < (3, 12): + @property + def filename(self) -> str: ... + @property def mtime(self) -> int | None: ... 
crc: int diff --git a/mypy/typeshed/stdlib/ntpath.pyi b/mypy/typeshed/stdlib/ntpath.pyi index f1fa137c6d88..1a58b52de050 100644 --- a/mypy/typeshed/stdlib/ntpath.pyi +++ b/mypy/typeshed/stdlib/ntpath.pyi @@ -42,6 +42,9 @@ from posixpath import ( splitext as splitext, supports_unicode_filenames as supports_unicode_filenames, ) + +if sys.version_info >= (3, 12): + from posixpath import isjunction as isjunction, splitroot as splitroot from typing import AnyStr, overload from typing_extensions import LiteralString @@ -85,6 +88,8 @@ __all__ = [ "samestat", "commonpath", ] +if sys.version_info >= (3, 12): + __all__ += ["isjunction", "splitroot"] altsep: LiteralString diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi index 994595aae781..961858ce3c19 100644 --- a/mypy/typeshed/stdlib/os/__init__.pyi +++ b/mypy/typeshed/stdlib/os/__init__.pyi @@ -388,6 +388,8 @@ class DirEntry(Generic[AnyStr]): def __fspath__(self) -> AnyStr: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... + if sys.version_info >= (3, 12): + def is_junction(self) -> bool: ... @final class statvfs_result(structseq[int], tuple[int, int, int, int, int, int, int, int, int, int, int]): @@ -602,7 +604,12 @@ def isatty(__fd: int) -> bool: ... if sys.platform != "win32" and sys.version_info >= (3, 11): def login_tty(__fd: int) -> None: ... -def lseek(__fd: int, __position: int, __how: int) -> int: ... +if sys.version_info >= (3, 11): + def lseek(__fd: int, __position: int, __whence: int) -> int: ... + +else: + def lseek(__fd: int, __position: int, __how: int) -> int: ... + def open(path: StrOrBytesPath, flags: int, mode: int = 0o777, *, dir_fd: int | None = None) -> int: ... def pipe() -> tuple[int, int]: ... def read(__fd: int, __length: int) -> bytes: ... 
diff --git a/mypy/typeshed/stdlib/posixpath.pyi b/mypy/typeshed/stdlib/posixpath.pyi index 1945190be5f8..45a8ad7ec6a4 100644 --- a/mypy/typeshed/stdlib/posixpath.pyi +++ b/mypy/typeshed/stdlib/posixpath.pyi @@ -58,6 +58,8 @@ __all__ = [ "relpath", "commonpath", ] +if sys.version_info >= (3, 12): + __all__ += ["isjunction", "splitroot"] supports_unicode_filenames: bool # aliases (also in os) @@ -150,3 +152,10 @@ def isabs(s: StrOrBytesPath) -> bool: ... def islink(path: FileDescriptorOrPath) -> bool: ... def ismount(path: FileDescriptorOrPath) -> bool: ... def lexists(path: FileDescriptorOrPath) -> bool: ... + +if sys.version_info >= (3, 12): + def isjunction(path: StrOrBytesPath) -> bool: ... + @overload + def splitroot(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr, AnyOrLiteralStr]: ... + @overload + def splitroot(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr, AnyStr]: ... diff --git a/mypy/typeshed/stdlib/pydoc.pyi b/mypy/typeshed/stdlib/pydoc.pyi index 7791c977aa8b..1b09bcb059e4 100644 --- a/mypy/typeshed/stdlib/pydoc.pyi +++ b/mypy/typeshed/stdlib/pydoc.pyi @@ -198,7 +198,7 @@ def render_doc( thing: str | object, title: str = "Python Library Documentation: %s", forceload: bool = ..., renderer: Doc | None = None ) -> str: ... -if sys.version_info >= (3, 12): +if sys.version_info >= (3, 11): def doc( thing: str | object, title: str = "Python Library Documentation: %s", @@ -230,7 +230,7 @@ class Helper: def __call__(self, request: str | Helper | object = ...) -> None: ... def interact(self) -> None: ... def getline(self, prompt: str) -> str: ... - if sys.version_info >= (3, 12): + if sys.version_info >= (3, 11): def help(self, request: Any, is_cli: bool = False) -> None: ... else: def help(self, request: Any) -> None: ... 
diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi index 1c49b130e48f..73762cd75e79 100644 --- a/mypy/typeshed/stdlib/ssl.pyi +++ b/mypy/typeshed/stdlib/ssl.pyi @@ -201,12 +201,13 @@ class Options(enum.IntFlag): OP_NO_RENEGOTIATION: int if sys.version_info >= (3, 8): OP_ENABLE_MIDDLEBOX_COMPAT: int - if sys.platform == "linux": - OP_IGNORE_UNEXPECTED_EOF: int if sys.version_info >= (3, 12): OP_LEGACY_SERVER_CONNECT: int if sys.version_info >= (3, 12) and sys.platform != "linux": OP_ENABLE_KTLS: int + if sys.version_info >= (3, 11): + OP_IGNORE_UNEXPECTED_EOF: int + elif sys.version_info >= (3, 8) and sys.platform == "linux": OP_IGNORE_UNEXPECTED_EOF: int OP_ALL: Options @@ -224,12 +225,13 @@ OP_NO_TICKET: Options OP_NO_RENEGOTIATION: Options if sys.version_info >= (3, 8): OP_ENABLE_MIDDLEBOX_COMPAT: Options - if sys.platform == "linux": - OP_IGNORE_UNEXPECTED_EOF: Options if sys.version_info >= (3, 12): OP_LEGACY_SERVER_CONNECT: Options if sys.version_info >= (3, 12) and sys.platform != "linux": OP_ENABLE_KTLS: Options +if sys.version_info >= (3, 11): + OP_IGNORE_UNEXPECTED_EOF: Options +elif sys.version_info >= (3, 8) and sys.platform == "linux": OP_IGNORE_UNEXPECTED_EOF: Options HAS_NEVER_CHECK_COMMON_NAME: bool diff --git a/mypy/typeshed/stdlib/tempfile.pyi b/mypy/typeshed/stdlib/tempfile.pyi index ea04303683b5..61bcde24255b 100644 --- a/mypy/typeshed/stdlib/tempfile.pyi +++ b/mypy/typeshed/stdlib/tempfile.pyi @@ -321,7 +321,7 @@ else: dir: GenericPath[AnyStr] | None = None, ) -> IO[Any]: ... 
-class _TemporaryFileWrapper(Generic[AnyStr], IO[AnyStr]): +class _TemporaryFileWrapper(IO[AnyStr], Generic[AnyStr]): file: IO[AnyStr] # io.TextIOWrapper, io.BufferedReader or io.BufferedWriter name: str delete: bool diff --git a/mypy/typeshed/stdlib/unittest/__init__.pyi b/mypy/typeshed/stdlib/unittest/__init__.pyi index 33820c793fa5..f96d6fb185c5 100644 --- a/mypy/typeshed/stdlib/unittest/__init__.pyi +++ b/mypy/typeshed/stdlib/unittest/__init__.pyi @@ -65,5 +65,7 @@ if sys.version_info >= (3, 8): if sys.version_info >= (3, 11): __all__ += ["enterModuleContext", "doModuleCleanups"] -def load_tests(loader: TestLoader, tests: TestSuite, pattern: str | None) -> TestSuite: ... +if sys.version_info < (3, 12): + def load_tests(loader: TestLoader, tests: TestSuite, pattern: str | None) -> TestSuite: ... + def __dir__() -> set[str]: ... diff --git a/mypy/typeshed/stdlib/unittest/loader.pyi b/mypy/typeshed/stdlib/unittest/loader.pyi index f3850c939d07..202309ac1d93 100644 --- a/mypy/typeshed/stdlib/unittest/loader.pyi +++ b/mypy/typeshed/stdlib/unittest/loader.pyi @@ -1,3 +1,4 @@ +import sys import unittest.case import unittest.suite from collections.abc import Callable, Sequence @@ -18,7 +19,11 @@ class TestLoader: testNamePatterns: list[str] | None suiteClass: _SuiteClass def loadTestsFromTestCase(self, testCaseClass: type[unittest.case.TestCase]) -> unittest.suite.TestSuite: ... - def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: Any = None) -> unittest.suite.TestSuite: ... + if sys.version_info >= (3, 12): + def loadTestsFromModule(self, module: ModuleType, *, pattern: str | None = None) -> unittest.suite.TestSuite: ... + else: + def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: str | None = None) -> unittest.suite.TestSuite: ... + def loadTestsFromName(self, name: str, module: ModuleType | None = None) -> unittest.suite.TestSuite: ... 
def loadTestsFromNames(self, names: Sequence[str], module: ModuleType | None = None) -> unittest.suite.TestSuite: ... def getTestCaseNames(self, testCaseClass: type[unittest.case.TestCase]) -> Sequence[str]: ... diff --git a/mypy/typeshed/stdlib/unittest/main.pyi b/mypy/typeshed/stdlib/unittest/main.pyi index 6d970c920096..d29e9a2b8da8 100644 --- a/mypy/typeshed/stdlib/unittest/main.pyi +++ b/mypy/typeshed/stdlib/unittest/main.pyi @@ -1,3 +1,4 @@ +import sys import unittest.case import unittest.loader import unittest.result @@ -23,22 +24,43 @@ class TestProgram: progName: str | None warnings: str | None testNamePatterns: list[str] | None - def __init__( - self, - module: None | str | ModuleType = "__main__", - defaultTest: str | Iterable[str] | None = None, - argv: list[str] | None = None, - testRunner: type[_TestRunner] | _TestRunner | None = None, - testLoader: unittest.loader.TestLoader = ..., - exit: bool = True, - verbosity: int = 1, - failfast: bool | None = None, - catchbreak: bool | None = None, - buffer: bool | None = None, - warnings: str | None = None, - *, - tb_locals: bool = False, - ) -> None: ... + if sys.version_info >= (3, 12): + durations: unittest.result._DurationsType | None + def __init__( + self, + module: None | str | ModuleType = "__main__", + defaultTest: str | Iterable[str] | None = None, + argv: list[str] | None = None, + testRunner: type[_TestRunner] | _TestRunner | None = None, + testLoader: unittest.loader.TestLoader = ..., + exit: bool = True, + verbosity: int = 1, + failfast: bool | None = None, + catchbreak: bool | None = None, + buffer: bool | None = None, + warnings: str | None = None, + *, + tb_locals: bool = False, + durations: unittest.result._DurationsType | None = None, + ) -> None: ... 
+ else: + def __init__( + self, + module: None | str | ModuleType = "__main__", + defaultTest: str | Iterable[str] | None = None, + argv: list[str] | None = None, + testRunner: type[_TestRunner] | _TestRunner | None = None, + testLoader: unittest.loader.TestLoader = ..., + exit: bool = True, + verbosity: int = 1, + failfast: bool | None = None, + catchbreak: bool | None = None, + buffer: bool | None = None, + warnings: str | None = None, + *, + tb_locals: bool = False, + ) -> None: ... + def usageExit(self, msg: Any = None) -> None: ... def parseArgs(self, argv: list[str]) -> None: ... def createTests(self, from_discovery: bool = False, Loader: unittest.loader.TestLoader | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/unittest/result.pyi b/mypy/typeshed/stdlib/unittest/result.pyi index 8d78bc0f7dcf..dfc505936f59 100644 --- a/mypy/typeshed/stdlib/unittest/result.pyi +++ b/mypy/typeshed/stdlib/unittest/result.pyi @@ -1,9 +1,12 @@ +import sys import unittest.case from _typeshed import OptExcInfo from collections.abc import Callable from typing import Any, TextIO, TypeVar +from typing_extensions import TypeAlias _F = TypeVar("_F", bound=Callable[..., Any]) +_DurationsType: TypeAlias = list[tuple[str, float]] STDOUT_LINE: str STDERR_LINE: str @@ -22,6 +25,8 @@ class TestResult: buffer: bool failfast: bool tb_locals: bool + if sys.version_info >= (3, 12): + collectedDurations: _DurationsType def __init__(self, stream: TextIO | None = None, descriptions: bool | None = None, verbosity: int | None = None) -> None: ... def printErrors(self) -> None: ... def wasSuccessful(self) -> bool: ... @@ -37,3 +42,5 @@ class TestResult: def addExpectedFailure(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: ... def addUnexpectedSuccess(self, test: unittest.case.TestCase) -> None: ... def addSubTest(self, test: unittest.case.TestCase, subtest: unittest.case.TestCase, err: OptExcInfo | None) -> None: ... 
+ if sys.version_info >= (3, 12): + def addDuration(self, test: unittest.case.TestCase, elapsed: float) -> None: ... diff --git a/mypy/typeshed/stdlib/unittest/runner.pyi b/mypy/typeshed/stdlib/unittest/runner.pyi index c0ddcdb49208..0033083ac406 100644 --- a/mypy/typeshed/stdlib/unittest/runner.pyi +++ b/mypy/typeshed/stdlib/unittest/runner.pyi @@ -1,6 +1,8 @@ +import sys import unittest.case import unittest.result import unittest.suite +from _typeshed import Incomplete from collections.abc import Callable, Iterable from typing import TextIO from typing_extensions import TypeAlias @@ -14,23 +16,57 @@ class TextTestResult(unittest.result.TestResult): separator2: str showAll: bool # undocumented stream: TextIO # undocumented - def __init__(self, stream: TextIO, descriptions: bool, verbosity: int) -> None: ... + if sys.version_info >= (3, 12): + durations: unittest.result._DurationsType | None + def __init__( + self, stream: TextIO, descriptions: bool, verbosity: int, *, durations: unittest.result._DurationsType | None = None + ) -> None: ... + else: + def __init__(self, stream: TextIO, descriptions: bool, verbosity: int) -> None: ... + def getDescription(self, test: unittest.case.TestCase) -> str: ... def printErrorList(self, flavour: str, errors: Iterable[tuple[unittest.case.TestCase, str]]) -> None: ... class TextTestRunner: resultclass: _ResultClassType - def __init__( - self, - stream: TextIO | None = None, - descriptions: bool = True, - verbosity: int = 1, - failfast: bool = False, - buffer: bool = False, - resultclass: _ResultClassType | None = None, - warnings: type[Warning] | None = None, - *, - tb_locals: bool = False, - ) -> None: ... 
+ # TODO: add `_WritelnDecorator` type + # stream: _WritelnDecorator + stream: Incomplete + descriptions: bool + verbosity: int + failfast: bool + buffer: bool + warnings: str | None + tb_locals: bool + + if sys.version_info >= (3, 12): + durations: unittest.result._DurationsType | None + def __init__( + self, + stream: TextIO | None = None, + descriptions: bool = True, + verbosity: int = 1, + failfast: bool = False, + buffer: bool = False, + resultclass: _ResultClassType | None = None, + warnings: str | None = None, + *, + tb_locals: bool = False, + durations: unittest.result._DurationsType | None = None, + ) -> None: ... + else: + def __init__( + self, + stream: TextIO | None = None, + descriptions: bool = True, + verbosity: int = 1, + failfast: bool = False, + buffer: bool = False, + resultclass: _ResultClassType | None = None, + warnings: str | None = None, + *, + tb_locals: bool = False, + ) -> None: ... + def _makeResult(self) -> unittest.result.TestResult: ... def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> unittest.result.TestResult: ... From d440490270b643b2be333b5b27b154813f016ab6 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 1 Sep 2023 03:40:24 -0700 Subject: [PATCH 013/144] Deduplicate iterable logic (#16006) This e.g. 
makes sure both code paths receive my fix in #15688 --- mypy/checker.py | 35 ++++++++++++----------------------- 1 file changed, 12 insertions(+), 23 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index fffa87c4f634..fa7c645873d0 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -41,7 +41,6 @@ from mypy.errorcodes import TYPE_VAR, UNUSED_AWAITABLE, UNUSED_COROUTINE, ErrorCode from mypy.errors import Errors, ErrorWatcher, report_internal_error from mypy.expandtype import expand_self_type, expand_type, expand_type_by_instance -from mypy.join import join_types from mypy.literals import Key, extract_var_from_literal_hash, literal, literal_hash from mypy.maptype import map_instance_to_supertype from mypy.meet import is_overlapping_erased_types, is_overlapping_types @@ -4653,42 +4652,32 @@ def analyze_async_iterable_item_type(self, expr: Expression) -> tuple[Type, Type def analyze_iterable_item_type(self, expr: Expression) -> tuple[Type, Type]: """Analyse iterable expression and return iterator and iterator item types.""" - echk = self.expr_checker - iterable = get_proper_type(echk.accept(expr)) - iterator = echk.check_method_call_by_name("__iter__", iterable, [], [], expr)[0] - + iterator, iterable = self.analyze_iterable_item_type_without_expression( + self.expr_checker.accept(expr), context=expr + ) int_type = self.analyze_range_native_int_type(expr) if int_type: return iterator, int_type - - if ( - isinstance(iterable, TupleType) - and iterable.partial_fallback.type.fullname == "builtins.tuple" - ): - return iterator, tuple_fallback(iterable).args[0] - else: - # Non-tuple iterable. 
- return iterator, echk.check_method_call_by_name("__next__", iterator, [], [], expr)[0] + return iterator, iterable def analyze_iterable_item_type_without_expression( self, type: Type, context: Context ) -> tuple[Type, Type]: """Analyse iterable type and return iterator and iterator item types.""" echk = self.expr_checker + iterable: Type iterable = get_proper_type(type) iterator = echk.check_method_call_by_name("__iter__", iterable, [], [], context)[0] - if isinstance(iterable, TupleType): - joined: Type = UninhabitedType() - for item in iterable.items: - joined = join_types(joined, item) - return iterator, joined + if ( + isinstance(iterable, TupleType) + and iterable.partial_fallback.type.fullname == "builtins.tuple" + ): + return iterator, tuple_fallback(iterable).args[0] else: # Non-tuple iterable. - return ( - iterator, - echk.check_method_call_by_name("__next__", iterator, [], [], context)[0], - ) + iterable = echk.check_method_call_by_name("__next__", iterator, [], [], context)[0] + return iterator, iterable def analyze_range_native_int_type(self, expr: Expression) -> Type | None: """Try to infer native int item type from arguments to range(...). 
From 803f61097b0eba6505c976d72ce2176b8c64d987 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 1 Sep 2023 15:33:01 -0700 Subject: [PATCH 014/144] Fix crash when parsing error code config with typo (#16005) Fixes #16002 --- mypy/config_parser.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/mypy/config_parser.py b/mypy/config_parser.py index a84f3594a0d2..7748c3b25966 100644 --- a/mypy/config_parser.py +++ b/mypy/config_parser.py @@ -434,11 +434,26 @@ def parse_section( """ results: dict[str, object] = {} report_dirs: dict[str, str] = {} + + # Because these fields exist on Options, without proactive checking, we would accept them + # and crash later + invalid_options = { + "enabled_error_codes": "enable_error_code", + "disabled_error_codes": "disable_error_code", + } + for key in section: invert = False options_key = key if key in config_types: ct = config_types[key] + elif key in invalid_options: + print( + f"{prefix}Unrecognized option: {key} = {section[key]}" + f" (did you mean {invalid_options[key]}?)", + file=stderr, + ) + continue else: dv = None # We have to keep new_semantic_analyzer in Options From 0c29507e6ef870eb96da222a734dc8ef8e5fbe24 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 1 Sep 2023 19:16:11 -0700 Subject: [PATCH 015/144] Make PEP 695 constructs give a reasonable error message (#16013) Mypy does not yet support PEP 695 Fixes #16011, linking #15238 --- mypy/fastparse.py | 31 +++++++++++++++ mypy/test/helpers.py | 4 +- mypy/test/testcheck.py | 2 + test-data/unit/check-python312.test | 59 +++++++++++++++++++++++++++++ 4 files changed, 95 insertions(+), 1 deletion(-) create mode 100644 test-data/unit/check-python312.test diff --git a/mypy/fastparse.py b/mypy/fastparse.py index 6aa626afb81e..a96e697d40bf 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -144,6 +144,11 @@ def ast3_parse( NamedExpr = ast3.NamedExpr 
Constant = ast3.Constant +if sys.version_info >= (3, 12): + ast_TypeAlias = ast3.TypeAlias +else: + ast_TypeAlias = Any + if sys.version_info >= (3, 10): Match = ast3.Match MatchValue = ast3.MatchValue @@ -936,6 +941,14 @@ def do_func_def( arg_types = [AnyType(TypeOfAny.from_error)] * len(args) return_type = AnyType(TypeOfAny.from_error) else: + if sys.version_info >= (3, 12) and n.type_params: + self.fail( + ErrorMessage("PEP 695 generics are not yet supported", code=codes.VALID_TYPE), + n.type_params[0].lineno, + n.type_params[0].col_offset, + blocker=False, + ) + arg_types = [a.type_annotation for a in args] return_type = TypeConverter( self.errors, line=n.returns.lineno if n.returns else lineno @@ -1110,6 +1123,14 @@ def visit_ClassDef(self, n: ast3.ClassDef) -> ClassDef: self.class_and_function_stack.append("C") keywords = [(kw.arg, self.visit(kw.value)) for kw in n.keywords if kw.arg] + if sys.version_info >= (3, 12) and n.type_params: + self.fail( + ErrorMessage("PEP 695 generics are not yet supported", code=codes.VALID_TYPE), + n.type_params[0].lineno, + n.type_params[0].col_offset, + blocker=False, + ) + cdef = ClassDef( n.name, self.as_required_block(n.body), @@ -1717,6 +1738,16 @@ def visit_MatchOr(self, n: MatchOr) -> OrPattern: node = OrPattern([self.visit(pattern) for pattern in n.patterns]) return self.set_line(node, n) + def visit_TypeAlias(self, n: ast_TypeAlias) -> AssignmentStmt: + self.fail( + ErrorMessage("PEP 695 type aliases are not yet supported", code=codes.VALID_TYPE), + n.lineno, + n.col_offset, + blocker=False, + ) + node = AssignmentStmt([NameExpr(n.name.id)], self.visit(n.value)) + return self.set_line(node, n) + class TypeConverter: def __init__( diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py index d1850219e60a..7447391593d5 100644 --- a/mypy/test/helpers.py +++ b/mypy/test/helpers.py @@ -241,7 +241,9 @@ def num_skipped_suffix_lines(a1: list[str], a2: list[str]) -> int: def testfile_pyversion(path: str) -> tuple[int, int]: - 
if path.endswith("python311.test"): + if path.endswith("python312.test"): + return 3, 12 + elif path.endswith("python311.test"): return 3, 11 elif path.endswith("python310.test"): return 3, 10 diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index 7b81deeafe9d..b20e8cc25f3d 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -43,6 +43,8 @@ typecheck_files.remove("check-python310.test") if sys.version_info < (3, 11): typecheck_files.remove("check-python311.test") +if sys.version_info < (3, 12): + typecheck_files.remove("check-python312.test") # Special tests for platforms with case-insensitive filesystems. if sys.platform not in ("darwin", "win32"): diff --git a/test-data/unit/check-python312.test b/test-data/unit/check-python312.test new file mode 100644 index 000000000000..91aca7794071 --- /dev/null +++ b/test-data/unit/check-python312.test @@ -0,0 +1,59 @@ +[case test695TypeAlias] +type MyInt = int # E: PEP 695 type aliases are not yet supported + +def f(x: MyInt) -> MyInt: + return reveal_type(x) # N: Revealed type is "builtins.int" + +type MyList[T] = list[T] # E: PEP 695 type aliases are not yet supported \ + # E: Name "T" is not defined + +def g(x: MyList[int]) -> MyList[int]: # E: Variable "__main__.MyList" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases + return reveal_type(x) # N: Revealed type is "MyList?[builtins.int]" + +[case test695Class] +class MyGen[T]: # E: PEP 695 generics are not yet supported + def __init__(self, x: T) -> None: # E: Name "T" is not defined + self.x = x + +def f(x: MyGen[int]): # E: "MyGen" expects no type arguments, but 1 given + reveal_type(x.x) # N: Revealed type is "Any" + +[case test695Function] +def f[T](x: T) -> T: # E: PEP 695 generics are not yet supported \ + # E: Name "T" is not defined + return reveal_type(x) # N: Revealed type is "Any" + +reveal_type(f(1)) # N: Revealed type is "Any" + +async def g[T](x: T) -> T: # E: 
PEP 695 generics are not yet supported \ + # E: Name "T" is not defined + return reveal_type(x) # N: Revealed type is "Any" + +reveal_type(g(1)) # E: Value of type "Coroutine[Any, Any, Any]" must be used \ + # N: Are you missing an await? \ + # N: Revealed type is "typing.Coroutine[Any, Any, Any]" + +[case test695TypeVar] +from typing import Callable +type Alias1[T: int] = list[T] # E: PEP 695 type aliases are not yet supported +type Alias2[**P] = Callable[P, int] # E: PEP 695 type aliases are not yet supported \ + # E: Value of type "int" is not indexable \ + # E: Name "P" is not defined +type Alias3[*Ts] = tuple[*Ts] # E: PEP 695 type aliases are not yet supported \ + # E: Type expected within [...] \ + # E: The type "Type[Tuple[Any, ...]]" is not generic and not indexable \ + # E: Name "Ts" is not defined + +class Cls1[T: int]: ... # E: PEP 695 generics are not yet supported +class Cls2[**P]: ... # E: PEP 695 generics are not yet supported +class Cls3[*Ts]: ... # E: PEP 695 generics are not yet supported + +def func1[T: int](x: T) -> T: ... # E: PEP 695 generics are not yet supported +def func2[**P](x: Callable[P, int]) -> Callable[P, str]: ... # E: PEP 695 generics are not yet supported \ + # E: The first argument to Callable must be a list of types, parameter specification, or "..." \ + # N: See https://mypy.readthedocs.io/en/stable/kinds_of_types.html#callable-types-and-lambdas \ + # E: Name "P" is not defined +def func3[*Ts](x: tuple[*Ts]) -> tuple[int, *Ts]: ... # E: PEP 695 generics are not yet supported \ + # E: Name "Ts" is not defined +[builtins fixtures/tuple.pyi] From 17e9e228f2efaeab2ca063cca44411feaa370dd5 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 1 Sep 2023 22:47:37 -0700 Subject: [PATCH 016/144] Match note error codes to import error codes (#16004) Fixes #16003. 
Follow up to #14740 --- mypy/build.py | 2 +- mypy/errors.py | 2 +- mypy/report.py | 2 +- mypy/test/testcheck.py | 2 +- mypy/test/testcmdline.py | 2 +- mypy/test/testreports.py | 4 ++-- test-data/unit/pep561.test | 1 + 7 files changed, 8 insertions(+), 7 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index 525d5f436e7e..39629c2dc455 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -2798,7 +2798,7 @@ def module_not_found( for note in notes: if "{stub_dist}" in note: note = note.format(stub_dist=stub_distribution_name(module)) - errors.report(line, 0, note, severity="note", only_once=True, code=codes.IMPORT) + errors.report(line, 0, note, severity="note", only_once=True, code=code) if reason is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED: manager.missing_stub_packages.add(stub_distribution_name(module)) errors.set_import_context(save_import_context) diff --git a/mypy/errors.py b/mypy/errors.py index 680b7f1d31ea..a678b790cb8c 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -469,7 +469,7 @@ def _add_error_info(self, file: str, info: ErrorInfo) -> None: self.error_info_map[file].append(info) if info.blocker: self.has_blockers.add(file) - if info.code is IMPORT: + if info.code in (IMPORT, IMPORT_UNTYPED, IMPORT_NOT_FOUND): self.seen_import_error = True def _filter_error(self, file: str, info: ErrorInfo) -> bool: diff --git a/mypy/report.py b/mypy/report.py index 5d93351aa37d..d5f16464c0fb 100644 --- a/mypy/report.py +++ b/mypy/report.py @@ -25,7 +25,7 @@ from mypy.version import __version__ try: - from lxml import etree # type: ignore[import] + from lxml import etree # type: ignore[import-untyped] LXML_INSTALLED = True except ImportError: diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index b20e8cc25f3d..85fbe5dc2990 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -26,7 +26,7 @@ from mypy.test.update_data import update_testcase_output try: - import lxml # type: ignore[import] + import lxml # type: 
ignore[import-untyped] except ImportError: lxml = None diff --git a/mypy/test/testcmdline.py b/mypy/test/testcmdline.py index 30ecef07a821..9bc02d319964 100644 --- a/mypy/test/testcmdline.py +++ b/mypy/test/testcmdline.py @@ -20,7 +20,7 @@ ) try: - import lxml # type: ignore[import] + import lxml # type: ignore[import-untyped] except ImportError: lxml = None diff --git a/mypy/test/testreports.py b/mypy/test/testreports.py index a422b4bb2a7b..5ff315f83ba8 100644 --- a/mypy/test/testreports.py +++ b/mypy/test/testreports.py @@ -7,7 +7,7 @@ from mypy.test.helpers import Suite, assert_equal try: - import lxml # type: ignore[import] + import lxml # type: ignore[import-untyped] except ImportError: lxml = None @@ -22,7 +22,7 @@ def test_get_line_rate(self) -> None: @pytest.mark.skipif(lxml is None, reason="Cannot import lxml. Is it installed?") def test_as_xml(self) -> None: - import lxml.etree as etree # type: ignore[import] + import lxml.etree as etree # type: ignore[import-untyped] cobertura_package = CoberturaPackage("foobar") cobertura_package.covered_lines = 21 diff --git a/test-data/unit/pep561.test b/test-data/unit/pep561.test index e8ebbd03dca7..9969c2894c36 100644 --- a/test-data/unit/pep561.test +++ b/test-data/unit/pep561.test @@ -167,6 +167,7 @@ a.bf(False) b.bf(False) a.bf(1) b.bf(1) +import typedpkg_ns.whatever as c # type: ignore[import-untyped] [out] testNamespacePkgWStubs.py:4: error: Skipping analyzing "typedpkg_ns.b.bbb": module is installed, but missing library stubs or py.typed marker testNamespacePkgWStubs.py:4: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports From 6884aa2b27f7ec25ebeffab9e39b35807642a9d2 Mon Sep 17 00:00:00 2001 From: DS/Charlie <82801887+ds-cbo@users.noreply.github.com> Date: Sat, 2 Sep 2023 08:57:28 +0200 Subject: [PATCH 017/144] Fix case Any() in match statement (#14479) Fixes #14477 --- mypy/checkpattern.py | 6 ++++++ test-data/unit/check-python310.test | 10 ++++++++++ 2 files changed, 16 
insertions(+) diff --git a/mypy/checkpattern.py b/mypy/checkpattern.py index e432675b0b5a..3f9a99b21530 100644 --- a/mypy/checkpattern.py +++ b/mypy/checkpattern.py @@ -462,6 +462,12 @@ def visit_class_pattern(self, o: ClassPattern) -> PatternType: typ: Type = Instance(type_info, [any_type] * len(type_info.defn.type_vars)) elif isinstance(type_info, TypeAlias): typ = type_info.target + elif ( + isinstance(type_info, Var) + and type_info.type is not None + and isinstance(get_proper_type(type_info.type), AnyType) + ): + typ = type_info.type else: if isinstance(type_info, Var) and type_info.type is not None: name = type_info.type.str_with_options(self.options) diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test index f81da23d148c..640e64c78d5f 100644 --- a/test-data/unit/check-python310.test +++ b/test-data/unit/check-python310.test @@ -896,6 +896,16 @@ match m: reveal_type(i) reveal_type(j) +[case testMatchClassPatternAny] +from typing import Any + +Foo: Any +m: object + +match m: + case Foo(): + pass + [case testMatchClassPatternNestedGenerics] # From cpython test_patma.py x = [[{0: 0}]] From 1655b0ce16af04cc76cf769a693366e9206a03de Mon Sep 17 00:00:00 2001 From: Albert Tugushev Date: Sat, 2 Sep 2023 09:02:40 +0200 Subject: [PATCH 018/144] Reword the error message related to void functions (#15876) Fixes #3226. Aims to provide better assistance to users who may be confused when their void functions technically return None. 
Co-authored-by: Ilya Priven Co-authored-by: hauntsaninja --- docs/source/error_code_list.rst | 2 +- mypy/messages.py | 15 ++---- test-data/unit/check-errorcodes.test | 6 +-- test-data/unit/check-expressions.test | 66 +++++++++++++------------- test-data/unit/check-functions.test | 2 +- test-data/unit/check-inference.test | 8 ++-- test-data/unit/check-optional.test | 6 +-- test-data/unit/check-tuples.test | 4 +- test-data/unit/check-varargs.test | 4 +- test-data/unit/pythoneval-asyncio.test | 2 +- 10 files changed, 54 insertions(+), 61 deletions(-) diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst index 1f75ac54d525..a865a4dd1532 100644 --- a/docs/source/error_code_list.rst +++ b/docs/source/error_code_list.rst @@ -741,7 +741,7 @@ returns ``None``: # OK: we don't do anything with the return value f() - # Error: "f" does not return a value [func-returns-value] + # Error: "f" does not return a value (it only ever returns None) [func-returns-value] if f(): print("not false") diff --git a/mypy/messages.py b/mypy/messages.py index cda4cda25ee4..4b71bd876dcc 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1021,18 +1021,11 @@ def duplicate_argument_value(self, callee: CallableType, index: int, context: Co def does_not_return_value(self, callee_type: Type | None, context: Context) -> None: """Report an error about use of an unusable type.""" - name: str | None = None callee_type = get_proper_type(callee_type) - if isinstance(callee_type, FunctionLike): - name = callable_name(callee_type) - if name is not None: - self.fail( - f"{capitalize(name)} does not return a value", - context, - code=codes.FUNC_RETURNS_VALUE, - ) - else: - self.fail("Function does not return a value", context, code=codes.FUNC_RETURNS_VALUE) + callee_name = callable_name(callee_type) if isinstance(callee_type, FunctionLike) else None + name = callee_name or "Function" + message = f"{name} does not return a value (it only ever returns None)" + self.fail(message, 
context, code=codes.FUNC_RETURNS_VALUE) def deleted_as_rvalue(self, typ: DeletedType, context: Context) -> None: """Report an error about using an deleted type as an rvalue.""" diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index 796e1c1ea98e..df14e328ed72 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -553,15 +553,15 @@ from typing import Callable def f() -> None: pass -x = f() # E: "f" does not return a value [func-returns-value] +x = f() # E: "f" does not return a value (it only ever returns None) [func-returns-value] class A: def g(self) -> None: pass -y = A().g() # E: "g" of "A" does not return a value [func-returns-value] +y = A().g() # E: "g" of "A" does not return a value (it only ever returns None) [func-returns-value] c: Callable[[], None] -z = c() # E: Function does not return a value [func-returns-value] +z = c() # E: Function does not return a value (it only ever returns None) [func-returns-value] [case testErrorCodeInstantiateAbstract] from abc import abstractmethod diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index c213255997f8..a3c1bc8795f2 100644 --- a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -1079,15 +1079,15 @@ class A: a: A o: object if int(): - a = f() # E: "f" does not return a value + a = f() # E: "f" does not return a value (it only ever returns None) if int(): - o = a() # E: Function does not return a value + o = a() # E: Function does not return a value (it only ever returns None) if int(): - o = A().g(a) # E: "g" of "A" does not return a value + o = A().g(a) # E: "g" of "A" does not return a value (it only ever returns None) if int(): - o = A.g(a, a) # E: "g" of "A" does not return a value -A().g(f()) # E: "f" does not return a value -x: A = f() # E: "f" does not return a value + o = A.g(a, a) # E: "g" of "A" does not return a value (it only ever returns None) 
+A().g(f()) # E: "f" does not return a value (it only ever returns None) +x: A = f() # E: "f" does not return a value (it only ever returns None) f() A().g(a) [builtins fixtures/tuple.pyi] @@ -1096,15 +1096,15 @@ A().g(a) import typing def f() -> None: pass -if f(): # E: "f" does not return a value +if f(): # E: "f" does not return a value (it only ever returns None) pass -elif f(): # E: "f" does not return a value +elif f(): # E: "f" does not return a value (it only ever returns None) pass -while f(): # E: "f" does not return a value +while f(): # E: "f" does not return a value (it only ever returns None) pass def g() -> object: - return f() # E: "f" does not return a value -raise f() # E: "f" does not return a value + return f() # E: "f" does not return a value (it only ever returns None) +raise f() # E: "f" does not return a value (it only ever returns None) [builtins fixtures/exception.pyi] [case testNoneReturnTypeWithExpressions] @@ -1115,13 +1115,13 @@ class A: def __add__(self, x: 'A') -> 'A': pass a: A -[f()] # E: "f" does not return a value -f() + a # E: "f" does not return a value -a + f() # E: "f" does not return a value -f() == a # E: "f" does not return a value -a != f() # E: "f" does not return a value +[f()] # E: "f" does not return a value (it only ever returns None) +f() + a # E: "f" does not return a value (it only ever returns None) +a + f() # E: "f" does not return a value (it only ever returns None) +f() == a # E: "f" does not return a value (it only ever returns None) +a != f() # E: "f" does not return a value (it only ever returns None) cast(A, f()) -f().foo # E: "f" does not return a value +f().foo # E: "f" does not return a value (it only ever returns None) [builtins fixtures/list.pyi] [case testNoneReturnTypeWithExpressions2] @@ -1134,14 +1134,14 @@ class A: a: A b: bool -f() in a # E: "f" does not return a value # E: Unsupported right operand type for in ("A") -a < f() # E: "f" does not return a value -f() <= a # E: "f" does not return a 
value -a in f() # E: "f" does not return a value --f() # E: "f" does not return a value -not f() # E: "f" does not return a value -f() and b # E: "f" does not return a value -b or f() # E: "f" does not return a value +f() in a # E: "f" does not return a value (it only ever returns None) # E: Unsupported right operand type for in ("A") +a < f() # E: "f" does not return a value (it only ever returns None) +f() <= a # E: "f" does not return a value (it only ever returns None) +a in f() # E: "f" does not return a value (it only ever returns None) +-f() # E: "f" does not return a value (it only ever returns None) +not f() # E: "f" does not return a value (it only ever returns None) +f() and b # E: "f" does not return a value (it only ever returns None) +b or f() # E: "f" does not return a value (it only ever returns None) [builtins fixtures/bool.pyi] @@ -1441,7 +1441,7 @@ if int(): [case testConditionalExpressionWithEmptyCondition] import typing def f() -> None: pass -x = 1 if f() else 2 # E: "f" does not return a value +x = 1 if f() else 2 # E: "f" does not return a value (it only ever returns None) [case testConditionalExpressionWithSubtyping] import typing @@ -1504,7 +1504,7 @@ from typing import List, Union x = [] y = "" x.append(y) if bool() else x.append(y) -z = x.append(y) if bool() else x.append(y) # E: "append" of "list" does not return a value +z = x.append(y) if bool() else x.append(y) # E: "append" of "list" does not return a value (it only ever returns None) [builtins fixtures/list.pyi] -- Special cases @@ -1604,7 +1604,7 @@ def f(x: int) -> None: [builtins fixtures/for.pyi] [out] main:1: error: The return type of a generator function should be "Generator" or one of its supertypes -main:2: error: "f" does not return a value +main:2: error: "f" does not return a value (it only ever returns None) main:2: error: Argument 1 to "f" has incompatible type "str"; expected "int" [case testYieldExpressionWithNone] @@ -1624,7 +1624,7 @@ from typing import Iterator def 
f() -> Iterator[int]: yield 5 def g() -> Iterator[int]: - a = yield from f() # E: Function does not return a value + a = yield from f() # E: Function does not return a value (it only ever returns None) [case testYieldFromGeneratorHasValue] from typing import Iterator, Generator @@ -1639,12 +1639,12 @@ def g() -> Iterator[int]: [case testYieldFromTupleExpression] from typing import Generator def g() -> Generator[int, None, None]: - x = yield from () # E: Function does not return a value - x = yield from (0, 1, 2) # E: Function does not return a value + x = yield from () # E: Function does not return a value (it only ever returns None) + x = yield from (0, 1, 2) # E: Function does not return a value (it only ever returns None) x = yield from (0, "ERROR") # E: Incompatible types in "yield from" (actual type "object", expected type "int") \ - # E: Function does not return a value + # E: Function does not return a value (it only ever returns None) x = yield from ("ERROR",) # E: Incompatible types in "yield from" (actual type "str", expected type "int") \ - # E: Function does not return a value + # E: Function does not return a value (it only ever returns None) [builtins fixtures/tuple.pyi] -- dict(...) 
diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index 4cc523a595d1..cd098a84d4d3 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -250,7 +250,7 @@ if int(): if int(): f = o # E: Incompatible types in assignment (expression has type "object", variable has type "Callable[[], None]") if int(): - f = f() # E: Function does not return a value + f = f() # E: Function does not return a value (it only ever returns None) if int(): f = f diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 36b028977591..f9a4d58c74af 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -33,8 +33,8 @@ class B: pass [case testLvarInitializedToVoid] import typing def f() -> None: - a = g() # E: "g" does not return a value - #b, c = g() # "g" does not return a value TODO + a = g() # E: "g" does not return a value (it only ever returns None) + #b, c = g() # "g" does not return a value (it only ever returns None) TODO def g() -> None: pass [out] @@ -1180,7 +1180,7 @@ for e, f in [[]]: # E: Need type annotation for "e" \ [case testForStatementInferenceWithVoid] def f() -> None: pass -for x in f(): # E: "f" does not return a value +for x in f(): # E: "f" does not return a value (it only ever returns None) pass [builtins fixtures/for.pyi] @@ -2118,7 +2118,7 @@ arr = [] arr.append(arr.append(1)) [builtins fixtures/list.pyi] [out] -main:3: error: "append" of "list" does not return a value +main:3: error: "append" of "list" does not return a value (it only ever returns None) -- Multipass -- --------- diff --git a/test-data/unit/check-optional.test b/test-data/unit/check-optional.test index ae247b0047f1..70f3c4486e14 100644 --- a/test-data/unit/check-optional.test +++ b/test-data/unit/check-optional.test @@ -361,9 +361,9 @@ def f() -> None: def g(x: Optional[int]) -> int: pass -x = f() # E: "f" does not return a value -f() + 1 # E: "f" does 
not return a value -g(f()) # E: "f" does not return a value +x = f() # E: "f" does not return a value (it only ever returns None) +f() + 1 # E: "f" does not return a value (it only ever returns None) +g(f()) # E: "f" does not return a value (it only ever returns None) [case testEmptyReturn] def f() -> None: diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index cff261774663..0e7c81edc498 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -169,8 +169,8 @@ class C(B): pass import typing def f() -> None: pass -(None, f()) # E: "f" does not return a value -(f(), None) # E: "f" does not return a value +(None, f()) # E: "f" does not return a value (it only ever returns None) +(f(), None) # E: "f" does not return a value (it only ever returns None) [builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test index 54546f3973b3..ef2c3c57fad5 100644 --- a/test-data/unit/check-varargs.test +++ b/test-data/unit/check-varargs.test @@ -52,8 +52,8 @@ c: C f(c) # E: Argument 1 to "f" has incompatible type "C"; expected "A" f(a, b, c) # E: Argument 3 to "f" has incompatible type "C"; expected "A" -f(g()) # E: "g" does not return a value -f(a, g()) # E: "g" does not return a value +f(g()) # E: "g" does not return a value (it only ever returns None) +f(a, g()) # E: "g" does not return a value (it only ever returns None) f() f(a) f(b) diff --git a/test-data/unit/pythoneval-asyncio.test b/test-data/unit/pythoneval-asyncio.test index 069374b9635c..4a185557495b 100644 --- a/test-data/unit/pythoneval-asyncio.test +++ b/test-data/unit/pythoneval-asyncio.test @@ -261,7 +261,7 @@ try: finally: loop.close() [out] -_program.py:11: error: Function does not return a value +_program.py:11: error: Function does not return a value (it only ever returns None) [case testErrorReturnIsNotTheSameType] from typing import Any From 5adf934804c512c37a9506aeae426622abf92cb5 Mon Sep 17 
00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 2 Sep 2023 00:53:35 -0700 Subject: [PATCH 019/144] Fix crash with report generation on namespace packages (again) (#16019) Fixes #15979. Fix is similar to that in `iterate_python_lines`. --- mypy/report.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/mypy/report.py b/mypy/report.py index d5f16464c0fb..86fcee0521a6 100644 --- a/mypy/report.py +++ b/mypy/report.py @@ -171,8 +171,12 @@ def on_file( ) -> None: # Count physical lines. This assumes the file's encoding is a # superset of ASCII (or at least uses \n in its line endings). - with open(tree.path, "rb") as f: - physical_lines = len(f.readlines()) + try: + with open(tree.path, "rb") as f: + physical_lines = len(f.readlines()) + except IsADirectoryError: + # can happen with namespace packages + physical_lines = 0 func_counter = FuncCounterVisitor() tree.accept(func_counter) From fb32db7237ec1847960c93cfb17c8f24182d5d77 Mon Sep 17 00:00:00 2001 From: Ilya Priven Date: Sat, 2 Sep 2023 17:09:03 -0400 Subject: [PATCH 020/144] docs: document dataclass_transform behavior (#16017) Document behavior discussed in https://github.com/python/typing/discussions/1456. --------- Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> --- docs/source/additional_features.rst | 45 +++++++++++++++++++++-------- 1 file changed, 33 insertions(+), 12 deletions(-) diff --git a/docs/source/additional_features.rst b/docs/source/additional_features.rst index 5dd136476eaa..ae625c157654 100644 --- a/docs/source/additional_features.rst +++ b/docs/source/additional_features.rst @@ -71,12 +71,12 @@ and :pep:`557`. 
Caveats/Known Issues ==================== -Some functions in the :py:mod:`dataclasses` module, such as :py:func:`~dataclasses.replace` and :py:func:`~dataclasses.asdict`, +Some functions in the :py:mod:`dataclasses` module, such as :py:func:`~dataclasses.asdict`, have imprecise (too permissive) types. This will be fixed in future releases. Mypy does not yet recognize aliases of :py:func:`dataclasses.dataclass `, and will -probably never recognize dynamically computed decorators. The following examples -do **not** work: +probably never recognize dynamically computed decorators. The following example +does **not** work: .. code-block:: python @@ -94,16 +94,37 @@ do **not** work: """ attribute: int - @dataclass_wrapper - class DynamicallyDecorated: - """ - Mypy doesn't recognize this as a dataclass because it is decorated by a - function returning `dataclass` rather than by `dataclass` itself. - """ - attribute: int - AliasDecorated(attribute=1) # error: Unexpected keyword argument - DynamicallyDecorated(attribute=1) # error: Unexpected keyword argument + + +To have Mypy recognize a wrapper of :py:func:`dataclasses.dataclass ` +as a dataclass decorator, consider using the :py:func:`~typing.dataclass_transform` decorator: + +.. code-block:: python + + from dataclasses import dataclass, Field + from typing import TypeVar, dataclass_transform + + T = TypeVar('T') + + @dataclass_transform(field_specifiers=(Field,)) + def my_dataclass(cls: type[T]) -> type[T]: + ... + return dataclass(cls) + + +Data Class Transforms +********************* + +Mypy supports the :py:func:`~typing.dataclass_transform` decorator as described in +`PEP 681 `_. + +.. 
note:: + + Pragmatically, mypy will assume such classes have the internal attribute :code:`__dataclass_fields__` + (even though they might lack it in runtime) and will assume functions such as :py:func:`dataclasses.is_dataclass` + and :py:func:`dataclasses.fields` treat them as if they were dataclasses + (even though they may fail at runtime). .. _attrs_package: From 6a6d2e8a2d919af7557063de8f1faa580969b011 Mon Sep 17 00:00:00 2001 From: Ilya Priven Date: Sat, 2 Sep 2023 17:09:34 -0400 Subject: [PATCH 021/144] meta tests: refactor run_pytest (#15481) Factor `run_pytest` out of mypy/test/meta/test_*.py. --- mypy/test/meta/_pytest.py | 72 ++++++++++++++++++++++++++++++ mypy/test/meta/test_parse_data.py | 65 +++++++-------------------- mypy/test/meta/test_update_data.py | 53 +++++++--------------- 3 files changed, 104 insertions(+), 86 deletions(-) create mode 100644 mypy/test/meta/_pytest.py diff --git a/mypy/test/meta/_pytest.py b/mypy/test/meta/_pytest.py new file mode 100644 index 000000000000..b8648f033143 --- /dev/null +++ b/mypy/test/meta/_pytest.py @@ -0,0 +1,72 @@ +import shlex +import subprocess +import sys +import textwrap +import uuid +from dataclasses import dataclass +from pathlib import Path +from typing import Iterable + +from mypy.test.config import test_data_prefix + + +@dataclass +class PytestResult: + input: str + input_updated: str # any updates made by --update-data + stdout: str + stderr: str + + +def dedent_docstring(s: str) -> str: + return textwrap.dedent(s).lstrip() + + +def run_pytest_data_suite( + data_suite: str, + *, + data_file_prefix: str = "check", + pytest_node_prefix: str = "mypy/test/testcheck.py::TypeCheckSuite", + extra_args: Iterable[str], + max_attempts: int, +) -> PytestResult: + """ + Runs a suite of data test cases through pytest until either tests pass + or until a maximum number of attempts (needed for incremental tests). + + :param data_suite: the actual "suite" i.e. 
the contents of a .test file + """ + p_test_data = Path(test_data_prefix) + p_root = p_test_data.parent.parent + p = p_test_data / f"{data_file_prefix}-meta-{uuid.uuid4()}.test" + assert not p.exists() + data_suite = dedent_docstring(data_suite) + try: + p.write_text(data_suite) + + test_nodeid = f"{pytest_node_prefix}::{p.name}" + extra_args = [sys.executable, "-m", "pytest", "-n", "0", "-s", *extra_args, test_nodeid] + cmd = shlex.join(extra_args) + for i in range(max_attempts - 1, -1, -1): + print(f">> {cmd}") + proc = subprocess.run(extra_args, capture_output=True, check=False, cwd=p_root) + if proc.returncode == 0: + break + prefix = "NESTED PYTEST STDOUT" + for line in proc.stdout.decode().splitlines(): + print(f"{prefix}: {line}") + prefix = " " * len(prefix) + prefix = "NESTED PYTEST STDERR" + for line in proc.stderr.decode().splitlines(): + print(f"{prefix}: {line}") + prefix = " " * len(prefix) + print(f"Exit code {proc.returncode} ({i} attempts remaining)") + + return PytestResult( + input=data_suite, + input_updated=p.read_text(), + stdout=proc.stdout.decode(), + stderr=proc.stderr.decode(), + ) + finally: + p.unlink() diff --git a/mypy/test/meta/test_parse_data.py b/mypy/test/meta/test_parse_data.py index 6593dbc45704..797fdd7b2c8c 100644 --- a/mypy/test/meta/test_parse_data.py +++ b/mypy/test/meta/test_parse_data.py @@ -2,37 +2,18 @@ A "meta test" which tests the parsing of .test files. This is not meant to become exhaustive but to ensure we maintain a basic level of ergonomics for mypy contributors. 
""" -import subprocess -import sys -import textwrap -import uuid -from pathlib import Path - -from mypy.test.config import test_data_prefix from mypy.test.helpers import Suite +from mypy.test.meta._pytest import PytestResult, run_pytest_data_suite -class ParseTestDataSuite(Suite): - def _dedent(self, s: str) -> str: - return textwrap.dedent(s).lstrip() +def _run_pytest(data_suite: str) -> PytestResult: + return run_pytest_data_suite(data_suite, extra_args=[], max_attempts=1) - def _run_pytest(self, data_suite: str) -> str: - p_test_data = Path(test_data_prefix) - p_root = p_test_data.parent.parent - p = p_test_data / f"check-meta-{uuid.uuid4()}.test" - assert not p.exists() - try: - p.write_text(data_suite) - test_nodeid = f"mypy/test/testcheck.py::TypeCheckSuite::{p.name}" - args = [sys.executable, "-m", "pytest", "-n", "0", "-s", test_nodeid] - proc = subprocess.run(args, cwd=p_root, capture_output=True, check=False) - return proc.stdout.decode() - finally: - p.unlink() +class ParseTestDataSuite(Suite): def test_parse_invalid_case(self) -> None: - # Arrange - data = self._dedent( + # Act + result = _run_pytest( """ [case abc] s: str @@ -41,15 +22,12 @@ def test_parse_invalid_case(self) -> None: """ ) - # Act - actual = self._run_pytest(data) - # Assert - assert "Invalid testcase id 'foo-XFAIL'" in actual + assert "Invalid testcase id 'foo-XFAIL'" in result.stdout def test_parse_invalid_section(self) -> None: - # Arrange - data = self._dedent( + # Act + result = _run_pytest( """ [case abc] s: str @@ -58,19 +36,16 @@ def test_parse_invalid_section(self) -> None: """ ) - # Act - actual = self._run_pytest(data) - # Assert - expected_lineno = data.splitlines().index("[unknownsection]") + 1 + expected_lineno = result.input.splitlines().index("[unknownsection]") + 1 expected = ( f".test:{expected_lineno}: Invalid section header [unknownsection] in case 'abc'" ) - assert expected in actual + assert expected in result.stdout def test_bad_ge_version_check(self) -> None: - 
# Arrange - data = self._dedent( + # Act + actual = _run_pytest( """ [case abc] s: str @@ -79,15 +54,12 @@ def test_bad_ge_version_check(self) -> None: """ ) - # Act - actual = self._run_pytest(data) - # Assert - assert "version>=3.8 always true since minimum runtime version is (3, 8)" in actual + assert "version>=3.8 always true since minimum runtime version is (3, 8)" in actual.stdout def test_bad_eq_version_check(self) -> None: - # Arrange - data = self._dedent( + # Act + actual = _run_pytest( """ [case abc] s: str @@ -96,8 +68,5 @@ def test_bad_eq_version_check(self) -> None: """ ) - # Act - actual = self._run_pytest(data) - # Assert - assert "version==3.7 always false since minimum runtime version is (3, 8)" in actual + assert "version==3.7 always false since minimum runtime version is (3, 8)" in actual.stdout diff --git a/mypy/test/meta/test_update_data.py b/mypy/test/meta/test_update_data.py index 4e4bdd193dbf..40b70157a0e3 100644 --- a/mypy/test/meta/test_update_data.py +++ b/mypy/test/meta/test_update_data.py @@ -3,47 +3,23 @@ Updating the expected output, especially when it's in the form of inline (comment) assertions, can be brittle, which is why we're "meta-testing" here. """ -import shlex -import subprocess -import sys -import textwrap -import uuid -from pathlib import Path - -from mypy.test.config import test_data_prefix from mypy.test.helpers import Suite +from mypy.test.meta._pytest import PytestResult, dedent_docstring, run_pytest_data_suite -class UpdateDataSuite(Suite): - def _run_pytest_update_data(self, data_suite: str, *, max_attempts: int) -> str: - """ - Runs a suite of data test cases through 'pytest --update-data' until either tests pass - or until a maximum number of attempts (needed for incremental tests). 
- """ - p_test_data = Path(test_data_prefix) - p_root = p_test_data.parent.parent - p = p_test_data / f"check-meta-{uuid.uuid4()}.test" - assert not p.exists() - try: - p.write_text(textwrap.dedent(data_suite).lstrip()) - - test_nodeid = f"mypy/test/testcheck.py::TypeCheckSuite::{p.name}" - args = [sys.executable, "-m", "pytest", "-n", "0", "-s", "--update-data", test_nodeid] - cmd = shlex.join(args) - for i in range(max_attempts - 1, -1, -1): - res = subprocess.run(args, cwd=p_root) - if res.returncode == 0: - break - print(f"`{cmd}` returned {res.returncode}: {i} attempts remaining") - - return p.read_text() - finally: - p.unlink() +def _run_pytest_update_data(data_suite: str) -> PytestResult: + """ + Runs a suite of data test cases through 'pytest --update-data' until either tests pass + or until a maximum number of attempts (needed for incremental tests). + """ + return run_pytest_data_suite(data_suite, extra_args=["--update-data"], max_attempts=3) + +class UpdateDataSuite(Suite): def test_update_data(self) -> None: # Note: We test multiple testcases rather than 'test case per test case' # so we could also exercise rewriting multiple testcases at once. 
- actual = self._run_pytest_update_data( + result = _run_pytest_update_data( """ [case testCorrect] s: str = 42 # E: Incompatible types in assignment (expression has type "int", variable has type "str") @@ -97,12 +73,12 @@ def test_update_data(self) -> None: [file b.py] s2: str = 43 # E: baz [builtins fixtures/list.pyi] - """, - max_attempts=3, + """ ) # Assert - expected = """ + expected = dedent_docstring( + """ [case testCorrect] s: str = 42 # E: Incompatible types in assignment (expression has type "int", variable has type "str") @@ -154,4 +130,5 @@ def test_update_data(self) -> None: s2: str = 43 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [builtins fixtures/list.pyi] """ - assert actual == textwrap.dedent(expected).lstrip() + ) + assert result.input_updated == expected From cc8a4b50f7e65004a97c9ba51c69f7c9340370d9 Mon Sep 17 00:00:00 2001 From: Ilya Priven Date: Sat, 2 Sep 2023 17:29:42 -0400 Subject: [PATCH 022/144] Document we're not tracking relationships between symbols (#16018) Fixes #15653. I did not use erictraut's "quantum entanglement" metaphor, though I find it to be quite illustrative :) --- docs/source/type_narrowing.rst | 42 ++++++++++++++++++++++++++++++---- 1 file changed, 37 insertions(+), 5 deletions(-) diff --git a/docs/source/type_narrowing.rst b/docs/source/type_narrowing.rst index 4bc0fda70138..4c5c2851edd0 100644 --- a/docs/source/type_narrowing.rst +++ b/docs/source/type_narrowing.rst @@ -3,7 +3,7 @@ Type narrowing ============== -This section is dedicated to several type narrowing +This section is dedicated to several type narrowing techniques which are supported by mypy. Type narrowing is when you convince a type checker that a broader type is actually more specific, for instance, that an object of type ``Shape`` is actually of the narrower type ``Square``. 
@@ -14,10 +14,11 @@ Type narrowing expressions The simplest way to narrow a type is to use one of the supported expressions: -- :py:func:`isinstance` like in ``isinstance(obj, float)`` will narrow ``obj`` to have ``float`` type -- :py:func:`issubclass` like in ``issubclass(cls, MyClass)`` will narrow ``cls`` to be ``Type[MyClass]`` -- :py:class:`type` like in ``type(obj) is int`` will narrow ``obj`` to have ``int`` type -- :py:func:`callable` like in ``callable(obj)`` will narrow object to callable type +- :py:func:`isinstance` like in :code:`isinstance(obj, float)` will narrow ``obj`` to have ``float`` type +- :py:func:`issubclass` like in :code:`issubclass(cls, MyClass)` will narrow ``cls`` to be ``Type[MyClass]`` +- :py:class:`type` like in :code:`type(obj) is int` will narrow ``obj`` to have ``int`` type +- :py:func:`callable` like in :code:`callable(obj)` will narrow object to callable type +- :code:`obj is not None` will narrow object to its :ref:`non-optional form ` Type narrowing is contextual. For example, based on the condition, mypy will narrow an expression only within an ``if`` branch: @@ -83,6 +84,7 @@ We can also use ``assert`` to narrow types in the same context: reveal_type(x) # Revealed type is "builtins.int" print(x + '!') # Typechecks with `mypy`, but fails in runtime. + issubclass ~~~~~~~~~~ @@ -359,3 +361,33 @@ What happens here? .. note:: The same will work with ``isinstance(x := a, float)`` as well. + +Limitations +----------- + +Mypy's analysis is limited to individual symbols and it will not track +relationships between symbols. For example, in the following code +it's easy to deduce that if :code:`a` is None then :code:`b` must not be, +therefore :code:`a or b` will always be a string, but Mypy will not be able to tell that: + +.. 
code-block:: python + + def f(a: str | None, b: str | None) -> str: + if a is not None or b is not None: + return a or b # Incompatible return value type (got "str | None", expected "str") + return 'spam' + +Tracking these sort of cross-variable conditions in a type checker would add significant complexity +and performance overhead. + +You can use an ``assert`` to convince the type checker, override it with a :ref:`cast ` +or rewrite the function to be slightly more verbose: + +.. code-block:: python + + def f(a: str | None, b: str | None) -> str: + if a is not None: + return a + elif b is not None: + return b + return 'spam' From f83d6eb9070137f0b060bb5a8b81858bf8910424 Mon Sep 17 00:00:00 2001 From: Ilya Priven Date: Sun, 3 Sep 2023 03:59:28 -0400 Subject: [PATCH 023/144] ruff: add pyupgrade (#16023) For example, this [review comment](https://github.com/python/mypy/pull/15481#discussion_r1313755961) could've been spared with [UP036](https://beta.ruff.rs/docs/rules/outdated-version-block/). 
--- mypy/checkmember.py | 4 ++-- mypy/config_parser.py | 22 +++++++++++++--------- mypy/main.py | 3 +-- mypy/messages.py | 15 +++++++-------- mypy/solve.py | 4 ++-- mypyc/ir/class_ir.py | 2 +- mypyc/ir/ops.py | 4 ++-- mypyc/ir/rtypes.py | 4 ++-- pyproject.toml | 4 +++- 9 files changed, 33 insertions(+), 29 deletions(-) diff --git a/mypy/checkmember.py b/mypy/checkmember.py index f7d002f17eb9..60430839ff62 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Callable, Optional, Sequence, cast +from typing import TYPE_CHECKING, Callable, Sequence, cast from mypy import meet, message_registry, subtypes from mypy.erasetype import erase_typevars @@ -777,7 +777,7 @@ def analyze_var( result: Type = t typ = get_proper_type(typ) - call_type: Optional[ProperType] = None + call_type: ProperType | None = None if var.is_initialized_in_class and (not is_instance_var(var) or mx.is_operator): if isinstance(typ, FunctionLike) and not typ.is_type_obj(): call_type = typ diff --git a/mypy/config_parser.py b/mypy/config_parser.py index 7748c3b25966..4dbd6477c81e 100644 --- a/mypy/config_parser.py +++ b/mypy/config_parser.py @@ -292,14 +292,18 @@ def parse_config_file( ) if report_dirs: print( - "%sPer-module sections should not specify reports (%s)" - % (prefix, ", ".join(s + "_report" for s in sorted(report_dirs))), + prefix, + "Per-module sections should not specify reports ({})".format( + ", ".join(s + "_report" for s in sorted(report_dirs)) + ), file=stderr, ) if set(updates) - PER_MODULE_OPTIONS: print( - "%sPer-module sections should only specify per-module flags (%s)" - % (prefix, ", ".join(sorted(set(updates) - PER_MODULE_OPTIONS))), + prefix, + "Per-module sections should only specify per-module flags ({})".format( + ", ".join(sorted(set(updates) - PER_MODULE_OPTIONS)) + ), file=stderr, ) updates = {k: v for k, v in updates.items() if k in PER_MODULE_OPTIONS} @@ -315,8 +319,9 @@ def 
parse_config_file( "*" in x and x != "*" for x in glob.split(".") ): print( - "%sPatterns must be fully-qualified module names, optionally " - "with '*' in some components (e.g spam.*.eggs.*)" % prefix, + prefix, + "Patterns must be fully-qualified module names, optionally " + "with '*' in some components (e.g spam.*.eggs.*)", file=stderr, ) else: @@ -329,7 +334,7 @@ def get_prefix(file_read: str, name: str) -> str: else: module_name_str = name - return f"{file_read}: [{module_name_str}]: " + return f"{file_read}: [{module_name_str}]:" def is_toml(filename: str) -> bool: @@ -411,8 +416,7 @@ def destructure_overrides(toml_data: dict[str, Any]) -> dict[str, Any]: raise ConfigTOMLValueError( "toml config file contains " "[[tool.mypy.overrides]] sections with conflicting " - "values. Module '%s' has two different values for '%s'" - % (module, new_key) + f"values. Module '{module}' has two different values for '{new_key}'" ) result[old_config_name][new_key] = new_value diff --git a/mypy/main.py b/mypy/main.py index 30f6cfe97455..a4357dca7890 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -187,8 +187,7 @@ def flush_errors(new_messages: list[str], serious: bool) -> None: and not options.non_interactive ): print( - "Warning: unused section(s) in %s: %s" - % ( + "Warning: unused section(s) in {}: {}".format( options.config_file, get_config_module_names( options.config_file, diff --git a/mypy/messages.py b/mypy/messages.py index 4b71bd876dcc..bba9c3c3cdea 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1454,20 +1454,19 @@ def cannot_determine_type_in_base(self, name: str, base: str, context: Context) self.fail(f'Cannot determine type of "{name}" in base class "{base}"', context) def no_formal_self(self, name: str, item: CallableType, context: Context) -> None: + type = format_type(item, self.options) self.fail( - 'Attribute function "%s" with type %s does not accept self argument' - % (name, format_type(item, self.options)), - context, + f'Attribute function 
"{name}" with type {type} does not accept self argument', context ) def incompatible_self_argument( self, name: str, arg: Type, sig: CallableType, is_classmethod: bool, context: Context ) -> None: kind = "class attribute function" if is_classmethod else "attribute function" + arg_type = format_type(arg, self.options) + sig_type = format_type(sig, self.options) self.fail( - 'Invalid self argument %s to %s "%s" with type %s' - % (format_type(arg, self.options), kind, name, format_type(sig, self.options)), - context, + f'Invalid self argument {arg_type} to {kind} "{name}" with type {sig_type}', context ) def incompatible_conditional_function_def( @@ -1487,8 +1486,8 @@ def cannot_instantiate_abstract_class( ) -> None: attrs = format_string_list([f'"{a}"' for a in abstract_attributes]) self.fail( - 'Cannot instantiate abstract class "%s" with abstract ' - "attribute%s %s" % (class_name, plural_s(abstract_attributes), attrs), + f'Cannot instantiate abstract class "{class_name}" with abstract ' + f"attribute{plural_s(abstract_attributes)} {attrs}", context, code=codes.ABSTRACT, ) diff --git a/mypy/solve.py b/mypy/solve.py index 95377ea9f93e..17e1ca047818 100644 --- a/mypy/solve.py +++ b/mypy/solve.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections import defaultdict -from typing import Iterable, Sequence, Tuple +from typing import Iterable, Sequence from typing_extensions import TypeAlias as _TypeAlias from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints @@ -333,7 +333,7 @@ def is_trivial_bound(tp: ProperType) -> bool: return isinstance(tp, Instance) and tp.type.fullname == "builtins.object" -def find_linear(c: Constraint) -> Tuple[bool, TypeVarId | None]: +def find_linear(c: Constraint) -> tuple[bool, TypeVarId | None]: """Find out if this constraint represent a linear relationship, return target id if yes.""" if isinstance(c.origin_type_var, TypeVarType): if isinstance(c.target, TypeVarType): diff --git 
a/mypyc/ir/class_ir.py b/mypyc/ir/class_ir.py index 682e30629118..61f0fc36e1b3 100644 --- a/mypyc/ir/class_ir.py +++ b/mypyc/ir/class_ir.py @@ -70,7 +70,7 @@ class VTableMethod(NamedTuple): - cls: "ClassIR" + cls: "ClassIR" # noqa: UP037 name: str method: FuncIR shadow_method: FuncIR | None diff --git a/mypyc/ir/ops.py b/mypyc/ir/ops.py index d80c479211b7..2d64cc79d822 100644 --- a/mypyc/ir/ops.py +++ b/mypyc/ir/ops.py @@ -1575,5 +1575,5 @@ def visit_keep_alive(self, op: KeepAlive) -> T: # (Serialization and deserialization *will* be used for incremental # compilation but so far it is not hooked up to anything.) class DeserMaps(NamedTuple): - classes: dict[str, "ClassIR"] - functions: dict[str, "FuncIR"] + classes: dict[str, ClassIR] + functions: dict[str, FuncIR] diff --git a/mypyc/ir/rtypes.py b/mypyc/ir/rtypes.py index fa46feb0b59a..fecfaee5ef77 100644 --- a/mypyc/ir/rtypes.py +++ b/mypyc/ir/rtypes.py @@ -23,8 +23,8 @@ from __future__ import annotations from abc import abstractmethod -from typing import TYPE_CHECKING, ClassVar, Generic, TypeVar -from typing_extensions import Final, TypeGuard +from typing import TYPE_CHECKING, ClassVar, Final, Generic, TypeVar +from typing_extensions import TypeGuard from mypyc.common import IS_32_BIT_PLATFORM, PLATFORM_SIZE, JsonDict, short_name from mypyc.namegen import NameGenerator diff --git a/pyproject.toml b/pyproject.toml index 67201acb9b94..18ba23671d9c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,8 @@ select = [ "B", # flake8-bugbear "I", # isort "RUF100", # Unused noqa comments - "PGH004" # blanket noqa comments + "PGH004", # blanket noqa comments + "UP", # pyupgrade ] ignore = [ @@ -49,6 +50,7 @@ ignore = [ "E501", # conflicts with black "E731", # Do not assign a `lambda` expression, use a `def` "E741", # Ambiguous variable name + "UP032", # 'f-string always preferable to format' is controversial ] unfixable = [ From 87365eb3b2ef5f89c19de2708a826f3c80e914a6 Mon Sep 17 00:00:00 2001 From: Nikita 
Sobolev Date: Sun, 3 Sep 2023 15:24:24 +0300 Subject: [PATCH 024/144] Exclude `assert False` from coverage (#16026) --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 18ba23671d9c..1d6562756e22 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -121,5 +121,6 @@ exclude_lines = [ '^\s*raise NotImplementedError\b', '^\s*return NotImplemented\b', '^\s*raise$', + '^assert False\b', '''^if __name__ == ['"]__main__['"]:$''', ] From 92e054b7dad3641fe74326ef60e773b974ca614f Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Sun, 3 Sep 2023 20:01:51 +0300 Subject: [PATCH 025/144] Do not set `is_final` twice for `FuncBase` subclasses (#16030) --- mypy/nodes.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/mypy/nodes.py b/mypy/nodes.py index 9b4ba5e76667..db42dd6b3949 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -573,7 +573,6 @@ def __init__(self, items: list[OverloadPart]) -> None: if items: # TODO: figure out how to reliably set end position (we don't know the impl here). self.set_line(items[0].line, items[0].column) - self.is_final = False @property def name(self) -> str: @@ -772,7 +771,6 @@ def __init__( # Is this an abstract method with trivial body? # Such methods can't be called via super(). 
self.is_trivial_body = False - self.is_final = False # Original conditional definition self.original_def: None | FuncDef | Var | Decorator = None # Used for error reporting (to keep backward compatibility with pre-3.8) From 488ad4f31dca387f87093e8d0b0fef2e021daa0b Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Mon, 4 Sep 2023 12:53:26 +0300 Subject: [PATCH 026/144] Bundle `misc/proper_plugin.py` as a part of `mypy` (#16036) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I modified ```diff diff --git mypy/binder.py mypy/binder.py index 8a68f24f6..194883f86 100644 --- mypy/binder.py +++ mypy/binder.py @@ -345,7 +345,7 @@ class ConditionalTypeBinder: self._cleanse_key(dep) def most_recent_enclosing_type(self, expr: BindableExpression, type: Type) -> Type | None: - type = get_proper_type(type) + # type = get_proper_type(type) if isinstance(type, AnyType): return get_declaration(expr) key = literal_hash(expr) ``` to see if it still works. It is: ```python » python runtests.py self run self: ['/Users/sobolev/Desktop/mypy/.venv/bin/python', '-m', 'mypy', '--config-file', 'mypy_self_check.ini', '-p', 'mypy', '-p', 'mypyc'] mypy/binder.py:349: error: Never apply isinstance() to unexpanded types; use mypy.types.get_proper_type() first [misc] if isinstance(type, AnyType): ^~~~~~~~~~~~~~~~~~~~~~~~~ mypy/binder.py:349: note: If you pass on the original type after the check, always use its unexpanded version Found 1 error in 1 file (checked 288 source files) FAILED: self ``` I will add this plugin to my CI checks in like ~5 plugins I maintain :) - https://github.com/typeddjango/django-stubs - https://github.com/typeddjango/djangorestframework-stubs - https://github.com/dry-python/returns - https://github.com/dry-python/classes - https://github.com/wemake-services/mypy-extras Closes https://github.com/python/mypy/issues/16035 --- MANIFEST.in | 1 - docs/source/extending_mypy.rst | 9 +++++++++ {misc => mypy/plugins}/proper_plugin.py | 
9 +++++++++ mypy_self_check.ini | 2 +- 4 files changed, 19 insertions(+), 2 deletions(-) rename {misc => mypy/plugins}/proper_plugin.py (95%) diff --git a/MANIFEST.in b/MANIFEST.in index b77b762b4852..a1c15446de3f 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -34,7 +34,6 @@ include build-requirements.txt include test-requirements.txt include mypy_self_check.ini prune misc -include misc/proper_plugin.py graft test-data include conftest.py include runtests.py diff --git a/docs/source/extending_mypy.rst b/docs/source/extending_mypy.rst index 506f548db687..bbbec2ad3880 100644 --- a/docs/source/extending_mypy.rst +++ b/docs/source/extending_mypy.rst @@ -237,3 +237,12 @@ mypy's cache for that module so that it can be rechecked. This hook should be used to report to mypy any relevant configuration data, so that mypy knows to recheck the module if the configuration changes. The hooks should return data encodable as JSON. + +Useful tools +************ + +Mypy ships ``mypy.plugins.proper_plugin`` plugin which can be useful +for plugin authors, since it finds missing ``get_proper_type()`` calls, +which is a pretty common mistake. + +It is recommended to enable it as a part of your plugin's CI. diff --git a/misc/proper_plugin.py b/mypy/plugins/proper_plugin.py similarity index 95% rename from misc/proper_plugin.py rename to mypy/plugins/proper_plugin.py index a6e6dc03b625..ab93f0d126db 100644 --- a/misc/proper_plugin.py +++ b/mypy/plugins/proper_plugin.py @@ -1,3 +1,12 @@ +""" +This plugin is helpful for mypy development itself. +By default, it is not enabled for mypy users. + +It also can be used by plugin developers as a part of their CI checks. + +It finds missing ``get_proper_type()`` call, which can lead to multiple errors. 
+""" + from __future__ import annotations from typing import Callable diff --git a/mypy_self_check.ini b/mypy_self_check.ini index fcdbe641d6d6..6e1ad8187b7a 100644 --- a/mypy_self_check.ini +++ b/mypy_self_check.ini @@ -5,7 +5,7 @@ disallow_any_unimported = True show_traceback = True pretty = True always_false = MYPYC -plugins = misc/proper_plugin.py +plugins = mypy.plugins.proper_plugin python_version = 3.8 exclude = mypy/typeshed/|mypyc/test-data/|mypyc/lib-rt/ new_type_inference = True From 8738886861682e0d168ea321c2cc6ee5b566cb8b Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Mon, 4 Sep 2023 13:23:42 +0300 Subject: [PATCH 027/144] Add type annotations to `test-data/unit/plugins` (#16028) Closes https://github.com/python/mypy/issues/16027 --- mypy/plugin.py | 2 +- test-data/unit/plugins/add_classmethod.py | 8 ++-- test-data/unit/plugins/arg_kinds.py | 19 ++++---- test-data/unit/plugins/arg_names.py | 48 ++++++++++++------- test-data/unit/plugins/attrhook.py | 14 +++--- test-data/unit/plugins/attrhook2.py | 16 ++++--- test-data/unit/plugins/badreturn.py | 2 +- test-data/unit/plugins/badreturn2.py | 6 ++- test-data/unit/plugins/callable_instance.py | 19 +++++--- test-data/unit/plugins/class_attr_hook.py | 15 +++--- test-data/unit/plugins/class_callable.py | 41 ++++++++++------ .../unit/plugins/common_api_incremental.py | 36 +++++++------- test-data/unit/plugins/config_data.py | 9 ++-- test-data/unit/plugins/custom_errorcode.py | 14 ++++-- test-data/unit/plugins/customentry.py | 20 +++++--- test-data/unit/plugins/customize_mro.py | 15 ++++-- test-data/unit/plugins/decimal_to_int.py | 19 +++++--- test-data/unit/plugins/depshook.py | 12 ++--- test-data/unit/plugins/descriptor.py | 32 ++++++++----- test-data/unit/plugins/dyn_class.py | 46 +++++++++++------- .../unit/plugins/dyn_class_from_method.py | 24 +++++++--- test-data/unit/plugins/fnplugin.py | 20 +++++--- .../unit/plugins/fully_qualified_test_hook.py | 24 +++++++--- 
test-data/unit/plugins/function_sig_hook.py | 20 ++++---- test-data/unit/plugins/method_in_decorator.py | 22 +++++---- test-data/unit/plugins/method_sig_hook.py | 27 +++++++---- test-data/unit/plugins/named_callable.py | 31 +++++++----- test-data/unit/plugins/plugin2.py | 20 +++++--- test-data/unit/plugins/type_anal_hook.py | 31 ++++++------ test-data/unit/plugins/union_method.py | 32 ++++++++----- tox.ini | 1 + 31 files changed, 407 insertions(+), 238 deletions(-) diff --git a/mypy/plugin.py b/mypy/plugin.py index 4d62c2bd184b..38016191de8f 100644 --- a/mypy/plugin.py +++ b/mypy/plugin.py @@ -247,7 +247,7 @@ def fail( @abstractmethod def named_generic_type(self, name: str, args: list[Type]) -> Instance: - """Construct an instance of a builtin type with given type arguments.""" + """Construct an instance of a generic type with given type arguments.""" raise NotImplementedError @abstractmethod diff --git a/test-data/unit/plugins/add_classmethod.py b/test-data/unit/plugins/add_classmethod.py index 5aacc69a8f01..9bc2c4e079dd 100644 --- a/test-data/unit/plugins/add_classmethod.py +++ b/test-data/unit/plugins/add_classmethod.py @@ -1,4 +1,6 @@ -from typing import Callable, Optional +from __future__ import annotations + +from typing import Callable from mypy.nodes import ARG_POS, Argument, Var from mypy.plugin import ClassDefContext, Plugin @@ -7,7 +9,7 @@ class ClassMethodPlugin(Plugin): - def get_base_class_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]: + def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: if "BaseAddMethod" in fullname: return add_extra_methods_hook return None @@ -24,5 +26,5 @@ def add_extra_methods_hook(ctx: ClassDefContext) -> None: ) -def plugin(version): +def plugin(version: str) -> type[ClassMethodPlugin]: return ClassMethodPlugin diff --git a/test-data/unit/plugins/arg_kinds.py b/test-data/unit/plugins/arg_kinds.py index 5392e64c4f11..388a3c738b62 100644 --- 
a/test-data/unit/plugins/arg_kinds.py +++ b/test-data/unit/plugins/arg_kinds.py @@ -1,18 +1,19 @@ -from typing import Optional, Callable -from mypy.plugin import Plugin, MethodContext, FunctionContext +from __future__ import annotations + +from typing import Callable + +from mypy.plugin import FunctionContext, MethodContext, Plugin from mypy.types import Type class ArgKindsPlugin(Plugin): - def get_function_hook(self, fullname: str - ) -> Optional[Callable[[FunctionContext], Type]]: - if 'func' in fullname: + def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None: + if "func" in fullname: return extract_arg_kinds_from_function return None - def get_method_hook(self, fullname: str - ) -> Optional[Callable[[MethodContext], Type]]: - if 'Class.method' in fullname: + def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None: + if "Class.method" in fullname: return extract_arg_kinds_from_method return None @@ -27,5 +28,5 @@ def extract_arg_kinds_from_method(ctx: MethodContext) -> Type: return ctx.default_return_type -def plugin(version): +def plugin(version: str) -> type[ArgKindsPlugin]: return ArgKindsPlugin diff --git a/test-data/unit/plugins/arg_names.py b/test-data/unit/plugins/arg_names.py index 6c1cbb9415cc..981c1a2eb12d 100644 --- a/test-data/unit/plugins/arg_names.py +++ b/test-data/unit/plugins/arg_names.py @@ -1,35 +1,51 @@ -from typing import Optional, Callable +from __future__ import annotations -from mypy.plugin import Plugin, MethodContext, FunctionContext +from typing import Callable + +from mypy.nodes import StrExpr +from mypy.plugin import FunctionContext, MethodContext, Plugin from mypy.types import Type class ArgNamesPlugin(Plugin): - def get_function_hook(self, fullname: str - ) -> Optional[Callable[[FunctionContext], Type]]: - if fullname in {'mod.func', 'mod.func_unfilled', 'mod.func_star_expr', - 'mod.ClassInit', 'mod.Outer.NestedClassInit'}: + def get_function_hook(self, fullname: str) -> 
Callable[[FunctionContext], Type] | None: + if fullname in { + "mod.func", + "mod.func_unfilled", + "mod.func_star_expr", + "mod.ClassInit", + "mod.Outer.NestedClassInit", + }: return extract_classname_and_set_as_return_type_function return None - def get_method_hook(self, fullname: str - ) -> Optional[Callable[[MethodContext], Type]]: - if fullname in {'mod.Class.method', 'mod.Class.myclassmethod', 'mod.Class.mystaticmethod', - 'mod.ClassUnfilled.method', 'mod.ClassStarExpr.method', - 'mod.ClassChild.method', 'mod.ClassChild.myclassmethod'}: + def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None: + if fullname in { + "mod.Class.method", + "mod.Class.myclassmethod", + "mod.Class.mystaticmethod", + "mod.ClassUnfilled.method", + "mod.ClassStarExpr.method", + "mod.ClassChild.method", + "mod.ClassChild.myclassmethod", + }: return extract_classname_and_set_as_return_type_method return None def extract_classname_and_set_as_return_type_function(ctx: FunctionContext) -> Type: - classname = ctx.args[ctx.callee_arg_names.index('classname')][0].value - return ctx.api.named_generic_type(classname, []) + arg = ctx.args[ctx.callee_arg_names.index("classname")][0] + if not isinstance(arg, StrExpr): + return ctx.default_return_type + return ctx.api.named_generic_type(arg.value, []) def extract_classname_and_set_as_return_type_method(ctx: MethodContext) -> Type: - classname = ctx.args[ctx.callee_arg_names.index('classname')][0].value - return ctx.api.named_generic_type(classname, []) + arg = ctx.args[ctx.callee_arg_names.index("classname")][0] + if not isinstance(arg, StrExpr): + return ctx.default_return_type + return ctx.api.named_generic_type(arg.value, []) -def plugin(version): +def plugin(version: str) -> type[ArgNamesPlugin]: return ArgNamesPlugin diff --git a/test-data/unit/plugins/attrhook.py b/test-data/unit/plugins/attrhook.py index c177072aa47f..9500734daa6c 100644 --- a/test-data/unit/plugins/attrhook.py +++ 
b/test-data/unit/plugins/attrhook.py @@ -1,12 +1,14 @@ -from typing import Optional, Callable +from __future__ import annotations -from mypy.plugin import Plugin, AttributeContext -from mypy.types import Type, Instance +from typing import Callable + +from mypy.plugin import AttributeContext, Plugin +from mypy.types import Instance, Type class AttrPlugin(Plugin): - def get_attribute_hook(self, fullname: str) -> Optional[Callable[[AttributeContext], Type]]: - if fullname == 'm.Signal.__call__': + def get_attribute_hook(self, fullname: str) -> Callable[[AttributeContext], Type] | None: + if fullname == "m.Signal.__call__": return signal_call_callback return None @@ -17,5 +19,5 @@ def signal_call_callback(ctx: AttributeContext) -> Type: return ctx.default_attr_type -def plugin(version): +def plugin(version: str) -> type[AttrPlugin]: return AttrPlugin diff --git a/test-data/unit/plugins/attrhook2.py b/test-data/unit/plugins/attrhook2.py index cc14341a6f97..2d41a0fdf52f 100644 --- a/test-data/unit/plugins/attrhook2.py +++ b/test-data/unit/plugins/attrhook2.py @@ -1,14 +1,16 @@ -from typing import Optional, Callable +from __future__ import annotations -from mypy.plugin import Plugin, AttributeContext -from mypy.types import Type, AnyType, TypeOfAny +from typing import Callable + +from mypy.plugin import AttributeContext, Plugin +from mypy.types import AnyType, Type, TypeOfAny class AttrPlugin(Plugin): - def get_attribute_hook(self, fullname: str) -> Optional[Callable[[AttributeContext], Type]]: - if fullname == 'm.Magic.magic_field': + def get_attribute_hook(self, fullname: str) -> Callable[[AttributeContext], Type] | None: + if fullname == "m.Magic.magic_field": return magic_field_callback - if fullname == 'm.Magic.nonexistent_field': + if fullname == "m.Magic.nonexistent_field": return nonexistent_field_callback return None @@ -22,5 +24,5 @@ def nonexistent_field_callback(ctx: AttributeContext) -> Type: return AnyType(TypeOfAny.from_error) -def plugin(version): +def 
plugin(version: str) -> type[AttrPlugin]: return AttrPlugin diff --git a/test-data/unit/plugins/badreturn.py b/test-data/unit/plugins/badreturn.py index fd7430606dd6..9dce3b3e99c2 100644 --- a/test-data/unit/plugins/badreturn.py +++ b/test-data/unit/plugins/badreturn.py @@ -1,2 +1,2 @@ -def plugin(version): +def plugin(version: str) -> None: pass diff --git a/test-data/unit/plugins/badreturn2.py b/test-data/unit/plugins/badreturn2.py index c7e0447841c1..1ae551ecbf20 100644 --- a/test-data/unit/plugins/badreturn2.py +++ b/test-data/unit/plugins/badreturn2.py @@ -1,5 +1,9 @@ +from __future__ import annotations + + class MyPlugin: pass -def plugin(version): + +def plugin(version: str) -> type[MyPlugin]: return MyPlugin diff --git a/test-data/unit/plugins/callable_instance.py b/test-data/unit/plugins/callable_instance.py index 40e7df418539..a9f562effb34 100644 --- a/test-data/unit/plugins/callable_instance.py +++ b/test-data/unit/plugins/callable_instance.py @@ -1,23 +1,30 @@ +from __future__ import annotations + +from typing import Callable + from mypy.plugin import MethodContext, Plugin from mypy.types import Instance, Type + class CallableInstancePlugin(Plugin): - def get_function_hook(self, fullname): - assert not fullname.endswith(' of Foo') + def get_function_hook(self, fullname: str) -> None: + assert not fullname.endswith(" of Foo") - def get_method_hook(self, fullname): + def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None: # Ensure that all names are fully qualified - assert not fullname.endswith(' of Foo') + assert not fullname.endswith(" of Foo") - if fullname == '__main__.Class.__call__': + if fullname == "__main__.Class.__call__": return my_hook return None + def my_hook(ctx: MethodContext) -> Type: if isinstance(ctx.type, Instance) and len(ctx.type.args) == 1: return ctx.type.args[0] return ctx.default_return_type -def plugin(version): + +def plugin(version: str) -> type[CallableInstancePlugin]: return 
CallableInstancePlugin diff --git a/test-data/unit/plugins/class_attr_hook.py b/test-data/unit/plugins/class_attr_hook.py index 348e5df0ee03..5d6a87df48bb 100644 --- a/test-data/unit/plugins/class_attr_hook.py +++ b/test-data/unit/plugins/class_attr_hook.py @@ -1,20 +1,23 @@ -from typing import Callable, Optional +from __future__ import annotations + +from typing import Callable from mypy.plugin import AttributeContext, Plugin from mypy.types import Type as MypyType class ClassAttrPlugin(Plugin): - def get_class_attribute_hook(self, fullname: str - ) -> Optional[Callable[[AttributeContext], MypyType]]: - if fullname == '__main__.Cls.attr': + def get_class_attribute_hook( + self, fullname: str + ) -> Callable[[AttributeContext], MypyType] | None: + if fullname == "__main__.Cls.attr": return my_hook return None def my_hook(ctx: AttributeContext) -> MypyType: - return ctx.api.named_generic_type('builtins.int', []) + return ctx.api.named_generic_type("builtins.int", []) -def plugin(_version: str): +def plugin(_version: str) -> type[ClassAttrPlugin]: return ClassAttrPlugin diff --git a/test-data/unit/plugins/class_callable.py b/test-data/unit/plugins/class_callable.py index 07f75ec80ac1..9fab30e60458 100644 --- a/test-data/unit/plugins/class_callable.py +++ b/test-data/unit/plugins/class_callable.py @@ -1,32 +1,43 @@ -from mypy.plugin import Plugin +from __future__ import annotations + +from typing import Callable + from mypy.nodes import NameExpr -from mypy.types import UnionType, NoneType, Instance +from mypy.plugin import FunctionContext, Plugin +from mypy.types import Instance, NoneType, Type, UnionType, get_proper_type + class AttrPlugin(Plugin): - def get_function_hook(self, fullname): - if fullname.startswith('mod.Attr'): + def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None: + if fullname.startswith("mod.Attr"): return attr_hook return None -def attr_hook(ctx): - assert isinstance(ctx.default_return_type, Instance) - if 
ctx.default_return_type.type.fullname == 'mod.Attr': - attr_base = ctx.default_return_type + +def attr_hook(ctx: FunctionContext) -> Type: + default = get_proper_type(ctx.default_return_type) + assert isinstance(default, Instance) + if default.type.fullname == "mod.Attr": + attr_base = default else: attr_base = None - for base in ctx.default_return_type.type.bases: - if base.type.fullname == 'mod.Attr': + for base in default.type.bases: + if base.type.fullname == "mod.Attr": attr_base = base break assert attr_base is not None last_arg_exprs = ctx.args[-1] - if any(isinstance(expr, NameExpr) and expr.name == 'True' for expr in last_arg_exprs): + if any(isinstance(expr, NameExpr) and expr.name == "True" for expr in last_arg_exprs): return attr_base assert len(attr_base.args) == 1 arg_type = attr_base.args[0] - return Instance(attr_base.type, [UnionType([arg_type, NoneType()])], - line=ctx.default_return_type.line, - column=ctx.default_return_type.column) + return Instance( + attr_base.type, + [UnionType([arg_type, NoneType()])], + line=default.line, + column=default.column, + ) + -def plugin(version): +def plugin(version: str) -> type[AttrPlugin]: return AttrPlugin diff --git a/test-data/unit/plugins/common_api_incremental.py b/test-data/unit/plugins/common_api_incremental.py index 2dcd559777ec..b14b2f92073e 100644 --- a/test-data/unit/plugins/common_api_incremental.py +++ b/test-data/unit/plugins/common_api_incremental.py @@ -1,44 +1,48 @@ -from mypy.plugin import Plugin -from mypy.nodes import ( - ClassDef, Block, TypeInfo, SymbolTable, SymbolTableNode, MDEF, GDEF, Var -) +from __future__ import annotations + +from typing import Callable + +from mypy.nodes import GDEF, MDEF, Block, ClassDef, SymbolTable, SymbolTableNode, TypeInfo, Var +from mypy.plugin import ClassDefContext, DynamicClassDefContext, Plugin class DynPlugin(Plugin): - def get_dynamic_class_hook(self, fullname): - if fullname == 'lib.declarative_base': + def get_dynamic_class_hook( + self, fullname: 
str + ) -> Callable[[DynamicClassDefContext], None] | None: + if fullname == "lib.declarative_base": return add_info_hook return None - def get_base_class_hook(self, fullname: str): + def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: sym = self.lookup_fully_qualified(fullname) if sym and isinstance(sym.node, TypeInfo): - if sym.node.metadata.get('magic'): + if sym.node.metadata.get("magic"): return add_magic_hook return None -def add_info_hook(ctx) -> None: +def add_info_hook(ctx: DynamicClassDefContext) -> None: class_def = ClassDef(ctx.name, Block([])) class_def.fullname = ctx.api.qualified_name(ctx.name) info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id) class_def.info = info - obj = ctx.api.named_type('builtins.object') + obj = ctx.api.named_type("builtins.object", []) info.mro = [info, obj.type] info.bases = [obj] ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info)) - info.metadata['magic'] = True + info.metadata["magic"] = {"value": True} -def add_magic_hook(ctx) -> None: +def add_magic_hook(ctx: ClassDefContext) -> None: info = ctx.cls.info - str_type = ctx.api.named_type_or_none('builtins.str', []) + str_type = ctx.api.named_type_or_none("builtins.str", []) assert str_type is not None - var = Var('__magic__', str_type) + var = Var("__magic__", str_type) var.info = info - info.names['__magic__'] = SymbolTableNode(MDEF, var) + info.names["__magic__"] = SymbolTableNode(MDEF, var) -def plugin(version): +def plugin(version: str) -> type[DynPlugin]: return DynPlugin diff --git a/test-data/unit/plugins/config_data.py b/test-data/unit/plugins/config_data.py index 059e036d5e32..9b828bc9ac0a 100644 --- a/test-data/unit/plugins/config_data.py +++ b/test-data/unit/plugins/config_data.py @@ -1,6 +1,7 @@ -import os -import json +from __future__ import annotations +import json +import os from typing import Any from mypy.plugin import Plugin, ReportConfigContext @@ -8,11 +9,11 @@ class 
ConfigDataPlugin(Plugin): def report_config_data(self, ctx: ReportConfigContext) -> Any: - path = os.path.join('tmp/test.json') + path = os.path.join("tmp/test.json") with open(path) as f: data = json.load(f) return data.get(ctx.id) -def plugin(version): +def plugin(version: str) -> type[ConfigDataPlugin]: return ConfigDataPlugin diff --git a/test-data/unit/plugins/custom_errorcode.py b/test-data/unit/plugins/custom_errorcode.py index 0e2209a32eca..0af87658e59f 100644 --- a/test-data/unit/plugins/custom_errorcode.py +++ b/test-data/unit/plugins/custom_errorcode.py @@ -1,20 +1,24 @@ +from __future__ import annotations + +from typing import Callable + from mypy.errorcodes import ErrorCode -from mypy.plugin import Plugin -from mypy.types import AnyType, TypeOfAny +from mypy.plugin import FunctionContext, Plugin +from mypy.types import AnyType, Type, TypeOfAny CUSTOM_ERROR = ErrorCode(code="custom", description="", category="Custom") class CustomErrorCodePlugin(Plugin): - def get_function_hook(self, fullname): + def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None: if fullname.endswith(".main"): return self.emit_error return None - def emit_error(self, ctx): + def emit_error(self, ctx: FunctionContext) -> Type: ctx.api.fail("Custom error", ctx.context, code=CUSTOM_ERROR) return AnyType(TypeOfAny.from_error) -def plugin(version): +def plugin(version: str) -> type[CustomErrorCodePlugin]: return CustomErrorCodePlugin diff --git a/test-data/unit/plugins/customentry.py b/test-data/unit/plugins/customentry.py index b3dacfd4cf44..1a7ed3348e12 100644 --- a/test-data/unit/plugins/customentry.py +++ b/test-data/unit/plugins/customentry.py @@ -1,14 +1,22 @@ -from mypy.plugin import Plugin +from __future__ import annotations + +from typing import Callable + +from mypy.plugin import FunctionContext, Plugin +from mypy.types import Type + class MyPlugin(Plugin): - def get_function_hook(self, fullname): - if fullname == '__main__.f': + def 
get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None: + if fullname == "__main__.f": return my_hook assert fullname return None -def my_hook(ctx): - return ctx.api.named_generic_type('builtins.int', []) -def register(version): +def my_hook(ctx: FunctionContext) -> Type: + return ctx.api.named_generic_type("builtins.int", []) + + +def register(version: str) -> type[MyPlugin]: return MyPlugin diff --git a/test-data/unit/plugins/customize_mro.py b/test-data/unit/plugins/customize_mro.py index 0f2396d98965..3b13b2e9d998 100644 --- a/test-data/unit/plugins/customize_mro.py +++ b/test-data/unit/plugins/customize_mro.py @@ -1,10 +1,17 @@ -from mypy.plugin import Plugin +from __future__ import annotations + +from typing import Callable + +from mypy.plugin import ClassDefContext, Plugin + class DummyPlugin(Plugin): - def get_customize_class_mro_hook(self, fullname): - def analyze(classdef_ctx): + def get_customize_class_mro_hook(self, fullname: str) -> Callable[[ClassDefContext], None]: + def analyze(classdef_ctx: ClassDefContext) -> None: pass + return analyze -def plugin(version): + +def plugin(version: str) -> type[DummyPlugin]: return DummyPlugin diff --git a/test-data/unit/plugins/decimal_to_int.py b/test-data/unit/plugins/decimal_to_int.py index 94aa33ef6df1..2318b2367d33 100644 --- a/test-data/unit/plugins/decimal_to_int.py +++ b/test-data/unit/plugins/decimal_to_int.py @@ -1,14 +1,21 @@ -from mypy.plugin import Plugin +from __future__ import annotations + +from typing import Callable + +from mypy.plugin import AnalyzeTypeContext, Plugin +from mypy.types import Type class MyPlugin(Plugin): - def get_type_analyze_hook(self, fullname): + def get_type_analyze_hook(self, fullname: str) -> Callable[[AnalyzeTypeContext], Type] | None: if fullname in ("decimal.Decimal", "_decimal.Decimal"): return decimal_to_int_hook return None -def plugin(version): - return MyPlugin -def decimal_to_int_hook(ctx): - return ctx.api.named_type('builtins.int', 
[]) +def decimal_to_int_hook(ctx: AnalyzeTypeContext) -> Type: + return ctx.api.named_type("builtins.int", []) + + +def plugin(version: str) -> type[MyPlugin]: + return MyPlugin diff --git a/test-data/unit/plugins/depshook.py b/test-data/unit/plugins/depshook.py index 76277f3cb82b..bb2460de1196 100644 --- a/test-data/unit/plugins/depshook.py +++ b/test-data/unit/plugins/depshook.py @@ -1,15 +1,15 @@ -from typing import List, Tuple +from __future__ import annotations -from mypy.plugin import Plugin from mypy.nodes import MypyFile +from mypy.plugin import Plugin class DepsPlugin(Plugin): - def get_additional_deps(self, file: MypyFile) -> List[Tuple[int, str, int]]: - if file.fullname == '__main__': - return [(10, 'err', -1)] + def get_additional_deps(self, file: MypyFile) -> list[tuple[int, str, int]]: + if file.fullname == "__main__": + return [(10, "err", -1)] return [] -def plugin(version): +def plugin(version: str) -> type[DepsPlugin]: return DepsPlugin diff --git a/test-data/unit/plugins/descriptor.py b/test-data/unit/plugins/descriptor.py index afbadcdfb671..d38853367906 100644 --- a/test-data/unit/plugins/descriptor.py +++ b/test-data/unit/plugins/descriptor.py @@ -1,28 +1,38 @@ -from mypy.plugin import Plugin -from mypy.types import NoneType, CallableType +from __future__ import annotations + +from typing import Callable + +from mypy.plugin import MethodContext, MethodSigContext, Plugin +from mypy.types import CallableType, NoneType, Type, get_proper_type class DescriptorPlugin(Plugin): - def get_method_hook(self, fullname): + def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None: if fullname == "__main__.Desc.__get__": return get_hook return None - def get_method_signature_hook(self, fullname): + def get_method_signature_hook( + self, fullname: str + ) -> Callable[[MethodSigContext], CallableType] | None: if fullname == "__main__.Desc.__set__": return set_hook return None -def get_hook(ctx): - if isinstance(ctx.arg_types[0][0], 
NoneType): - return ctx.api.named_type("builtins.str") - return ctx.api.named_type("builtins.int") +def get_hook(ctx: MethodContext) -> Type: + arg = get_proper_type(ctx.arg_types[0][0]) + if isinstance(arg, NoneType): + return ctx.api.named_generic_type("builtins.str", []) + return ctx.api.named_generic_type("builtins.int", []) -def set_hook(ctx): +def set_hook(ctx: MethodSigContext) -> CallableType: return CallableType( - [ctx.api.named_type("__main__.Cls"), ctx.api.named_type("builtins.int")], + [ + ctx.api.named_generic_type("__main__.Cls", []), + ctx.api.named_generic_type("builtins.int", []), + ], ctx.default_signature.arg_kinds, ctx.default_signature.arg_names, ctx.default_signature.ret_type, @@ -30,5 +40,5 @@ def set_hook(ctx): ) -def plugin(version): +def plugin(version: str) -> type[DescriptorPlugin]: return DescriptorPlugin diff --git a/test-data/unit/plugins/dyn_class.py b/test-data/unit/plugins/dyn_class.py index 54bf377aa8ef..18e948e3dd2a 100644 --- a/test-data/unit/plugins/dyn_class.py +++ b/test-data/unit/plugins/dyn_class.py @@ -1,47 +1,57 @@ -from mypy.plugin import Plugin -from mypy.nodes import ( - ClassDef, Block, TypeInfo, SymbolTable, SymbolTableNode, GDEF, Var -) -from mypy.types import Instance +from __future__ import annotations + +from typing import Callable + +from mypy.nodes import GDEF, Block, ClassDef, SymbolTable, SymbolTableNode, TypeInfo, Var +from mypy.plugin import ClassDefContext, DynamicClassDefContext, Plugin +from mypy.types import Instance, get_proper_type DECL_BASES = set() + class DynPlugin(Plugin): - def get_dynamic_class_hook(self, fullname): - if fullname == 'mod.declarative_base': + def get_dynamic_class_hook( + self, fullname: str + ) -> Callable[[DynamicClassDefContext], None] | None: + if fullname == "mod.declarative_base": return add_info_hook return None - def get_base_class_hook(self, fullname: str): + def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: if fullname in 
DECL_BASES: return replace_col_hook return None -def add_info_hook(ctx): + +def add_info_hook(ctx: DynamicClassDefContext) -> None: class_def = ClassDef(ctx.name, Block([])) class_def.fullname = ctx.api.qualified_name(ctx.name) info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id) class_def.info = info - obj = ctx.api.named_type('builtins.object') + obj = ctx.api.named_type("builtins.object") info.mro = [info, obj.type] info.bases = [obj] ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info)) DECL_BASES.add(class_def.fullname) -def replace_col_hook(ctx): + +def replace_col_hook(ctx: ClassDefContext) -> None: info = ctx.cls.info for sym in info.names.values(): node = sym.node - if isinstance(node, Var) and isinstance(node.type, Instance): - if node.type.type.fullname == 'mod.Column': - new_sym = ctx.api.lookup_fully_qualified_or_none('mod.Instr') + if isinstance(node, Var) and isinstance( + (node_type := get_proper_type(node.type)), Instance + ): + if node_type.type.fullname == "mod.Column": + new_sym = ctx.api.lookup_fully_qualified_or_none("mod.Instr") if new_sym: new_info = new_sym.node assert isinstance(new_info, TypeInfo) - node.type = Instance(new_info, node.type.args, - node.type.line, - node.type.column) + node.type = Instance( + new_info, node_type.args, node_type.line, node_type.column + ) + -def plugin(version): +def plugin(version: str) -> type[DynPlugin]: return DynPlugin diff --git a/test-data/unit/plugins/dyn_class_from_method.py b/test-data/unit/plugins/dyn_class_from_method.py index 4c3904907750..b84754654084 100644 --- a/test-data/unit/plugins/dyn_class_from_method.py +++ b/test-data/unit/plugins/dyn_class_from_method.py @@ -1,28 +1,38 @@ -from mypy.nodes import (Block, ClassDef, GDEF, SymbolTable, SymbolTableNode, TypeInfo) +from __future__ import annotations + +from typing import Callable + +from mypy.nodes import GDEF, Block, ClassDef, RefExpr, SymbolTable, SymbolTableNode, TypeInfo from mypy.plugin import 
DynamicClassDefContext, Plugin from mypy.types import Instance class DynPlugin(Plugin): - def get_dynamic_class_hook(self, fullname): - if 'from_queryset' in fullname: + def get_dynamic_class_hook( + self, fullname: str + ) -> Callable[[DynamicClassDefContext], None] | None: + if "from_queryset" in fullname: return add_info_hook return None -def add_info_hook(ctx: DynamicClassDefContext): +def add_info_hook(ctx: DynamicClassDefContext) -> None: class_def = ClassDef(ctx.name, Block([])) class_def.fullname = ctx.api.qualified_name(ctx.name) info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id) class_def.info = info + assert isinstance(ctx.call.args[0], RefExpr) queryset_type_fullname = ctx.call.args[0].fullname - queryset_info = ctx.api.lookup_fully_qualified_or_none(queryset_type_fullname).node # type: TypeInfo - obj = ctx.api.named_type('builtins.object') + queryset_node = ctx.api.lookup_fully_qualified_or_none(queryset_type_fullname) + assert queryset_node is not None + queryset_info = queryset_node.node + assert isinstance(queryset_info, TypeInfo) + obj = ctx.api.named_type("builtins.object") info.mro = [info, queryset_info, obj.type] info.bases = [Instance(queryset_info, [])] ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info)) -def plugin(version): +def plugin(version: str) -> type[DynPlugin]: return DynPlugin diff --git a/test-data/unit/plugins/fnplugin.py b/test-data/unit/plugins/fnplugin.py index 684d6343458e..a5a7e57101c2 100644 --- a/test-data/unit/plugins/fnplugin.py +++ b/test-data/unit/plugins/fnplugin.py @@ -1,14 +1,22 @@ -from mypy.plugin import Plugin +from __future__ import annotations + +from typing import Callable + +from mypy.plugin import FunctionContext, Plugin +from mypy.types import Type + class MyPlugin(Plugin): - def get_function_hook(self, fullname): - if fullname == '__main__.f': + def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None: + if fullname == "__main__.f": return my_hook 
assert fullname is not None return None -def my_hook(ctx): - return ctx.api.named_generic_type('builtins.int', []) -def plugin(version): +def my_hook(ctx: FunctionContext) -> Type: + return ctx.api.named_generic_type("builtins.int", []) + + +def plugin(version: str) -> type[MyPlugin]: return MyPlugin diff --git a/test-data/unit/plugins/fully_qualified_test_hook.py b/test-data/unit/plugins/fully_qualified_test_hook.py index 529cf25a1215..9230091bba1a 100644 --- a/test-data/unit/plugins/fully_qualified_test_hook.py +++ b/test-data/unit/plugins/fully_qualified_test_hook.py @@ -1,16 +1,28 @@ -from mypy.plugin import CallableType, MethodSigContext, Plugin +from __future__ import annotations + +from typing import Callable + +from mypy.plugin import MethodSigContext, Plugin +from mypy.types import CallableType + class FullyQualifiedTestPlugin(Plugin): - def get_method_signature_hook(self, fullname): + def get_method_signature_hook( + self, fullname: str + ) -> Callable[[MethodSigContext], CallableType] | None: # Ensure that all names are fully qualified - if 'FullyQualifiedTest' in fullname: - assert fullname.startswith('__main__.') and not ' of ' in fullname, fullname + if "FullyQualifiedTest" in fullname: + assert fullname.startswith("__main__.") and " of " not in fullname, fullname return my_hook return None + def my_hook(ctx: MethodSigContext) -> CallableType: - return ctx.default_signature.copy_modified(ret_type=ctx.api.named_generic_type('builtins.int', [])) + return ctx.default_signature.copy_modified( + ret_type=ctx.api.named_generic_type("builtins.int", []) + ) + -def plugin(version): +def plugin(version: str) -> type[FullyQualifiedTestPlugin]: return FullyQualifiedTestPlugin diff --git a/test-data/unit/plugins/function_sig_hook.py b/test-data/unit/plugins/function_sig_hook.py index 4d901b96716e..a8d3cf058062 100644 --- a/test-data/unit/plugins/function_sig_hook.py +++ b/test-data/unit/plugins/function_sig_hook.py @@ -1,9 +1,16 @@ -from mypy.plugin import 
CallableType, FunctionSigContext, Plugin +from __future__ import annotations + +from typing import Callable + +from mypy.plugin import FunctionSigContext, Plugin +from mypy.types import CallableType class FunctionSigPlugin(Plugin): - def get_function_signature_hook(self, fullname): - if fullname == '__main__.dynamic_signature': + def get_function_signature_hook( + self, fullname: str + ) -> Callable[[FunctionSigContext], CallableType] | None: + if fullname == "__main__.dynamic_signature": return my_hook return None @@ -13,11 +20,8 @@ def my_hook(ctx: FunctionSigContext) -> CallableType: if len(arg1_args) != 1: return ctx.default_signature arg1_type = ctx.api.get_expression_type(arg1_args[0]) - return ctx.default_signature.copy_modified( - arg_types=[arg1_type], - ret_type=arg1_type, - ) + return ctx.default_signature.copy_modified(arg_types=[arg1_type], ret_type=arg1_type) -def plugin(version): +def plugin(version: str) -> type[FunctionSigPlugin]: return FunctionSigPlugin diff --git a/test-data/unit/plugins/method_in_decorator.py b/test-data/unit/plugins/method_in_decorator.py index 99774dfcc7ef..3fba7692266c 100644 --- a/test-data/unit/plugins/method_in_decorator.py +++ b/test-data/unit/plugins/method_in_decorator.py @@ -1,19 +1,25 @@ -from mypy.types import CallableType, Type -from typing import Callable, Optional +from __future__ import annotations + +from typing import Callable + from mypy.plugin import MethodContext, Plugin +from mypy.types import CallableType, Type, get_proper_type class MethodDecoratorPlugin(Plugin): - def get_method_hook(self, fullname: str) -> Optional[Callable[[MethodContext], Type]]: - if 'Foo.a' in fullname: + def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None: + if "Foo.a" in fullname: return method_decorator_callback return None + def method_decorator_callback(ctx: MethodContext) -> Type: - if isinstance(ctx.default_return_type, CallableType): - str_type = ctx.api.named_generic_type('builtins.str', []) 
- return ctx.default_return_type.copy_modified(ret_type=str_type) + default = get_proper_type(ctx.default_return_type) + if isinstance(default, CallableType): + str_type = ctx.api.named_generic_type("builtins.str", []) + return default.copy_modified(ret_type=str_type) return ctx.default_return_type -def plugin(version): + +def plugin(version: str) -> type[MethodDecoratorPlugin]: return MethodDecoratorPlugin diff --git a/test-data/unit/plugins/method_sig_hook.py b/test-data/unit/plugins/method_sig_hook.py index 25c2842e6620..b78831cc45d5 100644 --- a/test-data/unit/plugins/method_sig_hook.py +++ b/test-data/unit/plugins/method_sig_hook.py @@ -1,30 +1,41 @@ -from mypy.plugin import CallableType, CheckerPluginInterface, MethodSigContext, Plugin -from mypy.types import Instance, Type +from __future__ import annotations + +from typing import Callable + +from mypy.plugin import CheckerPluginInterface, MethodSigContext, Plugin +from mypy.types import CallableType, Instance, Type, get_proper_type + class MethodSigPlugin(Plugin): - def get_method_signature_hook(self, fullname): + def get_method_signature_hook( + self, fullname: str + ) -> Callable[[MethodSigContext], CallableType] | None: # Ensure that all names are fully qualified - assert not fullname.endswith(' of Foo') + assert not fullname.endswith(" of Foo") - if fullname.startswith('__main__.Foo.'): + if fullname.startswith("__main__.Foo."): return my_hook return None + def _str_to_int(api: CheckerPluginInterface, typ: Type) -> Type: + typ = get_proper_type(typ) if isinstance(typ, Instance): - if typ.type.fullname == 'builtins.str': - return api.named_generic_type('builtins.int', []) + if typ.type.fullname == "builtins.str": + return api.named_generic_type("builtins.int", []) elif typ.args: return typ.copy_modified(args=[_str_to_int(api, t) for t in typ.args]) return typ + def my_hook(ctx: MethodSigContext) -> CallableType: return ctx.default_signature.copy_modified( arg_types=[_str_to_int(ctx.api, t) for t in 
ctx.default_signature.arg_types], ret_type=_str_to_int(ctx.api, ctx.default_signature.ret_type), ) -def plugin(version): + +def plugin(version: str) -> type[MethodSigPlugin]: return MethodSigPlugin diff --git a/test-data/unit/plugins/named_callable.py b/test-data/unit/plugins/named_callable.py index e40d181d2bad..c37e11c32125 100644 --- a/test-data/unit/plugins/named_callable.py +++ b/test-data/unit/plugins/named_callable.py @@ -1,28 +1,33 @@ -from mypy.plugin import Plugin -from mypy.types import CallableType +from __future__ import annotations + +from typing import Callable + +from mypy.plugin import FunctionContext, Plugin +from mypy.types import CallableType, Type, get_proper_type class MyPlugin(Plugin): - def get_function_hook(self, fullname): - if fullname == 'm.decorator1': + def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None: + if fullname == "m.decorator1": return decorator_call_hook - if fullname == 'm._decorated': # This is a dummy name generated by the plugin + if fullname == "m._decorated": # This is a dummy name generated by the plugin return decorate_hook return None -def decorator_call_hook(ctx): - if isinstance(ctx.default_return_type, CallableType): - return ctx.default_return_type.copy_modified(name='m._decorated') +def decorator_call_hook(ctx: FunctionContext) -> Type: + default = get_proper_type(ctx.default_return_type) + if isinstance(default, CallableType): + return default.copy_modified(name="m._decorated") return ctx.default_return_type -def decorate_hook(ctx): - if isinstance(ctx.default_return_type, CallableType): - return ctx.default_return_type.copy_modified( - ret_type=ctx.api.named_generic_type('builtins.str', [])) +def decorate_hook(ctx: FunctionContext) -> Type: + default = get_proper_type(ctx.default_return_type) + if isinstance(default, CallableType): + return default.copy_modified(ret_type=ctx.api.named_generic_type("builtins.str", [])) return ctx.default_return_type -def plugin(version): +def 
plugin(version: str) -> type[MyPlugin]: return MyPlugin diff --git a/test-data/unit/plugins/plugin2.py b/test-data/unit/plugins/plugin2.py index b530a62d23aa..e486d96ea8bf 100644 --- a/test-data/unit/plugins/plugin2.py +++ b/test-data/unit/plugins/plugin2.py @@ -1,13 +1,21 @@ -from mypy.plugin import Plugin +from __future__ import annotations + +from typing import Callable + +from mypy.plugin import FunctionContext, Plugin +from mypy.types import Type + class Plugin2(Plugin): - def get_function_hook(self, fullname): - if fullname in ('__main__.f', '__main__.g'): + def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None: + if fullname in ("__main__.f", "__main__.g"): return str_hook return None -def str_hook(ctx): - return ctx.api.named_generic_type('builtins.str', []) -def plugin(version): +def str_hook(ctx: FunctionContext) -> Type: + return ctx.api.named_generic_type("builtins.str", []) + + +def plugin(version: str) -> type[Plugin2]: return Plugin2 diff --git a/test-data/unit/plugins/type_anal_hook.py b/test-data/unit/plugins/type_anal_hook.py index 86d18d8c8611..c380bbe873fe 100644 --- a/test-data/unit/plugins/type_anal_hook.py +++ b/test-data/unit/plugins/type_anal_hook.py @@ -1,22 +1,23 @@ -from typing import Optional, Callable +from __future__ import annotations + +from typing import Callable + +from mypy.plugin import AnalyzeTypeContext, Plugin -from mypy.plugin import Plugin, AnalyzeTypeContext -from mypy.types import Type, TypeList, AnyType, CallableType, TypeOfAny # The official name changed to NoneType but we have an alias for plugin compat reasons # so we'll keep testing that here. 
-from mypy.types import NoneTyp +from mypy.types import AnyType, CallableType, NoneTyp, Type, TypeList, TypeOfAny + class TypeAnalyzePlugin(Plugin): - def get_type_analyze_hook(self, fullname: str - ) -> Optional[Callable[[AnalyzeTypeContext], Type]]: - if fullname == 'm.Signal': + def get_type_analyze_hook(self, fullname: str) -> Callable[[AnalyzeTypeContext], Type] | None: + if fullname == "m.Signal": return signal_type_analyze_callback return None def signal_type_analyze_callback(ctx: AnalyzeTypeContext) -> Type: - if (len(ctx.type.args) != 1 - or not isinstance(ctx.type.args[0], TypeList)): + if len(ctx.type.args) != 1 or not isinstance(ctx.type.args[0], TypeList): ctx.api.fail('Invalid "Signal" type (expected "Signal[[t, ...]]")', ctx.context) return AnyType(TypeOfAny.from_error) @@ -27,13 +28,11 @@ def signal_type_analyze_callback(ctx: AnalyzeTypeContext) -> Type: return AnyType(TypeOfAny.from_error) # Error generated elsewhere arg_types, arg_kinds, arg_names = analyzed arg_types = [ctx.api.analyze_type(arg) for arg in arg_types] - type_arg = CallableType(arg_types, - arg_kinds, - arg_names, - NoneTyp(), - ctx.api.named_type('builtins.function', [])) - return ctx.api.named_type('m.Signal', [type_arg]) + type_arg = CallableType( + arg_types, arg_kinds, arg_names, NoneTyp(), ctx.api.named_type("builtins.function", []) + ) + return ctx.api.named_type("m.Signal", [type_arg]) -def plugin(version): +def plugin(version: str) -> type[TypeAnalyzePlugin]: return TypeAnalyzePlugin diff --git a/test-data/unit/plugins/union_method.py b/test-data/unit/plugins/union_method.py index a7621553f6ad..7c62ffb8c0cc 100644 --- a/test-data/unit/plugins/union_method.py +++ b/test-data/unit/plugins/union_method.py @@ -1,34 +1,40 @@ -from mypy.plugin import ( - CallableType, CheckerPluginInterface, MethodSigContext, MethodContext, Plugin -) -from mypy.types import Instance, Type +from __future__ import annotations + +from typing import Callable + +from mypy.plugin import 
CheckerPluginInterface, MethodContext, MethodSigContext, Plugin +from mypy.types import CallableType, Instance, Type, get_proper_type class MethodPlugin(Plugin): - def get_method_signature_hook(self, fullname): - if fullname.startswith('__main__.Foo.'): + def get_method_signature_hook( + self, fullname: str + ) -> Callable[[MethodSigContext], CallableType] | None: + if fullname.startswith("__main__.Foo."): return my_meth_sig_hook return None - def get_method_hook(self, fullname): - if fullname.startswith('__main__.Bar.'): + def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None: + if fullname.startswith("__main__.Bar."): return my_meth_hook return None def _str_to_int(api: CheckerPluginInterface, typ: Type) -> Type: + typ = get_proper_type(typ) if isinstance(typ, Instance): - if typ.type.fullname == 'builtins.str': - return api.named_generic_type('builtins.int', []) + if typ.type.fullname == "builtins.str": + return api.named_generic_type("builtins.int", []) elif typ.args: return typ.copy_modified(args=[_str_to_int(api, t) for t in typ.args]) return typ def _float_to_int(api: CheckerPluginInterface, typ: Type) -> Type: + typ = get_proper_type(typ) if isinstance(typ, Instance): - if typ.type.fullname == 'builtins.float': - return api.named_generic_type('builtins.int', []) + if typ.type.fullname == "builtins.float": + return api.named_generic_type("builtins.int", []) elif typ.args: return typ.copy_modified(args=[_float_to_int(api, t) for t in typ.args]) return typ @@ -45,5 +51,5 @@ def my_meth_hook(ctx: MethodContext) -> Type: return _float_to_int(ctx.api, ctx.default_return_type) -def plugin(version): +def plugin(version: str) -> type[MethodPlugin]: return MethodPlugin diff --git a/tox.ini b/tox.ini index a809c4d2c570..e07acdc5200d 100644 --- a/tox.ini +++ b/tox.ini @@ -55,3 +55,4 @@ passenv = commands = python runtests.py self python -m mypy --config-file mypy_self_check.ini misc --exclude misc/sync-typeshed.py + python -m mypy 
--config-file mypy_self_check.ini test-data/unit/plugins From bd212bcc2229779c0f6c96b16bf9d685e98884c1 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Mon, 4 Sep 2023 18:43:05 +0300 Subject: [PATCH 028/144] Remove type aliases that are long supported (#16039) Some builtin aliases are available for all python versions that we support. So, there's no need to check them in `semanal`: https://github.com/python/mypy/blob/8738886861682e0d168ea321c2cc6ee5b566cb8b/mypy/semanal.py#L673-L689 --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- mypy/nodes.py | 13 +------------ mypy/semanal.py | 5 ++++- 2 files changed, 5 insertions(+), 13 deletions(-) diff --git a/mypy/nodes.py b/mypy/nodes.py index db42dd6b3949..d29e99ccace7 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -138,18 +138,7 @@ def set_line( # This keeps track of the oldest supported Python version where the corresponding # alias source is available. -type_aliases_source_versions: Final = { - "typing.List": (2, 7), - "typing.Dict": (2, 7), - "typing.Set": (2, 7), - "typing.FrozenSet": (2, 7), - "typing.ChainMap": (3, 3), - "typing.Counter": (2, 7), - "typing.DefaultDict": (2, 7), - "typing.Deque": (2, 7), - "typing.OrderedDict": (3, 7), - "typing.LiteralString": (3, 11), -} +type_aliases_source_versions: Final = {"typing.LiteralString": (3, 11)} # This keeps track of aliases in `typing_extensions`, which we treat specially. 
typing_extensions_aliases: Final = { diff --git a/mypy/semanal.py b/mypy/semanal.py index be7e733a0816..ec4d32aefeb9 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -680,7 +680,10 @@ def add_builtin_aliases(self, tree: MypyFile) -> None: """ assert tree.fullname == "typing" for alias, target_name in type_aliases.items(): - if type_aliases_source_versions[alias] > self.options.python_version: + if ( + alias in type_aliases_source_versions + and type_aliases_source_versions[alias] > self.options.python_version + ): # This alias is not available on this Python version. continue name = alias.split(".")[-1] From c712079e1cbd74e2ea37da02d66152810fb69903 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Mon, 4 Sep 2023 18:44:51 +0300 Subject: [PATCH 029/144] Do not use deprecated `add_method` in `attrs` plugin (#16037) CC @ikonst --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- mypy/plugins/attrs.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py index 6f5b6f35da07..3ddc234a7e4a 100644 --- a/mypy/plugins/attrs.py +++ b/mypy/plugins/attrs.py @@ -51,7 +51,7 @@ _get_bool_argument, _get_decorator_bool_argument, add_attribute_to_class, - add_method, + add_method_to_class, deserialize_and_fixup_type, ) from mypy.server.trigger import make_wildcard_trigger @@ -952,7 +952,9 @@ def add_method( tvd: If the method is generic these should be the type variables. 
""" self_type = self_type if self_type is not None else self.self_type - add_method(self.ctx, method_name, args, ret_type, self_type, tvd) + add_method_to_class( + self.ctx.api, self.ctx.cls, method_name, args, ret_type, self_type, tvd + ) def _get_attrs_init_type(typ: Instance) -> CallableType | None: From 4496a005a84f7daedc1ef2e801583127f5995f75 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Mon, 4 Sep 2023 19:28:00 +0300 Subject: [PATCH 030/144] Use latest `actions/checkout@v4` (#16042) Looks like recent CI failures are related. Release docs: https://github.com/actions/checkout/releases/tag/v4.0.0 --- .github/workflows/build_wheels.yml | 2 +- .github/workflows/docs.yml | 2 +- .github/workflows/mypy_primer.yml | 2 +- .github/workflows/sync_typeshed.yml | 2 +- .github/workflows/test.yml | 4 ++-- .github/workflows/test_stubgenc.yml | 2 +- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 3f4ea5e42f9b..f1438279673d 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -13,7 +13,7 @@ jobs: if: github.repository == 'python/mypy' runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: '3.11' diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 037738d4b3aa..6c53afb9aa7c 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -29,7 +29,7 @@ jobs: TOX_SKIP_MISSING_INTERPRETERS: False VERIFY_MYPY_ERROR_CODES: 1 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: '3.8' diff --git a/.github/workflows/mypy_primer.yml b/.github/workflows/mypy_primer.yml index 2958b8fc325b..f8991e27970a 100644 --- a/.github/workflows/mypy_primer.yml +++ b/.github/workflows/mypy_primer.yml @@ -33,7 +33,7 @@ jobs: shard-index: [0, 1, 2, 3, 4] fail-fast: false steps: - - 
uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: path: mypy_to_test fetch-depth: 0 diff --git a/.github/workflows/sync_typeshed.yml b/.github/workflows/sync_typeshed.yml index 1db2e846f099..de9e0aad599f 100644 --- a/.github/workflows/sync_typeshed.yml +++ b/.github/workflows/sync_typeshed.yml @@ -15,7 +15,7 @@ jobs: if: github.repository == 'python/mypy' runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 # TODO: use whatever solution ends up working for diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index d2e7e7258500..3bcd9e059589 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -119,7 +119,7 @@ jobs: # Pytest PYTEST_ADDOPTS: --color=yes steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} @@ -162,7 +162,7 @@ jobs: CXX: i686-linux-gnu-g++ CC: i686-linux-gnu-gcc steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install 32-bit build dependencies run: | sudo dpkg --add-architecture i386 && \ diff --git a/.github/workflows/test_stubgenc.yml b/.github/workflows/test_stubgenc.yml index 33466b9870ff..a2fb3e9dce6b 100644 --- a/.github/workflows/test_stubgenc.yml +++ b/.github/workflows/test_stubgenc.yml @@ -26,7 +26,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup 🐍 3.8 uses: actions/setup-python@v4 From 5d9d13ebc9899ec43699b8e91ec5587d6f962283 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Tue, 5 Sep 2023 14:38:58 +0300 Subject: [PATCH 031/144] Document `force_union_syntax` and `force_uppercase_builtins` (#16048) Users don't know about them: https://github.com/typeddjango/pytest-mypy-plugins/issues/126 Since they are quite important for testing, I think that it is a must to include them. 
--------- Co-authored-by: Alex Waygood --- docs/source/config_file.rst | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/docs/source/config_file.rst b/docs/source/config_file.rst index c0798bbf03f1..b5ce23ff11ec 100644 --- a/docs/source/config_file.rst +++ b/docs/source/config_file.rst @@ -816,6 +816,22 @@ These options may only be set in the global section (``[mypy]``). Show absolute paths to files. +.. confval:: force_uppercase_builtins + + :type: boolean + :default: False + + Always use ``List`` instead of ``list`` in error messages, + even on Python 3.9+. + +.. confval:: force_union_syntax + + :type: boolean + :default: False + + Always use ``Union[]`` and ``Optional[]`` for union types + in error messages (instead of the ``|`` operator), + even on Python 3.10+. Incremental mode **************** From c0906408c10d24d748711fa24be5befb2c794d4c Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Tue, 5 Sep 2023 14:57:38 +0300 Subject: [PATCH 032/144] Add docs about `--force-uppercase-builtins` and `--force-union-syntax` (#16049) Refs https://github.com/python/mypy/pull/16048 --- docs/source/command_line.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst index 727d500e2d4d..4e954c7c2ccb 100644 --- a/docs/source/command_line.rst +++ b/docs/source/command_line.rst @@ -789,6 +789,17 @@ in error messages. useful or they may be overly noisy. If ``N`` is negative, there is no limit. The default limit is 200. +.. option:: --force-uppercase-builtins + + Always use ``List`` instead of ``list`` in error messages, + even on Python 3.9+. + +.. option:: --force-union-syntax + + Always use ``Union[]`` and ``Optional[]`` for union types + in error messages (instead of the ``|`` operator), + even on Python 3.10+. + .. 
_incremental: From ed9b8990025a81a12e32bec59f2f3bfab3d7c71b Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Wed, 6 Sep 2023 19:43:24 +0200 Subject: [PATCH 033/144] Clear cache when adding --new-type-inference (#16059) Add `new_type_inference` to the list of options affecting the cache. --- mypy/options.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mypy/options.py b/mypy/options.py index 5e451c0aa0a3..007ae0a78aa1 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -62,6 +62,7 @@ class BuildType: | { "platform", "bazel", + "new_type_inference", "plugins", "disable_bytearray_promotion", "disable_memoryview_promotion", From 175c5a59f18df9d56b3c2fb0e2a9669dd196c311 Mon Sep 17 00:00:00 2001 From: Randolf Scholz Date: Thu, 7 Sep 2023 18:30:07 +0200 Subject: [PATCH 034/144] Introduce error category [unsafe-overload] (#16061) fixes #16060 Co-authored-by: Alex Waygood --- mypy/errorcodes.py | 7 +++++++ mypy/messages.py | 1 + mypy/types.py | 2 +- test-data/unit/check-errorcodes.test | 14 ++++++++++++++ 4 files changed, 23 insertions(+), 1 deletion(-) diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index 3594458fa362..70b8cffe9053 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -261,3 +261,10 @@ def __hash__(self) -> int: # This is a catch-all for remaining uncategorized errors. 
MISC: Final = ErrorCode("misc", "Miscellaneous other checks", "General") + +UNSAFE_OVERLOAD: Final[ErrorCode] = ErrorCode( + "unsafe-overload", + "Warn if multiple @overload variants overlap in unsafe ways", + "General", + sub_code_of=MISC, +) diff --git a/mypy/messages.py b/mypy/messages.py index bba9c3c3cdea..a58c5f91c4b1 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1604,6 +1604,7 @@ def overloaded_signatures_overlap(self, index1: int, index2: int, context: Conte "Overloaded function signatures {} and {} overlap with " "incompatible return types".format(index1, index2), context, + code=codes.UNSAFE_OVERLOAD, ) def overloaded_signature_will_never_match( diff --git a/mypy/types.py b/mypy/types.py index f974157ce84d..cee4595b67cc 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -3019,7 +3019,7 @@ def get_proper_type(typ: Type | None) -> ProperType | None: @overload -def get_proper_types(types: list[Type] | tuple[Type, ...]) -> list[ProperType]: # type: ignore[misc] +def get_proper_types(types: list[Type] | tuple[Type, ...]) -> list[ProperType]: # type: ignore[unsafe-overload] ... diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index df14e328ed72..72edf2f22c05 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -1072,3 +1072,17 @@ A.f = h # type: ignore[assignment] # E: Unused "type: ignore" comment, use nar [case testUnusedIgnoreEnableCode] # flags: --enable-error-code=unused-ignore x = 1 # type: ignore # E: Unused "type: ignore" comment [unused-ignore] + +[case testErrorCodeUnsafeOverloadError] +from typing import overload, Union + +@overload +def unsafe_func(x: int) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types [unsafe-overload] +@overload +def unsafe_func(x: object) -> str: ... 
+def unsafe_func(x: object) -> Union[int, str]: + if isinstance(x, int): + return 42 + else: + return "some string" +[builtins fixtures/isinstancelist.pyi] From 816ba3b33dd157def6b7d8c0b0fcca65ff2cbc05 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Thu, 7 Sep 2023 21:21:46 +0100 Subject: [PATCH 035/144] Build the docs in CI for all PRs touching the `mypy/` directory (#16068) 1. #16061 added a new error code, but didn't add any docs for the new error code 2. Because nothing in the `docs/` directory was modified, the docs CI job didn't run on that PR 3. Now the docs build is failing on `master` because we have an error code without any documentation: https://github.com/python/mypy/actions/runs/6112378542/job/16589719563 --- .github/workflows/docs.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 6c53afb9aa7c..ad6b57c53fd9 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -8,6 +8,10 @@ on: pull_request: paths: - 'docs/**' + # We now have a docs check that fails if any error codes don't have documentation, + # so it's important to do the docs build on all PRs touching mypy/errorcodes.py + # in case somebody's adding a new error code without any docs + - 'mypy/errorcodes.py' - 'mypyc/doc/**' - '**/*.rst' - '**/*.md' From 8b73cc22c6a251682f777b104677fa0e1ed5fd67 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Thu, 7 Sep 2023 23:23:25 +0100 Subject: [PATCH 036/144] Complete type analysis of variadic types (#15991) This PR closes the first part of support for `TypeVarTuple`: the "static" analysis of types (of course everything is static in mypy, but some parts are more static): `semanal`/`typeanal`, `expand_type()`, `map_instance_to_supertype()`, `erase_type()` (things that precede and/or form foundation for type inference and subtyping). This one was quite tricky, supporting unpacks of forward references required some thinking. 
What is included in this PR: * Moving argument count validation from `semanal_typeargs` to `typeanal`. In one of previous PRs I mentioned that `get_proper_type()` may be called during semantic analysis causing troubles if we have invalid aliases. So we need to move validation to early stage. For instances, this is not required, but I strongly prefer keeping instances and aliases similar. And ideally at some point we can combine the logic, since it gets more and more similar. At some point we may want to prohibit using `get_proper_type()` during semantic analysis, but I don't want to block `TypeVarTuple` support on this, since this may be a significant refactoring. * Fixing `map_instance_to_supertype()` and `erase_type()`. These two are straightforward, we either use `expand_type()` logic directly (by calling it), or following the same logic. * Few simplifications in `expandtype` and `typeops` following previous normalizations of representation, unless there is a flaw in my logic, removed branches should be all dead code. * Allow (only fixed) unpacks in argument lists for non-variadic types. They were prohibited for no good reason. * (Somewhat limited) support for forward references in unpacks. As I mentioned this one is tricky because of how forward references are represented. Usually they follow either a life cycle like: `Any` -> ``, or `` -> `` -> `` (second one is relatively rare and usually only appears for potentially recursive things like base classes or type alias targets). It looks like `` can never appear as a _valid_ unpack target, I don't have a proof for this, but I was not able to trigger this, so I am not handling it (possible downside is that there may be extra errors about invalid argument count for invalid unpack targets). If I am wrong and this can happen in some valid cases, we can add handling for unpacks of placeholders later. 
Currently, the handling for `Any` stage of forward references is following: if we detect it, we simply create a dummy valid alias or instance. This logic should work for the same reason having plain `Any` worked in the first place (and why all tests pass if we delete `visit_placeholder_type()`): because (almost) each time we analyze a type, it is either already complete, or we analyze it _from scratch_, i.e. we call `expr_to_unanalyzed_type()`, then `visit_unbound_type()` etc. We almost never store "partially analyzed" types (there are guards against incomplete references and placeholders in annotations), and when we do, it is done in a controlled way that guarantees a type will be re-analyzed again. Since this is such a tricky subject, I didn't add any complex logic to support more tricky use cases (like multiple forward references to fixed unpacks in single list). I propose that we release this, and then see what kind of bug reports we will get. * Additional validation for type arguments position to ensure that `TypeVarTuple`s are never split. Total count is not enough to ban case where we have type variables `[T, *Ts, S, U]` and arguments `[int, int, *Us, int]`. We need to explicitly ensure that actual suffix and prefix are longer or equal to formal ones. Such splitting would be very hard to support, and is explicitly banned by the PEP. * Few minor cleanups. Some random comments: * It is tricky to preserve valid parts of type arguments, if there is an argument count error involving an unpack. So after such error I simply set all arguments to `Any` (or `*tuple[Any, ...]` when needed). * I know there is some code duplication. I tried to factor it away, but it turned out non-trivial. I may do some de-duplication pass after everything is done, and it is easier to see the big picture. * Type applications (i.e. when we have `A[int, int]` in runtime context) are wild west currently. 
I decided to postpone variadic support for them to a separate PR, because there is already some support (we will just need to handle edge cases and more error conditions) and I wanted minimize size of this PR. * Something I wanted to mention in one of previous PRs but forgot: Long time ago I proposed to normalize away type aliases inside `Unpack`, but I abandoned this idea, it doesn't really give us any benefits. As I said, this is the last PR for the "static part", in the next PR I will work on fixing subtyping and inference for variadic instances. And then will continue with remaining items I mentioned in my master plan in https://github.com/python/mypy/pull/15924 Fixes https://github.com/python/mypy/issues/15978 --------- Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> --- mypy/erasetype.py | 34 ++++- mypy/expandtype.py | 66 ++++----- mypy/maptype.py | 22 +-- mypy/semanal_typeargs.py | 61 ++------ mypy/test/testtypes.py | 2 +- mypy/typeanal.py | 177 ++++++++++++++++++------ mypy/typeops.py | 4 +- test-data/unit/check-typevar-tuple.test | 123 +++++++++++++++- 8 files changed, 329 insertions(+), 160 deletions(-) diff --git a/mypy/erasetype.py b/mypy/erasetype.py index fbbb4f80b578..d1a01fb6c779 100644 --- a/mypy/erasetype.py +++ b/mypy/erasetype.py @@ -165,9 +165,41 @@ def visit_type_var(self, t: TypeVarType) -> Type: return self.replacement return t + # TODO: below two methods duplicate some logic with expand_type(). + # In fact, we may want to refactor this whole visitor to use expand_type(). + def visit_instance(self, t: Instance) -> Type: + result = super().visit_instance(t) + assert isinstance(result, ProperType) and isinstance(result, Instance) + if t.type.fullname == "builtins.tuple": + # Normalize Tuple[*Tuple[X, ...], ...] -> Tuple[X, ...] 
+ arg = result.args[0] + if isinstance(arg, UnpackType): + unpacked = get_proper_type(arg.type) + if isinstance(unpacked, Instance): + assert unpacked.type.fullname == "builtins.tuple" + return unpacked + return result + + def visit_tuple_type(self, t: TupleType) -> Type: + result = super().visit_tuple_type(t) + assert isinstance(result, ProperType) and isinstance(result, TupleType) + if len(result.items) == 1: + # Normalize Tuple[*Tuple[X, ...]] -> Tuple[X, ...] + item = result.items[0] + if isinstance(item, UnpackType): + unpacked = get_proper_type(item.type) + if isinstance(unpacked, Instance): + assert unpacked.type.fullname == "builtins.tuple" + if result.partial_fallback.type.fullname != "builtins.tuple": + # If it is a subtype (like named tuple) we need to preserve it, + # this essentially mimics the logic in tuple_fallback(). + return result.partial_fallback.accept(self) + return unpacked + return result + def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type: if self.erase_id(t.id): - return self.replacement + return t.tuple_fallback.copy_modified(args=[self.replacement]) return t def visit_param_spec(self, t: ParamSpecType) -> Type: diff --git a/mypy/expandtype.py b/mypy/expandtype.py index be8ecb9ccfd9..c29fcb167777 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -212,10 +212,15 @@ def visit_erased_type(self, t: ErasedType) -> Type: def visit_instance(self, t: Instance) -> Type: args = self.expand_types_with_unpack(list(t.args)) - if isinstance(args, list): - return t.copy_modified(args=args) - else: - return args + if t.type.fullname == "builtins.tuple": + # Normalize Tuple[*Tuple[X, ...], ...] -> Tuple[X, ...] 
+ arg = args[0] + if isinstance(arg, UnpackType): + unpacked = get_proper_type(arg.type) + if isinstance(unpacked, Instance): + assert unpacked.type.fullname == "builtins.tuple" + args = list(unpacked.args) + return t.copy_modified(args=args) def visit_type_var(self, t: TypeVarType) -> Type: # Normally upper bounds can't contain other type variables, the only exception is @@ -285,7 +290,7 @@ def expand_unpack(self, t: UnpackType) -> list[Type]: ): return [UnpackType(typ=repl)] elif isinstance(repl, (AnyType, UninhabitedType)): - # Replace *Ts = Any with *Ts = *tuple[Any, ...] and some for Never. + # Replace *Ts = Any with *Ts = *tuple[Any, ...] and same for Never. # These types may appear here as a result of user error or failed inference. return [UnpackType(t.type.tuple_fallback.copy_modified(args=[repl]))] else: @@ -377,15 +382,8 @@ def visit_overloaded(self, t: Overloaded) -> Type: items.append(new_item) return Overloaded(items) - def expand_types_with_unpack( - self, typs: Sequence[Type] - ) -> list[Type] | AnyType | UninhabitedType: - """Expands a list of types that has an unpack. - - In corner cases, this can return a type rather than a list, in which case this - indicates use of Any or some error occurred earlier. In this case callers should - simply propagate the resulting type. - """ + def expand_types_with_unpack(self, typs: Sequence[Type]) -> list[Type]: + """Expands a list of types that has an unpack.""" items: list[Type] = [] for item in typs: if isinstance(item, UnpackType) and isinstance(item.type, TypeVarTupleType): @@ -396,24 +394,21 @@ def expand_types_with_unpack( def visit_tuple_type(self, t: TupleType) -> Type: items = self.expand_types_with_unpack(t.items) - if isinstance(items, list): - if len(items) == 1: - # Normalize Tuple[*Tuple[X, ...]] -> Tuple[X, ...] 
- item = items[0] - if isinstance(item, UnpackType): - unpacked = get_proper_type(item.type) - if isinstance(unpacked, Instance): - assert unpacked.type.fullname == "builtins.tuple" - if t.partial_fallback.type.fullname != "builtins.tuple": - # If it is a subtype (like named tuple) we need to preserve it, - # this essentially mimics the logic in tuple_fallback(). - return t.partial_fallback.accept(self) - return unpacked - fallback = t.partial_fallback.accept(self) - assert isinstance(fallback, ProperType) and isinstance(fallback, Instance) - return t.copy_modified(items=items, fallback=fallback) - else: - return items + if len(items) == 1: + # Normalize Tuple[*Tuple[X, ...]] -> Tuple[X, ...] + item = items[0] + if isinstance(item, UnpackType): + unpacked = get_proper_type(item.type) + if isinstance(unpacked, Instance): + assert unpacked.type.fullname == "builtins.tuple" + if t.partial_fallback.type.fullname != "builtins.tuple": + # If it is a subtype (like named tuple) we need to preserve it, + # this essentially mimics the logic in tuple_fallback(). + return t.partial_fallback.accept(self) + return unpacked + fallback = t.partial_fallback.accept(self) + assert isinstance(fallback, ProperType) and isinstance(fallback, Instance) + return t.copy_modified(items=items, fallback=fallback) def visit_typeddict_type(self, t: TypedDictType) -> Type: fallback = t.fallback.accept(self) @@ -453,11 +448,8 @@ def visit_type_alias_type(self, t: TypeAliasType) -> Type: # Target of the type alias cannot contain type variables (not bound by the type # alias itself), so we just expand the arguments. args = self.expand_types_with_unpack(t.args) - if isinstance(args, list): - # TODO: normalize if target is Tuple, and args are [*tuple[X, ...]]? - return t.copy_modified(args=args) - else: - return args + # TODO: normalize if target is Tuple, and args are [*tuple[X, ...]]? 
+ return t.copy_modified(args=args) def expand_types(self, types: Iterable[Type]) -> list[Type]: a: list[Type] = [] diff --git a/mypy/maptype.py b/mypy/maptype.py index 4951306573c2..0d54a83127df 100644 --- a/mypy/maptype.py +++ b/mypy/maptype.py @@ -1,8 +1,8 @@ from __future__ import annotations -from mypy.expandtype import expand_type +from mypy.expandtype import expand_type_by_instance from mypy.nodes import TypeInfo -from mypy.types import AnyType, Instance, TupleType, Type, TypeOfAny, TypeVarId, has_type_vars +from mypy.types import AnyType, Instance, TupleType, TypeOfAny, has_type_vars def map_instance_to_supertype(instance: Instance, superclass: TypeInfo) -> Instance: @@ -25,8 +25,7 @@ def map_instance_to_supertype(instance: Instance, superclass: TypeInfo) -> Insta if not alias._is_recursive: # Unfortunately we can't support this for generic recursive tuples. # If we skip this special casing we will fall back to tuple[Any, ...]. - env = instance_to_type_environment(instance) - tuple_type = expand_type(instance.type.tuple_type, env) + tuple_type = expand_type_by_instance(instance.type.tuple_type, instance) if isinstance(tuple_type, TupleType): # Make the import here to avoid cyclic imports. import mypy.typeops @@ -91,8 +90,7 @@ def map_instance_to_direct_supertypes(instance: Instance, supertype: TypeInfo) - for b in typ.bases: if b.type == supertype: - env = instance_to_type_environment(instance) - t = expand_type(b, env) + t = expand_type_by_instance(b, instance) assert isinstance(t, Instance) result.append(t) @@ -103,15 +101,3 @@ def map_instance_to_direct_supertypes(instance: Instance, supertype: TypeInfo) - # type arguments implicitly. any_type = AnyType(TypeOfAny.unannotated) return [Instance(supertype, [any_type] * len(supertype.type_vars))] - - -def instance_to_type_environment(instance: Instance) -> dict[TypeVarId, Type]: - """Given an Instance, produce the resulting type environment for type - variables bound by the Instance's class definition. 
- - An Instance is a type application of a class (a TypeInfo) to its - required number of type arguments. So this environment consists - of the class's type variables mapped to the Instance's actual - arguments. The type variables are mapped by their `id`. - """ - return {binder.id: arg for binder, arg in zip(instance.type.defn.type_vars, instance.args)} diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py index 749b02391e06..3e11951376c9 100644 --- a/mypy/semanal_typeargs.py +++ b/mypy/semanal_typeargs.py @@ -18,7 +18,6 @@ from mypy.options import Options from mypy.scope import Scope from mypy.subtypes import is_same_type, is_subtype -from mypy.typeanal import fix_type_var_tuple_argument, set_any_tvars from mypy.types import ( AnyType, CallableType, @@ -88,36 +87,7 @@ def visit_type_alias_type(self, t: TypeAliasType) -> None: # types, since errors there have already been reported. return self.seen_aliases.add(t) - # Some recursive aliases may produce spurious args. In principle this is not very - # important, as we would simply ignore them when expanding, but it is better to keep - # correct aliases. Also, variadic aliases are better to check when fully analyzed, - # so we do this here. assert t.alias is not None, f"Unfixed type alias {t.type_ref}" - # TODO: consider moving this validation to typeanal.py, expanding invalid aliases - # during semantic analysis may cause crashes. 
- if t.alias.tvar_tuple_index is not None: - correct = len(t.args) >= len(t.alias.alias_tvars) - 1 - if any( - isinstance(a, UnpackType) and isinstance(get_proper_type(a.type), Instance) - for a in t.args - ): - correct = True - else: - correct = len(t.args) == len(t.alias.alias_tvars) - if not correct: - if t.alias.tvar_tuple_index is not None: - exp_len = f"at least {len(t.alias.alias_tvars) - 1}" - else: - exp_len = f"{len(t.alias.alias_tvars)}" - self.fail( - "Bad number of arguments for type alias," - f" expected: {exp_len}, given: {len(t.args)}", - t, - code=codes.TYPE_ARG, - ) - t.args = set_any_tvars( - t.alias, t.line, t.column, self.options, from_error=True, fail=self.fail - ).args is_error = self.validate_args(t.alias.name, t.args, t.alias.alias_tvars, t) if not is_error: # If there was already an error for the alias itself, there is no point in checking @@ -144,34 +114,21 @@ def visit_callable_type(self, t: CallableType) -> None: t.arg_types[star_index] = p_type.args[0] def visit_instance(self, t: Instance) -> None: + super().visit_instance(t) # Type argument counts were checked in the main semantic analyzer pass. We assume # that the counts are correct here. info = t.type if isinstance(info, FakeInfo): return # https://github.com/python/mypy/issues/11079 - t.args = tuple(flatten_nested_tuples(t.args)) - if t.type.has_type_var_tuple_type: - # Regular Instances are already validated in typeanal.py. - # TODO: do something with partial overlap (probably just reject). - # also in other places where split_with_prefix_and_suffix() is used. 
- correct = len(t.args) >= len(t.type.type_vars) - 1 - if any( - isinstance(a, UnpackType) and isinstance(get_proper_type(a.type), Instance) - for a in t.args - ): - correct = True - if not correct: - exp_len = f"at least {len(t.type.type_vars) - 1}" - self.fail( - f"Bad number of arguments, expected: {exp_len}, given: {len(t.args)}", - t, - code=codes.TYPE_ARG, - ) - any_type = AnyType(TypeOfAny.from_error) - t.args = (any_type,) * len(t.type.type_vars) - fix_type_var_tuple_argument(any_type, t) self.validate_args(info.name, t.args, info.defn.type_vars, t) - super().visit_instance(t) + if t.type.fullname == "builtins.tuple" and len(t.args) == 1: + # Normalize Tuple[*Tuple[X, ...], ...] -> Tuple[X, ...] + arg = t.args[0] + if isinstance(arg, UnpackType): + unpacked = get_proper_type(arg.type) + if isinstance(unpacked, Instance): + assert unpacked.type.fullname == "builtins.tuple" + t.args = unpacked.args def validate_args( self, name: str, args: Sequence[Type], type_vars: list[TypeVarLikeType], ctx: Context diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py index 12e7b207b00a..59457dfa5d3b 100644 --- a/mypy/test/testtypes.py +++ b/mypy/test/testtypes.py @@ -1464,7 +1464,7 @@ def make_call(*items: tuple[str, str | None]) -> CallExpr: class TestExpandTypeLimitGetProperType(TestCase): # WARNING: do not increase this number unless absolutely necessary, # and you understand what you are doing. 
- ALLOWED_GET_PROPER_TYPES = 7 + ALLOWED_GET_PROPER_TYPES = 8 @skipUnless(mypy.expandtype.__file__.endswith(".py"), "Skip for compiled mypy") def test_count_get_proper_type(self) -> None: diff --git a/mypy/typeanal.py b/mypy/typeanal.py index ed1a8073887b..e297f2bf1631 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -82,6 +82,7 @@ UnionType, UnpackType, callable_with_ellipsis, + find_unpack_in_list, flatten_nested_tuples, flatten_nested_unions, get_proper_type, @@ -404,7 +405,7 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) t.args, allow_param_spec=True, allow_param_spec_literals=node.has_param_spec_type, - allow_unpack=node.tvar_tuple_index is not None, + allow_unpack=True, # Fixed length unpacks can be used for non-variadic aliases. ) if node.has_param_spec_type and len(node.alias_tvars) == 1: an_args = self.pack_paramspec_args(an_args) @@ -425,9 +426,8 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) # when it is top-level instance, so no need to recurse. if ( isinstance(res, Instance) # type: ignore[misc] - and len(res.args) != len(res.type.type_vars) and not self.defining_alias - and not res.type.has_type_var_tuple_type + and not validate_instance(res, self.fail) ): fix_instance( res, @@ -510,9 +510,6 @@ def apply_concatenate_operator(self, t: UnboundType) -> Type: code=codes.VALID_TYPE, ) return AnyType(TypeOfAny.from_error) - - # TODO: this may not work well with aliases, if those worked. - # Those should be special-cased. elif isinstance(ps, ParamSpecType) and ps.prefix.arg_types: self.api.fail("Nested Concatenates are invalid", t, code=codes.VALID_TYPE) @@ -728,7 +725,7 @@ def analyze_type_with_type_info( args, allow_param_spec=True, allow_param_spec_literals=info.has_param_spec_type, - allow_unpack=info.has_type_var_tuple_type, + allow_unpack=True, # Fixed length tuples can be used for non-variadic types. 
), ctx.line, ctx.column, @@ -736,19 +733,9 @@ def analyze_type_with_type_info( if len(info.type_vars) == 1 and info.has_param_spec_type: instance.args = tuple(self.pack_paramspec_args(instance.args)) - if info.has_type_var_tuple_type: - if instance.args: - # -1 to account for empty tuple - valid_arg_length = len(instance.args) >= len(info.type_vars) - 1 - # Empty case is special cased and we want to infer a Tuple[Any, ...] - # instead of the empty tuple, so no - 1 here. - else: - valid_arg_length = False - else: - valid_arg_length = len(instance.args) == len(info.type_vars) - # Check type argument count. - if not valid_arg_length and not self.defining_alias: + instance.args = tuple(flatten_nested_tuples(instance.args)) + if not self.defining_alias and not validate_instance(instance, self.fail): fix_instance( instance, self.fail, @@ -1342,9 +1329,7 @@ def analyze_callable_type(self, t: UnboundType) -> Type: callable_args, ret_type, fallback ) if isinstance(maybe_ret, CallableType): - maybe_ret = maybe_ret.copy_modified( - ret_type=ret_type.accept(self), variables=variables - ) + maybe_ret = maybe_ret.copy_modified(variables=variables) if maybe_ret is None: # Callable[?, RET] (where ? is something invalid) self.fail( @@ -1736,6 +1721,7 @@ def check_unpacks_in_list(self, items: list[Type]) -> list[Type]: num_unpacks = 0 final_unpack = None for item in items: + # TODO: handle forward references here, they appear as Unpack[Any]. if isinstance(item, UnpackType) and not isinstance( get_proper_type(item.type), TupleType ): @@ -1856,25 +1842,13 @@ def fix_instance( any_type = get_omitted_any(disallow_any, fail, note, t, options, fullname, unexpanded_type) t.args = (any_type,) * len(t.type.type_vars) fix_type_var_tuple_argument(any_type, t) - return - - if t.type.has_type_var_tuple_type: - # This can be only correctly analyzed when all arguments are fully - # analyzed, because there may be a variadic item among them, so we - # do this in semanal_typeargs.py. 
- return - - # Invalid number of type parameters. - fail( - wrong_type_arg_count(len(t.type.type_vars), str(len(t.args)), t.type.name), - t, - code=codes.TYPE_ARG, - ) # Construct the correct number of type arguments, as # otherwise the type checker may crash as it expects # things to be right. - t.args = tuple(AnyType(TypeOfAny.from_error) for _ in t.type.type_vars) + any_type = AnyType(TypeOfAny.from_error) + t.args = tuple(any_type for _ in t.type.type_vars) + fix_type_var_tuple_argument(any_type, t) t.invalid = True @@ -1903,6 +1877,15 @@ def instantiate_type_alias( ctx: context where expansion happens unexpanded_type, disallow_any, use_standard_error: used to customize error messages """ + # Type aliases are special, since they can be expanded during semantic analysis, + # so we need to normalize them as soon as possible. + # TODO: can this cause an infinite recursion? + args = flatten_nested_tuples(args) + if any(unknown_unpack(a) for a in args): + # This type is not ready to be validated, because of unknown total count. + # Note that we keep the kind of Any for consistency. 
+ return set_any_tvars(node, ctx.line, ctx.column, options, special_form=True) + exp_len = len(node.alias_tvars) act_len = len(args) if ( @@ -1937,22 +1920,54 @@ def instantiate_type_alias( tp.line = ctx.line tp.column = ctx.column return tp - if act_len != exp_len and node.tvar_tuple_index is None: + if node.tvar_tuple_index is None: + if any(isinstance(a, UnpackType) for a in args): + # A variadic unpack in fixed size alias (fixed unpacks must be flattened by the caller) + fail(message_registry.INVALID_UNPACK_POSITION, ctx, code=codes.VALID_TYPE) + return set_any_tvars(node, ctx.line, ctx.column, options, from_error=True) + correct = act_len == exp_len + else: + correct = act_len >= exp_len - 1 + for a in args: + if isinstance(a, UnpackType): + unpacked = get_proper_type(a.type) + if isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple": + # Variadic tuple is always correct. + correct = True + if not correct: if use_standard_error: # This is used if type alias is an internal representation of another type, # for example a generic TypedDict or NamedTuple. msg = wrong_type_arg_count(exp_len, str(act_len), node.name) else: - msg = f"Bad number of arguments for type alias, expected: {exp_len}, given: {act_len}" + if node.tvar_tuple_index is not None: + exp_len_str = f"at least {exp_len - 1}" + else: + exp_len_str = str(exp_len) + msg = ( + "Bad number of arguments for type alias," + f" expected: {exp_len_str}, given: {act_len}" + ) fail(msg, ctx, code=codes.TYPE_ARG) return set_any_tvars(node, ctx.line, ctx.column, options, from_error=True) + elif node.tvar_tuple_index is not None: + # We also need to check if we are not performing a type variable tuple split. 
+ unpack = find_unpack_in_list(args) + if unpack is not None: + unpack_arg = args[unpack] + assert isinstance(unpack_arg, UnpackType) + if isinstance(unpack_arg.type, TypeVarTupleType): + exp_prefix = node.tvar_tuple_index + act_prefix = unpack + exp_suffix = len(node.alias_tvars) - node.tvar_tuple_index - 1 + act_suffix = len(args) - unpack - 1 + if act_prefix < exp_prefix or act_suffix < exp_suffix: + fail("TypeVarTuple cannot be split", ctx, code=codes.TYPE_ARG) + return set_any_tvars(node, ctx.line, ctx.column, options, from_error=True) # TODO: we need to check args validity w.r.t alias.alias_tvars. # Otherwise invalid instantiations will be allowed in runtime context. # Note: in type context, these will be still caught by semanal_typeargs. - # Type aliases are special, since they can be expanded during semantic analysis, - # so we need to normalize them as soon as possible. - # TODO: can this cause an infinite recursion? - typ = TypeAliasType(node, flatten_nested_tuples(args), ctx.line, ctx.column) + typ = TypeAliasType(node, args, ctx.line, ctx.column) assert typ.alias is not None # HACK: Implement FlexibleAlias[T, typ] by expanding it to typ here. if ( @@ -1973,11 +1988,14 @@ def set_any_tvars( *, from_error: bool = False, disallow_any: bool = False, + special_form: bool = False, fail: MsgCallback | None = None, unexpanded_type: Type | None = None, ) -> TypeAliasType: if from_error or disallow_any: type_of_any = TypeOfAny.from_error + elif special_form: + type_of_any = TypeOfAny.special_form else: type_of_any = TypeOfAny.from_omitted_generics if disallow_any and node.alias_tvars: @@ -2227,6 +2245,63 @@ def make_optional_type(t: Type) -> Type: return UnionType([t, NoneType()], t.line, t.column) +def validate_instance(t: Instance, fail: MsgCallback) -> bool: + """Check if this is a well-formed instance with respect to argument count/positions.""" + # TODO: combine logic with instantiate_type_alias(). 
+ if any(unknown_unpack(a) for a in t.args): + # This type is not ready to be validated, because of unknown total count. + # TODO: is it OK to fill with TypeOfAny.from_error instead of special form? + return False + if t.type.has_type_var_tuple_type: + correct = len(t.args) >= len(t.type.type_vars) - 1 + if any( + isinstance(a, UnpackType) and isinstance(get_proper_type(a.type), Instance) + for a in t.args + ): + correct = True + if not correct: + exp_len = f"at least {len(t.type.type_vars) - 1}" + fail( + f"Bad number of arguments, expected: {exp_len}, given: {len(t.args)}", + t, + code=codes.TYPE_ARG, + ) + return False + elif not t.args: + # The Any arguments should be set by the caller. + return False + else: + # We also need to check if we are not performing a type variable tuple split. + unpack = find_unpack_in_list(t.args) + if unpack is not None: + unpack_arg = t.args[unpack] + assert isinstance(unpack_arg, UnpackType) + if isinstance(unpack_arg.type, TypeVarTupleType): + assert t.type.type_var_tuple_prefix is not None + assert t.type.type_var_tuple_suffix is not None + exp_prefix = t.type.type_var_tuple_prefix + act_prefix = unpack + exp_suffix = t.type.type_var_tuple_suffix + act_suffix = len(t.args) - unpack - 1 + if act_prefix < exp_prefix or act_suffix < exp_suffix: + fail("TypeVarTuple cannot be split", t, code=codes.TYPE_ARG) + return False + elif any(isinstance(a, UnpackType) for a in t.args): + # A variadic unpack in fixed size instance (fixed unpacks must be flattened by the caller) + fail(message_registry.INVALID_UNPACK_POSITION, t, code=codes.VALID_TYPE) + return False + elif len(t.args) != len(t.type.type_vars): + # Invalid number of type parameters. 
+ if t.args: + fail( + wrong_type_arg_count(len(t.type.type_vars), str(len(t.args)), t.type.name), + t, + code=codes.TYPE_ARG, + ) + return False + return True + + def fix_instance_types(t: Type, fail: MsgCallback, note: MsgCallback, options: Options) -> None: """Recursively fix all instance types (type argument count) in a given type. @@ -2244,7 +2319,7 @@ def __init__(self, fail: MsgCallback, note: MsgCallback, options: Options) -> No def visit_instance(self, typ: Instance) -> None: super().visit_instance(typ) - if len(typ.args) != len(typ.type.type_vars) and not typ.type.has_type_var_tuple_type: + if not validate_instance(typ, self.fail): fix_instance( typ, self.fail, @@ -2269,3 +2344,17 @@ def visit_unbound_type(self, t: UnboundType) -> bool: if sym and sym.fullname in SELF_TYPE_NAMES: return True return super().visit_unbound_type(t) + + +def unknown_unpack(t: Type) -> bool: + """Check if a given type is an unpack of an unknown type. + + Unfortunately, there is no robust way to distinguish forward references from + genuine undefined names here. But this worked well so far, although it looks + quite fragile. + """ + if isinstance(t, UnpackType): + unpacked = get_proper_type(t.type) + if isinstance(unpacked, AnyType) and unpacked.type_of_any == TypeOfAny.special_form: + return True + return False diff --git a/mypy/typeops.py b/mypy/typeops.py index f9c1914cc9a8..3efa3cc3e965 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -110,10 +110,8 @@ def tuple_fallback(typ: TupleType) -> Instance: and unpacked_type.type.fullname == "builtins.tuple" ): items.append(unpacked_type.args[0]) - elif isinstance(unpacked_type, (AnyType, UninhabitedType)): - continue else: - raise NotImplementedError(unpacked_type) + raise NotImplementedError else: items.append(item) # TODO: we should really use a union here, tuple types are special. 
diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index f7faab4818c9..2b47ff30cdfb 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -619,8 +619,7 @@ T = TypeVar("T") Ts = TypeVarTuple("Ts") A = List[Tuple[T, Unpack[Ts], T]] -B = A[Unpack[Ts]] -x: B[int, str, str] +x: A[int, str, str] reveal_type(x) # N: Revealed type is "builtins.list[Tuple[builtins.int, builtins.str, builtins.str, builtins.int]]" [builtins fixtures/tuple.pyi] @@ -1052,8 +1051,7 @@ reveal_type(y.fn) # N: Revealed type is "def (builtins.int, builtins.str)" z: A[Unpack[Tuple[int, ...]]] reveal_type(z) # N: Revealed type is "__main__.A[Unpack[builtins.tuple[builtins.int, ...]]]" -# TODO: this requires fixing map_instance_to_supertype(). -# reveal_type(z[0]) +reveal_type(z[0]) # N: Revealed type is "builtins.int" reveal_type(z.fn) # N: Revealed type is "def (*builtins.int)" t: A[int, Unpack[Tuple[int, str]], str] @@ -1118,3 +1116,120 @@ reveal_type(td) # N: Revealed type is "TypedDict('__main__.A', {'fn': def (buil def bad() -> int: ... td2 = A({"fn": bad, "val": 42}) # E: Incompatible types (expression has type "Callable[[], int]", TypedDict item "fn" has type "Callable[[], None]") [builtins fixtures/tuple.pyi] + +[case testFixedUnpackWithRegularInstance] +from typing import Tuple, Generic, TypeVar +from typing_extensions import Unpack + +T1 = TypeVar("T1") +T2 = TypeVar("T2") +T3 = TypeVar("T3") +T4 = TypeVar("T4") + +class C(Generic[T1, T2, T3, T4]): ... +x: C[int, Unpack[Alias], str] +Alias = Tuple[int, str] +reveal_type(x) # N: Revealed type is "__main__.C[builtins.int, builtins.int, builtins.str, builtins.str]" +[builtins fixtures/tuple.pyi] + +[case testVariadicUnpackWithRegularInstance] +from typing import Tuple, Generic, TypeVar +from typing_extensions import Unpack + +T1 = TypeVar("T1") +T2 = TypeVar("T2") +T3 = TypeVar("T3") +T4 = TypeVar("T4") + +class C(Generic[T1, T2, T3, T4]): ... 
+x: C[int, Unpack[Alias], str, str] # E: Unpack is only valid in a variadic position +Alias = Tuple[int, ...] +reveal_type(x) # N: Revealed type is "__main__.C[Any, Any, Any, Any]" +y: C[int, Unpack[Undefined]] # E: Name "Undefined" is not defined +reveal_type(y) # N: Revealed type is "__main__.C[Any, Any, Any, Any]" +[builtins fixtures/tuple.pyi] + +[case testVariadicAliasInvalidUnpackNoCrash] +from typing import Tuple, Generic, Union, List +from typing_extensions import Unpack, TypeVarTuple + +Ts = TypeVarTuple("Ts") +Alias = Tuple[int, Unpack[Ts], str] + +A = Union[int, str] +x: List[Alias[int, Unpack[A], str]] # E: "Union[int, str]" cannot be unpacked (must be tuple or TypeVarTuple) +reveal_type(x) # N: Revealed type is "builtins.list[Tuple[builtins.int, builtins.int, Unpack[builtins.tuple[Any, ...]], builtins.str, builtins.str]]" +y: List[Alias[int, Unpack[Undefined], str]] # E: Name "Undefined" is not defined +reveal_type(y) # N: Revealed type is "builtins.list[Tuple[builtins.int, Unpack[builtins.tuple[Any, ...]], builtins.str]]" +[builtins fixtures/tuple.pyi] + +[case testVariadicAliasForwardRefToFixedUnpack] +from typing import Tuple, Generic, TypeVar +from typing_extensions import Unpack, TypeVarTuple + +T = TypeVar("T") +S = TypeVar("S") +Ts = TypeVarTuple("Ts") +Alias = Tuple[T, Unpack[Ts], S] +x: Alias[int, Unpack[Other]] +Other = Tuple[int, str] +reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.str]" +[builtins fixtures/tuple.pyi] + +[case testVariadicAliasForwardRefToVariadicUnpack] +from typing import Tuple, Generic, TypeVar +from typing_extensions import Unpack, TypeVarTuple + +T = TypeVar("T") +S = TypeVar("S") +Ts = TypeVarTuple("Ts") +Alias = Tuple[T, Unpack[Ts], S] +x: Alias[int, Unpack[Other]] +Other = Tuple[int, ...] 
+reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.int, ...]], builtins.int]" +[builtins fixtures/tuple.pyi] + +[case testVariadicInstanceStrictPrefixSuffixCheck] +from typing import Tuple, Generic, TypeVar +from typing_extensions import Unpack, TypeVarTuple + +T = TypeVar("T") +S = TypeVar("S") +Ts = TypeVarTuple("Ts") +class C(Generic[T, Unpack[Ts], S]): ... + +def foo(x: Tuple[Unpack[Ts]]) -> Tuple[Unpack[Ts]]: + y: C[int, Unpack[Ts]] # E: TypeVarTuple cannot be split + z: C[Unpack[Ts], int] # E: TypeVarTuple cannot be split + return x +[builtins fixtures/tuple.pyi] + +[case testVariadicAliasStrictPrefixSuffixCheck] +from typing import Tuple, TypeVar +from typing_extensions import Unpack, TypeVarTuple + +T = TypeVar("T") +S = TypeVar("S") +Ts = TypeVarTuple("Ts") +Alias = Tuple[T, Unpack[Ts], S] + +def foo(x: Tuple[Unpack[Ts]]) -> Tuple[Unpack[Ts]]: + y: Alias[int, Unpack[Ts]] # E: TypeVarTuple cannot be split + z: Alias[Unpack[Ts], int] # E: TypeVarTuple cannot be split + return x +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleWithIsInstance] +# flags: --warn-unreachable +from typing import Tuple +from typing_extensions import TypeVarTuple, Unpack + +TP = TypeVarTuple("TP") +class A(Tuple[Unpack[TP]]): ... 
+ +def test(d: A[int, str]) -> None: + if isinstance(d, A): + reveal_type(d) # N: Revealed type is "Tuple[builtins.int, builtins.str, fallback=__main__.A[builtins.int, builtins.str]]" + else: + reveal_type(d) # E: Statement is unreachable +[builtins fixtures/isinstancelist.pyi] From 49419835045b09c98b545171abb10384b6ecf6a9 Mon Sep 17 00:00:00 2001 From: Matt Bogosian Date: Fri, 8 Sep 2023 01:46:14 -0500 Subject: [PATCH 037/144] Differentiate between venv and tox setups in CONTRIBUTING.md (#16067) --- CONTRIBUTING.md | 42 ++++++++++++++++++++++++++++++------------ tox.ini | 1 + 2 files changed, 31 insertions(+), 12 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 82e55f437e87..46292c301406 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -62,18 +62,6 @@ like this: python3 runtests.py ``` -You can also use `tox` to run tests (`tox` handles setting up the test environment for you): - -```bash -tox run -e py - -# Or some specific python version: -tox run -e py39 - -# Or some specific command: -tox run -e lint -``` - Some useful commands for running specific tests include: ```bash @@ -95,6 +83,36 @@ python runtests.py lint For an in-depth guide on running and writing tests, see [the README in the test-data directory](test-data/unit/README.md). +#### Using `tox` + +You can also use [`tox`](https://tox.wiki/en/latest/) to run tests and other commands. +`tox` handles setting up test environments for you. 
+ +```bash +# Run tests +tox run -e py + +# Run tests using some specific Python version +tox run -e py311 + +# Run a specific command +tox run -e lint + +# Run a single test from the test suite +tox run -e py -- -n0 -k 'test_name' + +# Run all test cases in the "test-data/unit/check-dataclasses.test" file using +# Python 3.11 specifically +tox run -e py311 -- mypy/test/testcheck.py::TypeCheckSuite::check-dataclasses.test + +# Set up a development environment with all the project libraries and run a command +tox -e dev -- mypy --verbose test_case.py +tox -e dev --override testenv:dev.allowlist_externals+=env -- env # inspect the environment +``` + +If you don't already have `tox` installed, you can use a virtual environment as +described above to install `tox` via `pip` (e.g., ``python3 -m pip install tox``). + ## First time contributors If you're looking for things to help with, browse our [issue tracker](https://github.com/python/mypy/issues)! diff --git a/tox.ini b/tox.ini index e07acdc5200d..31aed1a1ef48 100644 --- a/tox.ini +++ b/tox.ini @@ -30,6 +30,7 @@ deps = commands = python -m pip list --format=columns python -c 'import sys; print(sys.executable)' + {posargs} [testenv:docs] description = invoke sphinx-build to build the HTML docs From f9dc5610423d368bcf804b6a88a2d8502e62df1c Mon Sep 17 00:00:00 2001 From: Ilya Priven Date: Sun, 10 Sep 2023 01:55:57 -0400 Subject: [PATCH 038/144] Fix __post_init__() internal error (#16080) Fixes #16057. 
--- mypy/checker.py | 5 ++++- mypy/nodes.py | 1 - mypy/plugins/dataclasses.py | 4 ++-- test-data/unit/check-dataclasses.test | 4 ++++ 4 files changed, 10 insertions(+), 4 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index fa7c645873d0..5a74f019dcf4 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -1076,6 +1076,8 @@ def check_func_item( if name == "__exit__": self.check__exit__return_type(defn) + # TODO: the following logic should move to the dataclasses plugin + # https://github.com/python/mypy/issues/15515 if name == "__post_init__": if dataclasses_plugin.is_processed_dataclass(defn.info): dataclasses_plugin.check_post_init(self, defn, defn.info) @@ -2882,7 +2884,8 @@ def check_assignment( typ = self.expr_checker.accept(rvalue) self.check_match_args(inferred, typ, lvalue) if name == "__post_init__": - if dataclasses_plugin.is_processed_dataclass(self.scope.active_class()): + active_class = self.scope.active_class() + if active_class and dataclasses_plugin.is_processed_dataclass(active_class): self.fail(message_registry.DATACLASS_POST_INIT_MUST_BE_A_FUNCTION, rvalue) # Defer PartialType's super type checking. 
diff --git a/mypy/nodes.py b/mypy/nodes.py index d29e99ccace7..6556cd910b46 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -515,7 +515,6 @@ def __init__(self) -> None: # Original, not semantically analyzed type (used for reprocessing) self.unanalyzed_type: mypy.types.ProperType | None = None # If method, reference to TypeInfo - # TODO: Type should be Optional[TypeInfo] self.info = FUNC_NO_INFO self.is_property = False self.is_class = False diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 8b34c28b6832..99f079705c3f 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -1070,8 +1070,8 @@ def replace_function_sig_callback(ctx: FunctionSigContext) -> CallableType: ) -def is_processed_dataclass(info: TypeInfo | None) -> bool: - return info is not None and "dataclass" in info.metadata +def is_processed_dataclass(info: TypeInfo) -> bool: + return bool(info) and "dataclass" in info.metadata def check_post_init(api: TypeChecker, defn: FuncItem, info: TypeInfo) -> None: diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 8a50e7124d05..35df84658259 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -2280,6 +2280,10 @@ reveal_type(a2) # N: Revealed type is "__main__.A[builtins.int]" [builtins fixtures/tuple.pyi] +[case testPostInitNotMethod] +def __post_init__() -> None: + pass + [case testPostInitCorrectSignature] from typing import Any, Generic, TypeVar, Callable, Self from dataclasses import dataclass, InitVar From ed18fea5b17ef3a969b37b4906dd7c237ddb1825 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 9 Sep 2023 23:35:07 -0700 Subject: [PATCH 039/144] Document and rename overload-overlap error code (#16074) A new error code was introduced in https://github.com/python/mypy/pull/16061 As per https://github.com/python/mypy/pull/16068, we didn't previously run doc builds on changes 
to errorcodes.py, causing tests to fail on master when this was merged. Renaming the code as per: https://github.com/python/mypy/pull/16061#issuecomment-1710613890 All type ignores should be unsafe, so we should save the unsafe adjective for things that are really unsafe. As it stands, there are many cases where overloads overlap somewhat benignly. Fixes #8656 --- docs/source/error_code_list.rst | 35 ++++++++++++++++++++++++++++ docs/source/more_types.rst | 5 +++- mypy/errorcodes.py | 4 ++-- mypy/messages.py | 2 +- mypy/types.py | 2 +- test-data/unit/check-errorcodes.test | 2 +- 6 files changed, 44 insertions(+), 6 deletions(-) diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst index a865a4dd1532..4decd37e6e8a 100644 --- a/docs/source/error_code_list.rst +++ b/docs/source/error_code_list.rst @@ -1114,6 +1114,41 @@ Warn about cases where a bytes object may be converted to a string in an unexpec print(f"The alphabet starts with {b!r}") # The alphabet starts with b'abc' print(f"The alphabet starts with {b.decode('utf-8')}") # The alphabet starts with abc +.. _code-overload-overlap: + +Check that overloaded functions don't overlap [overload-overlap] +---------------------------------------------------------------- + +Warn if multiple ``@overload`` variants overlap in potentially unsafe ways. +This guards against the following situation: + +.. code-block:: python + + from typing import overload + + class A: ... + class B(A): ... + + @overload + def foo(x: B) -> int: ... # Error: Overloaded function signatures 1 and 2 overlap with incompatible return types [overload-overlap] + @overload + def foo(x: A) -> str: ... + def foo(x): ... + + def takes_a(a: A) -> str: + return foo(a) + + a: A = B() + value = takes_a(a) + # mypy will think that value is a str, but it could actually be an int + reveal_type(value) # Revealed type is "builtins.str" + + +Note that in cases where you ignore this error, mypy will usually still infer the +types you expect. 
+ +See :ref:`overloading ` for more explanation. + .. _code-annotation-unchecked: Notify about an annotation in an unchecked function [annotation-unchecked] diff --git a/docs/source/more_types.rst b/docs/source/more_types.rst index 4e6e9204fdca..b27764a9e87c 100644 --- a/docs/source/more_types.rst +++ b/docs/source/more_types.rst @@ -501,7 +501,7 @@ To prevent these kinds of issues, mypy will detect and prohibit inherently unsaf overlapping overloads on a best-effort basis. Two variants are considered unsafely overlapping when both of the following are true: -1. All of the arguments of the first variant are compatible with the second. +1. All of the arguments of the first variant are potentially compatible with the second. 2. The return type of the first variant is *not* compatible with (e.g. is not a subtype of) the second. @@ -510,6 +510,9 @@ the ``object`` argument in the second, yet the ``int`` return type is not a subt ``str``. Both conditions are true, so mypy will correctly flag ``unsafe_func`` as being unsafe. +Note that in cases where you ignore the overlapping overload error, mypy will usually +still infer the types you expect at callsites. + However, mypy will not detect *all* unsafe uses of overloads. For example, suppose we modify the above snippet so it calls ``summarize`` instead of ``unsafe_func``: diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index 70b8cffe9053..cd9978c2f31c 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -262,8 +262,8 @@ def __hash__(self) -> int: # This is a catch-all for remaining uncategorized errors. 
MISC: Final = ErrorCode("misc", "Miscellaneous other checks", "General") -UNSAFE_OVERLOAD: Final[ErrorCode] = ErrorCode( - "unsafe-overload", +OVERLOAD_OVERLAP: Final[ErrorCode] = ErrorCode( + "overload-overlap", "Warn if multiple @overload variants overlap in unsafe ways", "General", sub_code_of=MISC, diff --git a/mypy/messages.py b/mypy/messages.py index a58c5f91c4b1..b6fdaf06a8e0 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1604,7 +1604,7 @@ def overloaded_signatures_overlap(self, index1: int, index2: int, context: Conte "Overloaded function signatures {} and {} overlap with " "incompatible return types".format(index1, index2), context, - code=codes.UNSAFE_OVERLOAD, + code=codes.OVERLOAD_OVERLAP, ) def overloaded_signature_will_never_match( diff --git a/mypy/types.py b/mypy/types.py index cee4595b67cc..04d90c9dc124 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -3019,7 +3019,7 @@ def get_proper_type(typ: Type | None) -> ProperType | None: @overload -def get_proper_types(types: list[Type] | tuple[Type, ...]) -> list[ProperType]: # type: ignore[unsafe-overload] +def get_proper_types(types: list[Type] | tuple[Type, ...]) -> list[ProperType]: # type: ignore[overload-overlap] ... diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index 72edf2f22c05..ac7c8b4c9f9d 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -1077,7 +1077,7 @@ x = 1 # type: ignore # E: Unused "type: ignore" comment [unused-ignore] from typing import overload, Union @overload -def unsafe_func(x: int) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types [unsafe-overload] +def unsafe_func(x: int) -> int: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types [overload-overlap] @overload def unsafe_func(x: object) -> str: ... 
def unsafe_func(x: object) -> Union[int, str]: From 9a35360739ced871feb6331a14a7bbacce00c7dc Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Sun, 10 Sep 2023 21:11:49 +0300 Subject: [PATCH 040/144] Add `add_overloaded_method_to_class` helper to `plugins/common.py` (#16038) There are several changes: 1. `add_overloaded_method_to_class` itself. It is very useful for plugin authors, because right now it is quite easy to add a regular method, but it is very hard to add a method with `@overload`s. I don't think that user must face all the chalenges that I've covered in this method. Moreover, it is quite easy even for experienced developers to forget some flags / props / etc (I am pretty sure that I might forgot something in the implementation) 2. `add_overloaded_method_to_class` and `add_method_to_class` now return added nodes, it is also helpful if you want to do something with this node in your plugin after it is created 3. I've refactored how `add_method_to_class` works and reused its parts in the new method as well 4. `tvar_def` in `add_method_to_class` can now accept a list of type vars, not just one Notice that `add_method_to_class` is unchanged from the user's POV, it should continue to work as before. Tests are also updated to check that our overloads are correct. Things to do later (in the next PRs / releases): 1. We can possibly add `is_final` param to methods as well 2. 
We can also support `@property` in a separate method at some point --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- mypy/plugins/common.py | 136 +++++++++++++++--- test-data/unit/check-custom-plugin.test | 24 +++- test-data/unit/check-incremental.test | 38 +++++ test-data/unit/deps.test | 6 +- .../unit/plugins/add_overloaded_method.py | 41 ++++++ 5 files changed, 222 insertions(+), 23 deletions(-) create mode 100644 test-data/unit/plugins/add_overloaded_method.py diff --git a/mypy/plugins/common.py b/mypy/plugins/common.py index 55f2870cadb4..84d50b7086c6 100644 --- a/mypy/plugins/common.py +++ b/mypy/plugins/common.py @@ -1,5 +1,7 @@ from __future__ import annotations +from typing import NamedTuple + from mypy.argmap import map_actuals_to_formals from mypy.fixup import TypeFixer from mypy.nodes import ( @@ -16,9 +18,11 @@ JsonDict, NameExpr, Node, + OverloadedFuncDef, PassStmt, RefExpr, SymbolTableNode, + TypeInfo, Var, ) from mypy.plugin import CheckerPluginInterface, ClassDefContext, SemanticAnalyzerPluginInterface @@ -209,24 +213,99 @@ def add_method( ) +class MethodSpec(NamedTuple): + """Represents a method signature to be added, except for `name`.""" + + args: list[Argument] + return_type: Type + self_type: Type | None = None + tvar_defs: list[TypeVarType] | None = None + + def add_method_to_class( api: SemanticAnalyzerPluginInterface | CheckerPluginInterface, cls: ClassDef, name: str, + # MethodSpec items kept for backward compatibility: args: list[Argument], return_type: Type, self_type: Type | None = None, - tvar_def: TypeVarType | None = None, + tvar_def: list[TypeVarType] | TypeVarType | None = None, is_classmethod: bool = False, is_staticmethod: bool = False, -) -> None: +) -> FuncDef | Decorator: """Adds a new method to a class definition.""" + _prepare_class_namespace(cls, name) - assert not ( - is_classmethod is True and is_staticmethod is True - ), "Can't add a new method that's both 
staticmethod and classmethod." + if tvar_def is not None and not isinstance(tvar_def, list): + tvar_def = [tvar_def] + + func, sym = _add_method_by_spec( + api, + cls.info, + name, + MethodSpec(args=args, return_type=return_type, self_type=self_type, tvar_defs=tvar_def), + is_classmethod=is_classmethod, + is_staticmethod=is_staticmethod, + ) + cls.info.names[name] = sym + cls.info.defn.defs.body.append(func) + return func + +def add_overloaded_method_to_class( + api: SemanticAnalyzerPluginInterface | CheckerPluginInterface, + cls: ClassDef, + name: str, + items: list[MethodSpec], + is_classmethod: bool = False, + is_staticmethod: bool = False, +) -> OverloadedFuncDef: + """Adds a new overloaded method to a class definition.""" + assert len(items) >= 2, "Overloads must contain at least two cases" + + # Save old definition, if it exists. + _prepare_class_namespace(cls, name) + + # Create function bodies for each passed method spec. + funcs: list[Decorator | FuncDef] = [] + for item in items: + func, _sym = _add_method_by_spec( + api, + cls.info, + name=name, + spec=item, + is_classmethod=is_classmethod, + is_staticmethod=is_staticmethod, + ) + if isinstance(func, FuncDef): + var = Var(func.name, func.type) + var.set_line(func.line) + func.is_decorated = True + func.deco_line = func.line + + deco = Decorator(func, [], var) + else: + deco = func + deco.is_overload = True + funcs.append(deco) + + # Create the final OverloadedFuncDef node: + overload_def = OverloadedFuncDef(funcs) + overload_def.info = cls.info + overload_def.is_class = is_classmethod + overload_def.is_static = is_staticmethod + sym = SymbolTableNode(MDEF, overload_def) + sym.plugin_generated = True + + cls.info.names[name] = sym + cls.info.defn.defs.body.append(overload_def) + return overload_def + + +def _prepare_class_namespace(cls: ClassDef, name: str) -> None: info = cls.info + assert info # First remove any previously generated methods with the same name # to avoid clashes and problems in the 
semantic analyzer. @@ -235,6 +314,29 @@ def add_method_to_class( if sym.plugin_generated and isinstance(sym.node, FuncDef): cls.defs.body.remove(sym.node) + # NOTE: we would like the plugin generated node to dominate, but we still + # need to keep any existing definitions so they get semantically analyzed. + if name in info.names: + # Get a nice unique name instead. + r_name = get_unique_redefinition_name(name, info.names) + info.names[r_name] = info.names[name] + + +def _add_method_by_spec( + api: SemanticAnalyzerPluginInterface | CheckerPluginInterface, + info: TypeInfo, + name: str, + spec: MethodSpec, + *, + is_classmethod: bool, + is_staticmethod: bool, +) -> tuple[FuncDef | Decorator, SymbolTableNode]: + args, return_type, self_type, tvar_defs = spec + + assert not ( + is_classmethod is True and is_staticmethod is True + ), "Can't add a new method that's both staticmethod and classmethod." + if isinstance(api, SemanticAnalyzerPluginInterface): function_type = api.named_type("builtins.function") else: @@ -258,8 +360,8 @@ def add_method_to_class( arg_kinds.append(arg.kind) signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type) - if tvar_def: - signature.variables = [tvar_def] + if tvar_defs: + signature.variables = tvar_defs func = FuncDef(name, args, Block([PassStmt()])) func.info = info @@ -269,13 +371,6 @@ def add_method_to_class( func._fullname = info.fullname + "." + name func.line = info.line - # NOTE: we would like the plugin generated node to dominate, but we still - # need to keep any existing definitions so they get semantically analyzed. - if name in info.names: - # Get a nice unique name instead. - r_name = get_unique_redefinition_name(name, info.names) - info.names[r_name] = info.names[name] - # Add decorator for is_staticmethod. It's unnecessary for is_classmethod. 
if is_staticmethod: func.is_decorated = True @@ -286,12 +381,12 @@ def add_method_to_class( dec = Decorator(func, [], v) dec.line = info.line sym = SymbolTableNode(MDEF, dec) - else: - sym = SymbolTableNode(MDEF, func) - sym.plugin_generated = True - info.names[name] = sym + sym.plugin_generated = True + return dec, sym - info.defn.defs.body.append(func) + sym = SymbolTableNode(MDEF, func) + sym.plugin_generated = True + return func, sym def add_attribute_to_class( @@ -304,7 +399,7 @@ def add_attribute_to_class( override_allow_incompatible: bool = False, fullname: str | None = None, is_classvar: bool = False, -) -> None: +) -> Var: """ Adds a new attribute to a class definition. This currently only generates the symbol table entry and no corresponding AssignmentStatement @@ -335,6 +430,7 @@ def add_attribute_to_class( info.names[name] = SymbolTableNode( MDEF, node, plugin_generated=True, no_serialize=no_serialize ) + return node def deserialize_and_fixup_type(data: str | JsonDict, api: SemanticAnalyzerPluginInterface) -> Type: diff --git a/test-data/unit/check-custom-plugin.test b/test-data/unit/check-custom-plugin.test index 9a0668f98c21..22374d09cf9f 100644 --- a/test-data/unit/check-custom-plugin.test +++ b/test-data/unit/check-custom-plugin.test @@ -1011,13 +1011,35 @@ class BaseAddMethod: pass class MyClass(BaseAddMethod): pass -my_class = MyClass() reveal_type(MyClass.foo_classmethod) # N: Revealed type is "def ()" reveal_type(MyClass.foo_staticmethod) # N: Revealed type is "def (builtins.int) -> builtins.str" + +my_class = MyClass() +reveal_type(my_class.foo_classmethod) # N: Revealed type is "def ()" +reveal_type(my_class.foo_staticmethod) # N: Revealed type is "def (builtins.int) -> builtins.str" [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/add_classmethod.py +[case testAddOverloadedMethodPlugin] +# flags: --config-file tmp/mypy.ini +class AddOverloadedMethod: pass + +class MyClass(AddOverloadedMethod): + pass + +reveal_type(MyClass.method) # N: 
Revealed type is "Overload(def (self: __main__.MyClass, arg: builtins.int) -> builtins.str, def (self: __main__.MyClass, arg: builtins.str) -> builtins.int)" +reveal_type(MyClass.clsmethod) # N: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)" +reveal_type(MyClass.stmethod) # N: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)" + +my_class = MyClass() +reveal_type(my_class.method) # N: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)" +reveal_type(my_class.clsmethod) # N: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)" +reveal_type(my_class.stmethod) # N: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)" +[file mypy.ini] +\[mypy] +plugins=/test-data/unit/plugins/add_overloaded_method.py + [case testCustomErrorCodePlugin] # flags: --config-file tmp/mypy.ini --show-error-codes def main() -> int: diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index fcab0545b982..b4cd21aa552c 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -5935,6 +5935,44 @@ tmp/b.py:4: note: Revealed type is "def ()" tmp/b.py:5: note: Revealed type is "def (builtins.int) -> builtins.str" tmp/b.py:6: note: Revealed type is "def ()" tmp/b.py:7: note: Revealed type is "def (builtins.int) -> builtins.str" + +[case testIncrementalAddOverloadedMethodPlugin] +# flags: --config-file tmp/mypy.ini +import b + +[file mypy.ini] +\[mypy] +plugins=/test-data/unit/plugins/add_overloaded_method.py + +[file a.py] +class AddOverloadedMethod: pass + +class MyClass(AddOverloadedMethod): + pass + +[file b.py] +import a + +[file b.py.2] +import a + +reveal_type(a.MyClass.method) +reveal_type(a.MyClass.clsmethod) 
+reveal_type(a.MyClass.stmethod) + +my_class = a.MyClass() +reveal_type(my_class.method) +reveal_type(my_class.clsmethod) +reveal_type(my_class.stmethod) +[rechecked b] +[out2] +tmp/b.py:3: note: Revealed type is "Overload(def (self: a.MyClass, arg: builtins.int) -> builtins.str, def (self: a.MyClass, arg: builtins.str) -> builtins.int)" +tmp/b.py:4: note: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)" +tmp/b.py:5: note: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)" +tmp/b.py:8: note: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)" +tmp/b.py:9: note: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)" +tmp/b.py:10: note: Revealed type is "Overload(def (arg: builtins.int) -> builtins.str, def (arg: builtins.str) -> builtins.int)" + [case testGenericNamedTupleSerialization] import b [file a.py] diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test index c3295b79e4ed..5e77ff1d85e0 100644 --- a/test-data/unit/deps.test +++ b/test-data/unit/deps.test @@ -1387,12 +1387,13 @@ class B(A): -> , m -> -> , m.B.__init__ - -> , m.B.__mypy-replace + -> , m, m.B.__mypy-replace -> -> -> -> m, m.A, m.B -> m + -> m -> m -> m.B -> m @@ -1419,12 +1420,13 @@ class B(A): -> -> , m.B.__init__ -> - -> , m.B.__mypy-replace + -> , m, m.B.__mypy-replace -> -> -> -> m, m.A, m.B -> m + -> m -> m -> m.B -> m diff --git a/test-data/unit/plugins/add_overloaded_method.py b/test-data/unit/plugins/add_overloaded_method.py new file mode 100644 index 000000000000..efda848f790c --- /dev/null +++ b/test-data/unit/plugins/add_overloaded_method.py @@ -0,0 +1,41 @@ +from __future__ import annotations + +from typing import Callable + +from mypy.nodes import ARG_POS, Argument, Var +from mypy.plugin import ClassDefContext, Plugin +from mypy.plugins.common 
import MethodSpec, add_overloaded_method_to_class + + +class OverloadedMethodPlugin(Plugin): + def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: + if "AddOverloadedMethod" in fullname: + return add_overloaded_method_hook + return None + + +def add_overloaded_method_hook(ctx: ClassDefContext) -> None: + add_overloaded_method_to_class(ctx.api, ctx.cls, "method", _generate_method_specs(ctx)) + add_overloaded_method_to_class( + ctx.api, ctx.cls, "clsmethod", _generate_method_specs(ctx), is_classmethod=True + ) + add_overloaded_method_to_class( + ctx.api, ctx.cls, "stmethod", _generate_method_specs(ctx), is_staticmethod=True + ) + + +def _generate_method_specs(ctx: ClassDefContext) -> list[MethodSpec]: + return [ + MethodSpec( + args=[Argument(Var("arg"), ctx.api.named_type("builtins.int"), None, ARG_POS)], + return_type=ctx.api.named_type("builtins.str"), + ), + MethodSpec( + args=[Argument(Var("arg"), ctx.api.named_type("builtins.str"), None, ARG_POS)], + return_type=ctx.api.named_type("builtins.int"), + ), + ] + + +def plugin(version: str) -> type[OverloadedMethodPlugin]: + return OverloadedMethodPlugin From 9e520c38777267495642845f070be4383f50342d Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 11 Sep 2023 20:02:31 +0100 Subject: [PATCH 041/144] Allow TypedDict unpacking in Callable types (#16083) Fixes https://github.com/python/mypy/issues/16082 Currently we only allow `Unpack` of a TypedDict when it appears in a function definition. This PR also allows this in `Callable` types, similarly to how we do this for variadic types. Note this still doesn't allow having both variadic unpack and a TypedDict unpack in the same `Callable`. Supporting this is tricky, so let's not so this until people will actually ask for this. FWIW we can always suggest callback protocols for such tricky cases. 
--- mypy/exprtotype.py | 4 +++- mypy/fastparse.py | 2 +- mypy/semanal_typeargs.py | 4 +++- mypy/typeanal.py | 13 ++++++++++++- mypy/types.py | 7 +++++-- test-data/unit/check-varargs.test | 15 +++++++++++++++ 6 files changed, 39 insertions(+), 6 deletions(-) diff --git a/mypy/exprtotype.py b/mypy/exprtotype.py index b82d35607ef1..5f0ef79acbd7 100644 --- a/mypy/exprtotype.py +++ b/mypy/exprtotype.py @@ -196,6 +196,8 @@ def expr_to_unanalyzed_type( elif isinstance(expr, EllipsisExpr): return EllipsisType(expr.line) elif allow_unpack and isinstance(expr, StarExpr): - return UnpackType(expr_to_unanalyzed_type(expr.expr, options, allow_new_syntax)) + return UnpackType( + expr_to_unanalyzed_type(expr.expr, options, allow_new_syntax), from_star_syntax=True + ) else: raise TypeTranslationError() diff --git a/mypy/fastparse.py b/mypy/fastparse.py index a96e697d40bf..fe158d468ce8 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -2041,7 +2041,7 @@ def visit_Attribute(self, n: Attribute) -> Type: # Used for Callable[[X *Ys, Z], R] def visit_Starred(self, n: ast3.Starred) -> Type: - return UnpackType(self.visit(n.value)) + return UnpackType(self.visit(n.value), from_star_syntax=True) # List(expr* elts, expr_context ctx) def visit_List(self, n: ast3.List) -> Type: diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py index 3e11951376c9..ed04b30e90ba 100644 --- a/mypy/semanal_typeargs.py +++ b/mypy/semanal_typeargs.py @@ -214,7 +214,9 @@ def visit_unpack_type(self, typ: UnpackType) -> None: # Avoid extra errors if there were some errors already. Also interpret plain Any # as tuple[Any, ...] (this is better for the code in type checker). 
self.fail( - message_registry.INVALID_UNPACK.format(format_type(proper_type, self.options)), typ + message_registry.INVALID_UNPACK.format(format_type(proper_type, self.options)), + typ.type, + code=codes.VALID_TYPE, ) typ.type = self.named_type("builtins.tuple", [AnyType(TypeOfAny.from_error)]) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index e297f2bf1631..385c5d35d67f 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -961,7 +961,7 @@ def visit_unpack_type(self, t: UnpackType) -> Type: if not self.allow_unpack: self.fail(message_registry.INVALID_UNPACK_POSITION, t.type, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) - return UnpackType(self.anal_type(t.type)) + return UnpackType(self.anal_type(t.type), from_star_syntax=t.from_star_syntax) def visit_parameters(self, t: Parameters) -> Type: raise NotImplementedError("ParamSpec literals cannot have unbound TypeVars") @@ -969,6 +969,7 @@ def visit_parameters(self, t: Parameters) -> Type: def visit_callable_type(self, t: CallableType, nested: bool = True) -> Type: # Every Callable can bind its own type variables, if they're not in the outer scope with self.tvar_scope_frame(): + unpacked_kwargs = False if self.defining_alias: variables = t.variables else: @@ -996,6 +997,15 @@ def visit_callable_type(self, t: CallableType, nested: bool = True) -> Type: ) validated_args.append(AnyType(TypeOfAny.from_error)) else: + if nested and isinstance(at, UnpackType) and i == star_index: + # TODO: it would be better to avoid this get_proper_type() call. + p_at = get_proper_type(at.type) + if isinstance(p_at, TypedDictType) and not at.from_star_syntax: + # Automatically detect Unpack[Foo] in Callable as backwards + # compatible syntax for **Foo, if Foo is a TypedDict. 
+ at = p_at + arg_kinds[i] = ARG_STAR2 + unpacked_kwargs = True validated_args.append(at) arg_types = validated_args # If there were multiple (invalid) unpacks, the arg types list will become shorter, @@ -1013,6 +1023,7 @@ def visit_callable_type(self, t: CallableType, nested: bool = True) -> Type: fallback=(t.fallback if t.fallback.type else self.named_type("builtins.function")), variables=self.anal_var_defs(variables), type_guard=special, + unpack_kwargs=unpacked_kwargs, ) return ret diff --git a/mypy/types.py b/mypy/types.py index 04d90c9dc124..22fcd601d6a0 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -1053,11 +1053,14 @@ class UnpackType(ProperType): wild west, technically anything can be present in the wrapped type. """ - __slots__ = ["type"] + __slots__ = ["type", "from_star_syntax"] - def __init__(self, typ: Type, line: int = -1, column: int = -1) -> None: + def __init__( + self, typ: Type, line: int = -1, column: int = -1, from_star_syntax: bool = False + ) -> None: super().__init__(line, column) self.type = typ + self.from_star_syntax = from_star_syntax def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_unpack_type(self) diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test index ef2c3c57fad5..41668e991972 100644 --- a/test-data/unit/check-varargs.test +++ b/test-data/unit/check-varargs.test @@ -1079,3 +1079,18 @@ class C: class D: def __init__(self, **kwds: Unpack[int, str]) -> None: ... # E: Unpack[...] 
requires exactly one type argument [builtins fixtures/dict.pyi] + +[case testUnpackInCallableType] +from typing import Callable +from typing_extensions import Unpack, TypedDict + +class TD(TypedDict): + key: str + value: str + +foo: Callable[[Unpack[TD]], None] +foo(key="yes", value=42) # E: Argument "value" has incompatible type "int"; expected "str" +foo(key="yes", value="ok") + +bad: Callable[[*TD], None] # E: "TD" cannot be unpacked (must be tuple or TypeVarTuple) +[builtins fixtures/dict.pyi] From 66fbf5b526ad8cfa127dd5cca68dcb2f770b1dd7 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 12 Sep 2023 18:19:53 +0100 Subject: [PATCH 042/144] [mypyc] Make tuple packing and unpacking more efficient (#16022) Previously returning a tuple from a function resulted in redundant increfs and decrefs for each item, and similarly unpacking the returned tuple in an assignment had extra incref/decref pair per item. This PR introduces these changes to make this better: * Creating a tuple steals the items always. * Accessing a tuple item optionally borrows the item. * A borrowed reference can be turned into a regular one using the new `Unborrow` op. * The no-op `KeepAlive` op can steal the operands to avoid decrefing the operands. Assignment from tuple now uses the three final features to avoid increfs and decrefs when unpacking a tuple in assignment. The docstrings in this PR contain additional explanation of how this works. In a micro-benchmark this improved performance by about 2-5%. In realistic examples the impact is likely small, but every little helps. Here is an example where this helps: ``` def f() -> tuple[C, C]: return C(), C() # Avoid 2 increfs and 2 decrefs def g() -> None: x, y = f() # Avoid 2 increfs and 2 decrefs ... 
``` --------- Co-authored-by: Alex Waygood --- mypyc/analysis/dataflow.py | 4 ++ mypyc/analysis/ircheck.py | 4 ++ mypyc/analysis/selfleaks.py | 4 ++ mypyc/codegen/emitfunc.py | 11 +++- mypyc/ir/ops.py | 67 ++++++++++++++++++++++++- mypyc/ir/pprint.py | 14 +++++- mypyc/irbuild/ll_builder.py | 3 ++ mypyc/irbuild/statement.py | 23 +++++++++ mypyc/test-data/irbuild-statements.test | 29 ++++++----- mypyc/test-data/refcount.test | 60 ++++++++++++++++++++++ 10 files changed, 200 insertions(+), 19 deletions(-) diff --git a/mypyc/analysis/dataflow.py b/mypyc/analysis/dataflow.py index ee2ff06b0f03..cade0c823962 100644 --- a/mypyc/analysis/dataflow.py +++ b/mypyc/analysis/dataflow.py @@ -46,6 +46,7 @@ Truncate, TupleGet, TupleSet, + Unborrow, Unbox, Unreachable, Value, @@ -272,6 +273,9 @@ def visit_load_address(self, op: LoadAddress) -> GenAndKill[T]: def visit_keep_alive(self, op: KeepAlive) -> GenAndKill[T]: return self.visit_register_op(op) + def visit_unborrow(self, op: Unborrow) -> GenAndKill[T]: + return self.visit_register_op(op) + class DefinedVisitor(BaseAnalysisVisitor[Value]): """Visitor for finding defined registers. 
diff --git a/mypyc/analysis/ircheck.py b/mypyc/analysis/ircheck.py index 2e6b7320e898..a31b1517b036 100644 --- a/mypyc/analysis/ircheck.py +++ b/mypyc/analysis/ircheck.py @@ -44,6 +44,7 @@ Truncate, TupleGet, TupleSet, + Unborrow, Unbox, Unreachable, Value, @@ -422,3 +423,6 @@ def visit_load_address(self, op: LoadAddress) -> None: def visit_keep_alive(self, op: KeepAlive) -> None: pass + + def visit_unborrow(self, op: Unborrow) -> None: + pass diff --git a/mypyc/analysis/selfleaks.py b/mypyc/analysis/selfleaks.py index 288c366e50e5..80c2bc348bc2 100644 --- a/mypyc/analysis/selfleaks.py +++ b/mypyc/analysis/selfleaks.py @@ -40,6 +40,7 @@ Truncate, TupleGet, TupleSet, + Unborrow, Unbox, Unreachable, ) @@ -184,6 +185,9 @@ def visit_load_address(self, op: LoadAddress) -> GenAndKill: def visit_keep_alive(self, op: KeepAlive) -> GenAndKill: return CLEAN + def visit_unborrow(self, op: Unborrow) -> GenAndKill: + return CLEAN + def check_register_op(self, op: RegisterOp) -> GenAndKill: if any(src is self.self_reg for src in op.sources()): return DIRTY diff --git a/mypyc/codegen/emitfunc.py b/mypyc/codegen/emitfunc.py index b4d31544b196..3bce84d3ea59 100644 --- a/mypyc/codegen/emitfunc.py +++ b/mypyc/codegen/emitfunc.py @@ -55,6 +55,7 @@ Truncate, TupleGet, TupleSet, + Unborrow, Unbox, Unreachable, Value, @@ -260,7 +261,6 @@ def visit_tuple_set(self, op: TupleSet) -> None: else: for i, item in enumerate(op.items): self.emit_line(f"{dest}.f{i} = {self.reg(item)};") - self.emit_inc_ref(dest, tuple_type) def visit_assign(self, op: Assign) -> None: dest = self.reg(op.dest) @@ -499,7 +499,8 @@ def visit_tuple_get(self, op: TupleGet) -> None: dest = self.reg(op) src = self.reg(op.src) self.emit_line(f"{dest} = {src}.f{op.index};") - self.emit_inc_ref(dest, op.type) + if not op.is_borrowed: + self.emit_inc_ref(dest, op.type) def get_dest_assign(self, dest: Value) -> str: if not dest.is_void: @@ -746,6 +747,12 @@ def visit_keep_alive(self, op: KeepAlive) -> None: # This is a no-op. 
pass + def visit_unborrow(self, op: Unborrow) -> None: + # This is a no-op that propagates the source value. + dest = self.reg(op) + src = self.reg(op.src) + self.emit_line(f"{dest} = {src};") + # Helpers def label(self, label: BasicBlock) -> str: diff --git a/mypyc/ir/ops.py b/mypyc/ir/ops.py index 2d64cc79d822..04c50d1e2841 100644 --- a/mypyc/ir/ops.py +++ b/mypyc/ir/ops.py @@ -792,6 +792,9 @@ def __init__(self, items: list[Value], line: int) -> None: def sources(self) -> list[Value]: return self.items.copy() + def stolen(self) -> list[Value]: + return self.items.copy() + def accept(self, visitor: OpVisitor[T]) -> T: return visitor.visit_tuple_set(self) @@ -801,13 +804,14 @@ class TupleGet(RegisterOp): error_kind = ERR_NEVER - def __init__(self, src: Value, index: int, line: int = -1) -> None: + def __init__(self, src: Value, index: int, line: int = -1, *, borrow: bool = False) -> None: super().__init__(line) self.src = src self.index = index assert isinstance(src.type, RTuple), "TupleGet only operates on tuples" assert index >= 0 self.type = src.type.types[index] + self.is_borrowed = borrow def sources(self) -> list[Value]: return [self.src] @@ -1387,21 +1391,76 @@ class KeepAlive(RegisterOp): If we didn't have "keep_alive x", x could be freed immediately after taking the address of 'item', resulting in a read after free on the second line. + + If 'steal' is true, the value is considered to be stolen at + this op, i.e. it won't be decref'd. You need to ensure that + the value is freed otherwise, perhaps by using borrowing + followed by Unborrow. + + Be careful with steal=True -- this can cause memory leaks. 
""" error_kind = ERR_NEVER - def __init__(self, src: list[Value]) -> None: + def __init__(self, src: list[Value], *, steal: bool = False) -> None: assert src self.src = src + self.steal = steal def sources(self) -> list[Value]: return self.src.copy() + def stolen(self) -> list[Value]: + if self.steal: + return self.src.copy() + return [] + def accept(self, visitor: OpVisitor[T]) -> T: return visitor.visit_keep_alive(self) +class Unborrow(RegisterOp): + """A no-op op to create a regular reference from a borrowed one. + + Borrowed references can only be used temporarily and the reference + counts won't be managed. This value will be refcounted normally. + + This is mainly useful if you split an aggregate value, such as + a tuple, into components using borrowed values (to avoid increfs), + and want to treat the components as sharing the original managed + reference. You'll also need to use KeepAlive with steal=True to + "consume" the original tuple reference: + + # t is a 2-tuple + r0 = borrow t[0] + r1 = borrow t[1] + r2 = unborrow r0 + r3 = unborrow r1 + # now (r2, r3) represent the tuple as separate items, and the + # original tuple can be considered dead and available to be + # stolen + keep_alive steal t + + Be careful with this -- this can easily cause double freeing. 
+ """ + + error_kind = ERR_NEVER + + def __init__(self, src: Value) -> None: + assert src.is_borrowed + self.src = src + self.type = src.type + + def sources(self) -> list[Value]: + return [self.src] + + def stolen(self) -> list[Value]: + return [] + + def accept(self, visitor: OpVisitor[T]) -> T: + return visitor.visit_unborrow(self) + + @trait class OpVisitor(Generic[T]): """Generic visitor over ops (uses the visitor design pattern).""" @@ -1548,6 +1607,10 @@ def visit_load_address(self, op: LoadAddress) -> T: def visit_keep_alive(self, op: KeepAlive) -> T: raise NotImplementedError + @abstractmethod + def visit_unborrow(self, op: Unborrow) -> T: + raise NotImplementedError + # TODO: Should the following definition live somewhere else? diff --git a/mypyc/ir/pprint.py b/mypyc/ir/pprint.py index c86060c49594..5578049256f1 100644 --- a/mypyc/ir/pprint.py +++ b/mypyc/ir/pprint.py @@ -51,6 +51,7 @@ Truncate, TupleGet, TupleSet, + Unborrow, Unbox, Unreachable, Value, @@ -153,7 +154,7 @@ def visit_init_static(self, op: InitStatic) -> str: return self.format("%s = %r :: %s", name, op.value, op.namespace) def visit_tuple_get(self, op: TupleGet) -> str: - return self.format("%r = %r[%d]", op, op.src, op.index) + return self.format("%r = %s%r[%d]", op, self.borrow_prefix(op), op.src, op.index) def visit_tuple_set(self, op: TupleSet) -> str: item_str = ", ".join(self.format("%r", item) for item in op.items) @@ -274,7 +275,16 @@ def visit_load_address(self, op: LoadAddress) -> str: return self.format("%r = load_address %s", op, op.src) def visit_keep_alive(self, op: KeepAlive) -> str: - return self.format("keep_alive %s" % ", ".join(self.format("%r", v) for v in op.src)) + if op.steal: + steal = "steal " + else: + steal = "" + return self.format( + "keep_alive {}{}".format(steal, ", ".join(self.format("%r", v) for v in op.src)) + ) + + def visit_unborrow(self, op: Unborrow) -> str: + return self.format("%r = unborrow %r", op, op.src) # Helpers diff --git 
a/mypyc/irbuild/ll_builder.py b/mypyc/irbuild/ll_builder.py index 984b6a4deec0..d1ea91476a66 100644 --- a/mypyc/irbuild/ll_builder.py +++ b/mypyc/irbuild/ll_builder.py @@ -266,6 +266,9 @@ def goto_and_activate(self, block: BasicBlock) -> None: self.goto(block) self.activate_block(block) + def keep_alive(self, values: list[Value], *, steal: bool = False) -> None: + self.add(KeepAlive(values, steal=steal)) + def push_error_handler(self, handler: BasicBlock | None) -> None: self.error_handlers.append(handler) diff --git a/mypyc/irbuild/statement.py b/mypyc/irbuild/statement.py index 63297618108c..d7e01456139d 100644 --- a/mypyc/irbuild/statement.py +++ b/mypyc/irbuild/statement.py @@ -59,11 +59,13 @@ Register, Return, TupleGet, + Unborrow, Unreachable, Value, ) from mypyc.ir.rtypes import ( RInstance, + RTuple, c_pyssize_t_rprimitive, exc_rtuple, is_tagged, @@ -183,8 +185,29 @@ def transform_assignment_stmt(builder: IRBuilder, stmt: AssignmentStmt) -> None: line = stmt.rvalue.line rvalue_reg = builder.accept(stmt.rvalue) + if builder.non_function_scope() and stmt.is_final_def: builder.init_final_static(first_lvalue, rvalue_reg) + + # Special-case multiple assignments like 'x, y = expr' to reduce refcount ops. 
+ if ( + isinstance(first_lvalue, (TupleExpr, ListExpr)) + and isinstance(rvalue_reg.type, RTuple) + and len(rvalue_reg.type.types) == len(first_lvalue.items) + and len(lvalues) == 1 + and all(is_simple_lvalue(item) for item in first_lvalue.items) + and any(t.is_refcounted for t in rvalue_reg.type.types) + ): + n = len(first_lvalue.items) + for i in range(n): + target = builder.get_assignment_target(first_lvalue.items[i]) + rvalue_item = builder.add(TupleGet(rvalue_reg, i, borrow=True)) + rvalue_item = builder.add(Unborrow(rvalue_item)) + builder.assign(target, rvalue_item, line) + builder.builder.keep_alive([rvalue_reg], steal=True) + builder.flush_keep_alives() + return + for lvalue in lvalues: target = builder.get_assignment_target(lvalue) builder.assign(target, rvalue_reg, line) diff --git a/mypyc/test-data/irbuild-statements.test b/mypyc/test-data/irbuild-statements.test index 062abd47d163..490b41336e88 100644 --- a/mypyc/test-data/irbuild-statements.test +++ b/mypyc/test-data/irbuild-statements.test @@ -502,16 +502,16 @@ L0: [case testMultipleAssignmentBasicUnpacking] from typing import Tuple, Any -def from_tuple(t: Tuple[int, str]) -> None: +def from_tuple(t: Tuple[bool, None]) -> None: x, y = t def from_any(a: Any) -> None: x, y = a [out] def from_tuple(t): - t :: tuple[int, str] - r0, x :: int - r1, y :: str + t :: tuple[bool, None] + r0, x :: bool + r1, y :: None L0: r0 = t[0] x = r0 @@ -563,16 +563,19 @@ def from_any(a: Any) -> None: [out] def from_tuple(t): t :: tuple[int, object] - r0 :: int - r1, x, r2 :: object - r3, y :: int + r0, r1 :: int + r2, x, r3, r4 :: object + r5, y :: int L0: - r0 = t[0] - r1 = box(int, r0) - x = r1 - r2 = t[1] - r3 = unbox(int, r2) - y = r3 + r0 = borrow t[0] + r1 = unborrow r0 + r2 = box(int, r1) + x = r2 + r3 = borrow t[1] + r4 = unborrow r3 + r5 = unbox(int, r4) + y = r5 + keep_alive steal t return 1 def from_any(a): a, r0, r1 :: object diff --git a/mypyc/test-data/refcount.test b/mypyc/test-data/refcount.test index 
3db4caa39566..0f2c134ae21e 100644 --- a/mypyc/test-data/refcount.test +++ b/mypyc/test-data/refcount.test @@ -656,6 +656,66 @@ L1: L2: return 4 +[case testReturnTuple] +from typing import Tuple + +class C: pass +def f() -> Tuple[C, C]: + a = C() + b = C() + return a, b +[out] +def f(): + r0, a, r1, b :: __main__.C + r2 :: tuple[__main__.C, __main__.C] +L0: + r0 = C() + a = r0 + r1 = C() + b = r1 + r2 = (a, b) + return r2 + +[case testDecomposeTuple] +from typing import Tuple + +class C: + a: int + +def f() -> int: + x, y = g() + return x.a + y.a + +def g() -> Tuple[C, C]: + return C(), C() +[out] +def f(): + r0 :: tuple[__main__.C, __main__.C] + r1, r2, x, r3, r4, y :: __main__.C + r5, r6, r7 :: int +L0: + r0 = g() + r1 = borrow r0[0] + r2 = unborrow r1 + x = r2 + r3 = borrow r0[1] + r4 = unborrow r3 + y = r4 + r5 = borrow x.a + r6 = borrow y.a + r7 = CPyTagged_Add(r5, r6) + dec_ref x + dec_ref y + return r7 +def g(): + r0, r1 :: __main__.C + r2 :: tuple[__main__.C, __main__.C] +L0: + r0 = C() + r1 = C() + r2 = (r0, r1) + return r2 + [case testUnicodeLiteral] def f() -> str: return "some string" From b3275572ec9b65d0a1b5157c5f73ad4004a356b4 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 13 Sep 2023 23:41:08 +0100 Subject: [PATCH 043/144] Subtyping and inference of user defined variadic types (#16076) The second part of support for user defined variadic types comes as a single PR, it was hard to split into smaller parts. This part covers subtyping and inference (and relies on the first part: type analysis, normalization, and expansion, concluded by https://github.com/python/mypy/pull/15991). Note btw that the third (and last) part that covers actually using all the stuff in `checkexpr.py` will likely come as several smaller PRs. 
Some comments on this PR: * First good news: it looks like instances subtyping/inference can be handled in a really simple way, we just need to find correct type arguments mapping for each type variable, and perform procedures argument by argument (note this heavily relies on the normalization). Also callable subtyping inference for variadic items effectively defers to corresponding tuple types. This way all code paths will ultimately go through variadic tuple subtyping/inference (there is still a bunch of boilerplate to do the mapping, but it is quite simple). * Second some bad news: a lot of edge cases involving `*tuple[X, ...]` were missing everywhere (even couple cases in the code I touched before). I added all that were either simple or important. We can handle more if users will ask, since it is quite tricky. * Note that I handle variadic tuples essentially as infinite unions, the core of the logic for this (and for most of this PR FWIW) is in `variadic_tuple_subtype()`. * Previously `Foo[*tuple[int, ...]]` was considered a subtype of `Foo[int, int]`. I think this is wrong. I didn't find where this is required in the PEP (see one case below however), and mypy currently considers `tuple[int, ...]` not a subtype of `tuple[int, int]` (vice versa are subtypes), and similarly `(*args: int)` vs `(x: int, y: int)` for callables. Because of the logic I described in the first comment, the same logic now uniformly applies to instances as well. * Note however the PEP requires special casing of `Foo[*tuple[Any, ...]]` (equivalent to bare `Foo`), and I agree we should do this. I added a minimal special case for this. Note we also do this for callables as well (`*args: Any` is very different from `*args: object`). And I think we should special case `tuple[Any, ...] <: tuple[int, int]` as well. 
In the future we can even extend the special casing to `tuple[int, *tuple[Any, ...], int]` in the spirit of https://github.com/python/mypy/pull/15913 * In this PR I specifically only handle the PEP required item from above for instances. For plain tuples I left a TODO, @hauntsaninja may implement it since it is needed for other unrelated PR. * I make the default upper bound for `TypeVarTupleType` to be `tuple[object, ...]`. I think it can never be `object` (and this simplifies some subtyping corner cases). * TBH I didn't look into callables subtyping/inference very deeply (unlike instances and tuples), if needed we can improve their handling later. * Note I remove some failing unit tests because they test non-nomralized forms that should never appear now. We should probably add some more unit tests, but TBH I am quite tired now. --- mypy/constraints.py | 231 +++++++++---------- mypy/erasetype.py | 11 +- mypy/expandtype.py | 3 +- mypy/fixup.py | 17 +- mypy/join.py | 154 ++++++++++++- mypy/meet.py | 122 +++++++++- mypy/semanal.py | 3 +- mypy/semanal_typeargs.py | 3 +- mypy/solve.py | 8 +- mypy/subtypes.py | 241 ++++++++++++-------- mypy/test/testconstraints.py | 42 +--- mypy/test/testsubtypes.py | 83 +------ mypy/test/testtypes.py | 77 +++++++ mypy/test/typefixture.py | 11 +- mypy/typeops.py | 7 +- mypy/typevartuples.py | 134 ----------- test-data/unit/check-incremental.test | 19 ++ test-data/unit/check-typevar-tuple.test | 285 +++++++++++++++++++++++- test-data/unit/semanal-types.test | 7 +- 19 files changed, 943 insertions(+), 515 deletions(-) diff --git a/mypy/constraints.py b/mypy/constraints.py index 0e59b5459fd4..0524e38f9643 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Final, Iterable, List, Sequence, cast +from typing import TYPE_CHECKING, Final, Iterable, List, Sequence import mypy.subtypes import mypy.typeops @@ -58,7 +58,6 @@ ) from mypy.types_utils import 
is_union_with_any from mypy.typestate import type_state -from mypy.typevartuples import extract_unpack, split_with_mapped_and_template if TYPE_CHECKING: from mypy.infer import ArgumentInferContext @@ -745,28 +744,23 @@ def visit_instance(self, template: Instance) -> list[Constraint]: tvars = mapped.type.defn.type_vars if instance.type.has_type_var_tuple_type: + # Variadic types need special handling to map each type argument to + # the correct corresponding type variable. assert instance.type.type_var_tuple_prefix is not None assert instance.type.type_var_tuple_suffix is not None - assert mapped.type.type_var_tuple_prefix is not None - assert mapped.type.type_var_tuple_suffix is not None - - unpack_constraints, instance_args, mapped_args = build_constraints_for_unpack( - instance.args, - instance.type.type_var_tuple_prefix, - instance.type.type_var_tuple_suffix, - mapped.args, - mapped.type.type_var_tuple_prefix, - mapped.type.type_var_tuple_suffix, - self.direction, + prefix_len = instance.type.type_var_tuple_prefix + suffix_len = instance.type.type_var_tuple_suffix + tvt = instance.type.defn.type_vars[prefix_len] + assert isinstance(tvt, TypeVarTupleType) + fallback = tvt.tuple_fallback + i_prefix, i_middle, i_suffix = split_with_prefix_and_suffix( + instance.args, prefix_len, suffix_len ) - res.extend(unpack_constraints) - - tvars_prefix, _, tvars_suffix = split_with_prefix_and_suffix( - tuple(tvars), - instance.type.type_var_tuple_prefix, - instance.type.type_var_tuple_suffix, + m_prefix, m_middle, m_suffix = split_with_prefix_and_suffix( + mapped.args, prefix_len, suffix_len ) - tvars = cast("list[TypeVarLikeType]", list(tvars_prefix + tvars_suffix)) + instance_args = i_prefix + (TupleType(list(i_middle), fallback),) + i_suffix + mapped_args = m_prefix + (TupleType(list(m_middle), fallback),) + m_suffix else: mapped_args = mapped.args instance_args = instance.args @@ -806,44 +800,38 @@ def visit_instance(self, template: Instance) -> list[Constraint]: ) 
res.append(Constraint(mapped_arg, SUBTYPE_OF, suffix)) res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix)) - else: - # This case should have been handled above. - assert not isinstance(tvar, TypeVarTupleType) + elif isinstance(tvar, TypeVarTupleType): + # Handle variadic type variables covariantly for consistency. + res.extend(infer_constraints(mapped_arg, instance_arg, self.direction)) return res elif self.direction == SUPERTYPE_OF and instance.type.has_base(template.type.fullname): mapped = map_instance_to_supertype(instance, template.type) tvars = template.type.defn.type_vars if template.type.has_type_var_tuple_type: - assert mapped.type.type_var_tuple_prefix is not None - assert mapped.type.type_var_tuple_suffix is not None + # Variadic types need special handling to map each type argument to + # the correct corresponding type variable. assert template.type.type_var_tuple_prefix is not None assert template.type.type_var_tuple_suffix is not None - - unpack_constraints, mapped_args, template_args = build_constraints_for_unpack( - mapped.args, - mapped.type.type_var_tuple_prefix, - mapped.type.type_var_tuple_suffix, - template.args, - template.type.type_var_tuple_prefix, - template.type.type_var_tuple_suffix, - self.direction, + prefix_len = template.type.type_var_tuple_prefix + suffix_len = template.type.type_var_tuple_suffix + tvt = template.type.defn.type_vars[prefix_len] + assert isinstance(tvt, TypeVarTupleType) + fallback = tvt.tuple_fallback + t_prefix, t_middle, t_suffix = split_with_prefix_and_suffix( + template.args, prefix_len, suffix_len ) - res.extend(unpack_constraints) - - tvars_prefix, _, tvars_suffix = split_with_prefix_and_suffix( - tuple(tvars), - template.type.type_var_tuple_prefix, - template.type.type_var_tuple_suffix, + m_prefix, m_middle, m_suffix = split_with_prefix_and_suffix( + mapped.args, prefix_len, suffix_len ) - tvars = cast("list[TypeVarLikeType]", list(tvars_prefix + tvars_suffix)) + template_args = t_prefix + 
(TupleType(list(t_middle), fallback),) + t_suffix + mapped_args = m_prefix + (TupleType(list(m_middle), fallback),) + m_suffix else: mapped_args = mapped.args template_args = template.args # N.B: We use zip instead of indexing because the lengths might have # mismatches during daemon reprocessing. for tvar, mapped_arg, template_arg in zip(tvars, mapped_args, template_args): - assert not isinstance(tvar, TypeVarTupleType) if isinstance(tvar, TypeVarType): # The constraints for generic type parameters depend on variance. # Include constraints from both directions if invariant. @@ -878,9 +866,9 @@ def visit_instance(self, template: Instance) -> list[Constraint]: ) res.append(Constraint(template_arg, SUBTYPE_OF, suffix)) res.append(Constraint(template_arg, SUPERTYPE_OF, suffix)) - else: - # This case should have been handled above. - assert not isinstance(tvar, TypeVarTupleType) + elif isinstance(tvar, TypeVarTupleType): + # Handle variadic type variables covariantly for consistency. + res.extend(infer_constraints(template_arg, mapped_arg, self.direction)) return res if ( template.type.is_protocol @@ -1049,7 +1037,8 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: ) res.extend(unpack_constraints) else: - # Negate direction due to function argument type contravariance. + # TODO: do we need some special-casing when unpack is present in actual + # callable but not in template callable? res.extend( infer_callable_arguments_constraints(template, cactual, self.direction) ) @@ -1170,11 +1159,29 @@ def visit_tuple_type(self, template: TupleType) -> list[Constraint]: res: list[Constraint] = [] if unpack_index is not None: if is_varlength_tuple: + # Variadic tuple can be only a supertype of a tuple type, but even if + # direction is opposite, inferring something may give better error messages. 
unpack_type = template.items[unpack_index] assert isinstance(unpack_type, UnpackType) - unpacked_type = unpack_type.type - assert isinstance(unpacked_type, TypeVarTupleType) - return [Constraint(type_var=unpacked_type, op=self.direction, target=actual)] + unpacked_type = get_proper_type(unpack_type.type) + if isinstance(unpacked_type, TypeVarTupleType): + res = [ + Constraint(type_var=unpacked_type, op=self.direction, target=actual) + ] + else: + assert ( + isinstance(unpacked_type, Instance) + and unpacked_type.type.fullname == "builtins.tuple" + ) + res = infer_constraints(unpacked_type, actual, self.direction) + assert isinstance(actual, Instance) # ensured by is_varlength_tuple == True + for i, ti in enumerate(template.items): + if i == unpack_index: + # This one we just handled above. + continue + # For Tuple[T, *Ts, S] <: tuple[X, ...] infer also T <: X and S <: X. + res.extend(infer_constraints(ti, actual.args[0], self.direction)) + return res else: assert isinstance(actual, TupleType) unpack_constraints = build_constraints_for_simple_unpack( @@ -1184,8 +1191,36 @@ def visit_tuple_type(self, template: TupleType) -> list[Constraint]: template_items: tuple[Type, ...] = () res.extend(unpack_constraints) elif isinstance(actual, TupleType): - actual_items = tuple(actual.items) - template_items = tuple(template.items) + a_unpack_index = find_unpack_in_list(actual.items) + if a_unpack_index is not None: + # The case where template tuple doesn't have an unpack, but actual tuple + # has an unpack. We can infer something if actual unpack is a variadic tuple. + # Tuple[T, S, U] <: tuple[X, *tuple[Y, ...], Z] => T <: X, S <: Y, U <: Z. 
+ a_unpack = actual.items[a_unpack_index] + assert isinstance(a_unpack, UnpackType) + a_unpacked = get_proper_type(a_unpack.type) + if len(actual.items) + 1 <= len(template.items): + a_prefix_len = a_unpack_index + a_suffix_len = len(actual.items) - a_unpack_index - 1 + t_prefix, t_middle, t_suffix = split_with_prefix_and_suffix( + tuple(template.items), a_prefix_len, a_suffix_len + ) + actual_items = tuple(actual.items[:a_prefix_len]) + if a_suffix_len: + actual_items += tuple(actual.items[-a_suffix_len:]) + template_items = t_prefix + t_suffix + if isinstance(a_unpacked, Instance): + assert a_unpacked.type.fullname == "builtins.tuple" + for tm in t_middle: + res.extend( + infer_constraints(tm, a_unpacked.args[0], self.direction) + ) + else: + actual_items = () + template_items = () + else: + actual_items = tuple(actual.items) + template_items = tuple(template.items) else: return res @@ -1236,8 +1271,13 @@ def visit_type_alias_type(self, template: TypeAliasType) -> list[Constraint]: def infer_against_any(self, types: Iterable[Type], any_type: AnyType) -> list[Constraint]: res: list[Constraint] = [] for t in types: - if isinstance(t, UnpackType) and isinstance(t.type, TypeVarTupleType): - res.append(Constraint(t.type, self.direction, any_type)) + if isinstance(t, UnpackType): + if isinstance(t.type, TypeVarTupleType): + res.append(Constraint(t.type, self.direction, any_type)) + else: + unpacked = get_proper_type(t.type) + assert isinstance(unpacked, Instance) + res.extend(infer_constraints(unpacked, any_type, self.direction)) else: # Note that we ignore variance and simply always use the # original direction. This is because for Any targets direction is @@ -1374,9 +1414,8 @@ def build_constraints_for_simple_unpack( templates: T1, T2, Ts, Ts, Ts, ... actuals: A1, As, As, As, ... 
- Note: this function can only be called for builtin variadic constructors: Tuple and Callable, - for Instances variance depends on position, and a much more complex function - build_constraints_for_unpack() should be used. + Note: this function can only be called for builtin variadic constructors: Tuple and Callable. + For instances, you should first find correct type argument mapping. """ template_unpack = find_unpack_in_list(template_args) assert template_unpack is not None @@ -1409,7 +1448,8 @@ def build_constraints_for_simple_unpack( common_prefix = min(template_prefix, actual_prefix) common_suffix = min(template_suffix, actual_suffix) if actual_prefix >= template_prefix and actual_suffix >= template_suffix: - # This is the only case where we can guarantee there will be no partial overlap. + # This is the only case where we can guarantee there will be no partial overlap + # (note however partial overlap is OK for variadic tuples, it is handled below). t_unpack = template_args[template_unpack] # Handle constraints from prefixes/suffixes first. @@ -1439,74 +1479,21 @@ def build_constraints_for_simple_unpack( res.extend(infer_constraints(tp.args[0], a_tp.args[0], direction)) elif isinstance(tp, TypeVarTupleType): res.append(Constraint(tp, direction, TupleType(list(middle), tp.tuple_fallback))) + elif actual_unpack is not None: + # A special case for a variadic tuple unpack, we simply infer T <: X from + # Tuple[..., *tuple[T, ...], ...] <: Tuple[..., *tuple[X, ...], ...]. 
+ actual_unpack_type = actual_args[actual_unpack] + assert isinstance(actual_unpack_type, UnpackType) + a_unpacked = get_proper_type(actual_unpack_type.type) + if isinstance(a_unpacked, Instance) and a_unpacked.type.fullname == "builtins.tuple": + t_unpack = template_args[template_unpack] + assert isinstance(t_unpack, UnpackType) + tp = get_proper_type(t_unpack.type) + if isinstance(tp, Instance) and tp.type.fullname == "builtins.tuple": + res.extend(infer_constraints(tp.args[0], a_unpacked.args[0], direction)) return res -def build_constraints_for_unpack( - # TODO: this naming is misleading, these should be "actual", not "mapped" - # both template and actual can be mapped before, depending on direction. - # Also the convention is to put template related args first. - mapped: tuple[Type, ...], - mapped_prefix_len: int | None, - mapped_suffix_len: int | None, - template: tuple[Type, ...], - template_prefix_len: int, - template_suffix_len: int, - direction: int, -) -> tuple[list[Constraint], tuple[Type, ...], tuple[Type, ...]]: - # TODO: this function looks broken: - # a) it should take into account variances, but it doesn't - # b) it looks like both call sites always pass identical values to args (2, 3) and (5, 6) - # because after map_instance_to_supertype() both template and actual have same TypeInfo. 
- if mapped_prefix_len is None: - mapped_prefix_len = template_prefix_len - if mapped_suffix_len is None: - mapped_suffix_len = template_suffix_len - - split_result = split_with_mapped_and_template( - mapped, - mapped_prefix_len, - mapped_suffix_len, - template, - template_prefix_len, - template_suffix_len, - ) - assert split_result is not None - ( - mapped_prefix, - mapped_middle, - mapped_suffix, - template_prefix, - template_middle, - template_suffix, - ) = split_result - - template_unpack = extract_unpack(template_middle) - res = [] - - if template_unpack is not None: - if isinstance(template_unpack, TypeVarTupleType): - res.append( - Constraint( - template_unpack, - direction, - TupleType(list(mapped_middle), template_unpack.tuple_fallback), - ) - ) - elif ( - isinstance(template_unpack, Instance) - and template_unpack.type.fullname == "builtins.tuple" - ): - for item in mapped_middle: - res.extend(infer_constraints(template_unpack.args[0], item, direction)) - - elif isinstance(template_unpack, TupleType): - if len(template_unpack.items) == len(mapped_middle): - for template_arg, item in zip(template_unpack.items, mapped_middle): - res.extend(infer_constraints(template_arg, item, direction)) - return res, mapped_prefix + mapped_suffix, template_prefix + template_suffix - - def infer_directed_arg_constraints(left: Type, right: Type, direction: int) -> list[Constraint]: """Infer constraints between two arguments using direction between original callables.""" if isinstance(left, (ParamSpecType, UnpackType)) or isinstance( diff --git a/mypy/erasetype.py b/mypy/erasetype.py index d1a01fb6c779..24471f918319 100644 --- a/mypy/erasetype.py +++ b/mypy/erasetype.py @@ -77,7 +77,16 @@ def visit_deleted_type(self, t: DeletedType) -> ProperType: return t def visit_instance(self, t: Instance) -> ProperType: - return Instance(t.type, [AnyType(TypeOfAny.special_form)] * len(t.args), t.line) + args: list[Type] = [] + for tv in t.type.defn.type_vars: + # Valid erasure for *Ts 
is *tuple[Any, ...], not just Any. + if isinstance(tv, TypeVarTupleType): + args.append( + tv.tuple_fallback.copy_modified(args=[AnyType(TypeOfAny.special_form)]) + ) + else: + args.append(AnyType(TypeOfAny.special_form)) + return Instance(t.type, args, t.line) def visit_type_var(self, t: TypeVarType) -> ProperType: return AnyType(TypeOfAny.special_form) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index c29fcb167777..b233561e19c2 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -255,7 +255,8 @@ def visit_param_spec(self, t: ParamSpecType) -> Type: variables=[*t.prefix.variables, *repl.variables], ) else: - # TODO: replace this with "assert False" + # We could encode Any as trivial parameters etc., but it would be too verbose. + # TODO: assert this is a trivial type, like Any, Never, or object. return repl def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type: diff --git a/mypy/fixup.py b/mypy/fixup.py index 2b2e1210ee4e..5ffc47120734 100644 --- a/mypy/fixup.py +++ b/mypy/fixup.py @@ -81,11 +81,17 @@ def visit_type_info(self, info: TypeInfo) -> None: info.update_tuple_type(info.tuple_type) if info.special_alias: info.special_alias.alias_tvars = list(info.defn.type_vars) + for i, t in enumerate(info.defn.type_vars): + if isinstance(t, TypeVarTupleType): + info.special_alias.tvar_tuple_index = i if info.typeddict_type: info.typeddict_type.accept(self.type_fixer) info.update_typeddict_type(info.typeddict_type) if info.special_alias: info.special_alias.alias_tvars = list(info.defn.type_vars) + for i, t in enumerate(info.defn.type_vars): + if isinstance(t, TypeVarTupleType): + info.special_alias.tvar_tuple_index = i if info.declared_metaclass: info.declared_metaclass.accept(self.type_fixer) if info.metaclass_type: @@ -166,11 +172,7 @@ def visit_decorator(self, d: Decorator) -> None: def visit_class_def(self, c: ClassDef) -> None: for v in c.type_vars: - if isinstance(v, TypeVarType): - for value in v.values: - value.accept(self.type_fixer) - 
v.upper_bound.accept(self.type_fixer) - v.default.accept(self.type_fixer) + v.accept(self.type_fixer) def visit_type_var_expr(self, tv: TypeVarExpr) -> None: for value in tv.values: @@ -184,6 +186,7 @@ def visit_paramspec_expr(self, p: ParamSpecExpr) -> None: def visit_type_var_tuple_expr(self, tv: TypeVarTupleExpr) -> None: tv.upper_bound.accept(self.type_fixer) + tv.tuple_fallback.accept(self.type_fixer) tv.default.accept(self.type_fixer) def visit_var(self, v: Var) -> None: @@ -314,6 +317,7 @@ def visit_param_spec(self, p: ParamSpecType) -> None: p.default.accept(self) def visit_type_var_tuple(self, t: TypeVarTupleType) -> None: + t.tuple_fallback.accept(self) t.upper_bound.accept(self) t.default.accept(self) @@ -336,9 +340,6 @@ def visit_union_type(self, ut: UnionType) -> None: for it in ut.items: it.accept(self) - def visit_void(self, o: Any) -> None: - pass # Nothing to descend into. - def visit_type_type(self, t: TypeType) -> None: t.item.accept(self) diff --git a/mypy/join.py b/mypy/join.py index 806c644a680c..e4429425d98a 100644 --- a/mypy/join.py +++ b/mypy/join.py @@ -43,8 +43,10 @@ UninhabitedType, UnionType, UnpackType, + find_unpack_in_list, get_proper_type, get_proper_types, + split_with_prefix_and_suffix, ) @@ -67,7 +69,25 @@ def join_instances(self, t: Instance, s: Instance) -> ProperType: args: list[Type] = [] # N.B: We use zip instead of indexing because the lengths might have # mismatches during daemon reprocessing. - for ta, sa, type_var in zip(t.args, s.args, t.type.defn.type_vars): + if t.type.has_type_var_tuple_type: + # We handle joins of variadic instances by simply creating correct mapping + # for type arguments and compute the individual joins same as for regular + # instances. All the heavy lifting is done in the join of tuple types. 
+ assert s.type.type_var_tuple_prefix is not None + assert s.type.type_var_tuple_suffix is not None + prefix = s.type.type_var_tuple_prefix + suffix = s.type.type_var_tuple_suffix + tvt = s.type.defn.type_vars[prefix] + assert isinstance(tvt, TypeVarTupleType) + fallback = tvt.tuple_fallback + s_prefix, s_middle, s_suffix = split_with_prefix_and_suffix(s.args, prefix, suffix) + t_prefix, t_middle, t_suffix = split_with_prefix_and_suffix(t.args, prefix, suffix) + s_args = s_prefix + (TupleType(list(s_middle), fallback),) + s_suffix + t_args = t_prefix + (TupleType(list(t_middle), fallback),) + t_suffix + else: + t_args = t.args + s_args = s.args + for ta, sa, type_var in zip(t_args, s_args, t.type.defn.type_vars): ta_proper = get_proper_type(ta) sa_proper = get_proper_type(sa) new_type: Type | None = None @@ -93,6 +113,18 @@ def join_instances(self, t: Instance, s: Instance) -> ProperType: # If the types are different but equivalent, then an Any is involved # so using a join in the contravariant case is also OK. new_type = join_types(ta, sa, self) + elif isinstance(type_var, TypeVarTupleType): + new_type = get_proper_type(join_types(ta, sa, self)) + # Put the joined arguments back into instance in the normal form: + # a) Tuple[X, Y, Z] -> [X, Y, Z] + # b) tuple[X, ...] -> [*tuple[X, ...]] + if isinstance(new_type, Instance): + assert new_type.type.fullname == "builtins.tuple" + new_type = UnpackType(new_type) + else: + assert isinstance(new_type, TupleType) + args.extend(new_type.items) + continue else: # ParamSpec type variables behave the same, independent of variance if not is_equivalent(ta, sa): @@ -440,6 +472,113 @@ def visit_overloaded(self, t: Overloaded) -> ProperType: return join_types(t, call) return join_types(t.fallback, s) + def join_tuples(self, s: TupleType, t: TupleType) -> list[Type] | None: + """Join two tuple types while handling variadic entries. + + This is surprisingly tricky, and we don't handle some tricky corner cases. 
+ Most of the trickiness comes from the variadic tuple items like *tuple[X, ...] + since they can have arbitrary partial overlaps (while *Ts can't be split). + """ + s_unpack_index = find_unpack_in_list(s.items) + t_unpack_index = find_unpack_in_list(t.items) + if s_unpack_index is None and t_unpack_index is None: + if s.length() == t.length(): + items: list[Type] = [] + for i in range(t.length()): + items.append(join_types(t.items[i], s.items[i])) + return items + return None + if s_unpack_index is not None and t_unpack_index is not None: + # The most complex case: both tuples have an upack item. + s_unpack = s.items[s_unpack_index] + assert isinstance(s_unpack, UnpackType) + s_unpacked = get_proper_type(s_unpack.type) + t_unpack = t.items[t_unpack_index] + assert isinstance(t_unpack, UnpackType) + t_unpacked = get_proper_type(t_unpack.type) + if s.length() == t.length() and s_unpack_index == t_unpack_index: + # We can handle a case where arity is perfectly aligned, e.g. + # join(Tuple[X1, *tuple[Y1, ...], Z1], Tuple[X2, *tuple[Y2, ...], Z2]). + # We can essentially perform the join elementwise. 
+ prefix_len = t_unpack_index + suffix_len = t.length() - t_unpack_index - 1 + items = [] + for si, ti in zip(s.items[:prefix_len], t.items[:prefix_len]): + items.append(join_types(si, ti)) + joined = join_types(s_unpacked, t_unpacked) + if isinstance(joined, TypeVarTupleType): + items.append(UnpackType(joined)) + elif isinstance(joined, Instance) and joined.type.fullname == "builtins.tuple": + items.append(UnpackType(joined)) + else: + if isinstance(t_unpacked, Instance): + assert t_unpacked.type.fullname == "builtins.tuple" + tuple_instance = t_unpacked + else: + assert isinstance(t_unpacked, TypeVarTupleType) + tuple_instance = t_unpacked.tuple_fallback + items.append( + UnpackType( + tuple_instance.copy_modified( + args=[object_from_instance(tuple_instance)] + ) + ) + ) + if suffix_len: + for si, ti in zip(s.items[-suffix_len:], t.items[-suffix_len:]): + items.append(join_types(si, ti)) + return items + if s.length() == 1 or t.length() == 1: + # Another case we can handle is when one of tuple is purely variadic + # (i.e. a non-normalized form of tuple[X, ...]), in this case the join + # will be again purely variadic. + if not (isinstance(s_unpacked, Instance) and isinstance(t_unpacked, Instance)): + return None + assert s_unpacked.type.fullname == "builtins.tuple" + assert t_unpacked.type.fullname == "builtins.tuple" + mid_joined = join_types(s_unpacked.args[0], t_unpacked.args[0]) + t_other = [a for i, a in enumerate(t.items) if i != t_unpack_index] + s_other = [a for i, a in enumerate(s.items) if i != s_unpack_index] + other_joined = join_type_list(s_other + t_other) + mid_joined = join_types(mid_joined, other_joined) + return [UnpackType(s_unpacked.copy_modified(args=[mid_joined]))] + # TODO: are there other case we can handle (e.g. both prefix/suffix are shorter)? 
+ return None + if s_unpack_index is not None: + variadic = s + unpack_index = s_unpack_index + fixed = t + else: + assert t_unpack_index is not None + variadic = t + unpack_index = t_unpack_index + fixed = s + # Case where one tuple has variadic item and the other one doesn't. The join will + # be variadic, since fixed tuple is a subtype of variadic, but not vice versa. + unpack = variadic.items[unpack_index] + assert isinstance(unpack, UnpackType) + unpacked = get_proper_type(unpack.type) + if not isinstance(unpacked, Instance): + return None + if fixed.length() < variadic.length() - 1: + # There are no non-trivial types that are supertype of both. + return None + prefix_len = unpack_index + suffix_len = variadic.length() - prefix_len - 1 + prefix, middle, suffix = split_with_prefix_and_suffix( + tuple(fixed.items), prefix_len, suffix_len + ) + items = [] + for fi, vi in zip(prefix, variadic.items[:prefix_len]): + items.append(join_types(fi, vi)) + mid_joined = join_type_list(list(middle)) + mid_joined = join_types(mid_joined, unpacked.args[0]) + items.append(UnpackType(unpacked.copy_modified(args=[mid_joined]))) + if suffix_len: + for fi, vi in zip(suffix, variadic.items[-suffix_len:]): + items.append(join_types(fi, vi)) + return items + def visit_tuple_type(self, t: TupleType) -> ProperType: # When given two fixed-length tuples: # * If they have the same length, join their subtypes item-wise: @@ -452,19 +591,22 @@ def visit_tuple_type(self, t: TupleType) -> ProperType: # Tuple[int, bool] + Tuple[bool, ...] becomes Tuple[int, ...] 
# * Joining with any Sequence also returns a Sequence: # Tuple[int, bool] + List[bool] becomes Sequence[int] - if isinstance(self.s, TupleType) and self.s.length() == t.length(): + if isinstance(self.s, TupleType): if self.instance_joiner is None: self.instance_joiner = InstanceJoiner() fallback = self.instance_joiner.join_instances( mypy.typeops.tuple_fallback(self.s), mypy.typeops.tuple_fallback(t) ) assert isinstance(fallback, Instance) - if self.s.length() == t.length(): - items: list[Type] = [] - for i in range(t.length()): - items.append(join_types(t.items[i], self.s.items[i])) + items = self.join_tuples(self.s, t) + if items is not None: return TupleType(items, fallback) else: + # TODO: should this be a default fallback behaviour like for meet? + if is_proper_subtype(self.s, t): + return t + if is_proper_subtype(t, self.s): + return self.s return fallback else: return join_types(self.s, mypy.typeops.tuple_fallback(t)) diff --git a/mypy/meet.py b/mypy/meet.py index 2efde4ac7588..0fa500d32c30 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -44,8 +44,10 @@ UninhabitedType, UnionType, UnpackType, + find_unpack_in_list, get_proper_type, get_proper_types, + split_with_prefix_and_suffix, ) # TODO Describe this module. @@ -721,8 +723,41 @@ def visit_instance(self, t: Instance) -> ProperType: args: list[Type] = [] # N.B: We use zip instead of indexing because the lengths might have # mismatches during daemon reprocessing. - for ta, sia in zip(t.args, self.s.args): - args.append(self.meet(ta, sia)) + if t.type.has_type_var_tuple_type: + # We handle meet of variadic instances by simply creating correct mapping + # for type arguments and compute the individual meets same as for regular + # instances. All the heavy lifting is done in the meet of tuple types. 
+ s = self.s + assert s.type.type_var_tuple_prefix is not None + assert s.type.type_var_tuple_suffix is not None + prefix = s.type.type_var_tuple_prefix + suffix = s.type.type_var_tuple_suffix + tvt = s.type.defn.type_vars[prefix] + assert isinstance(tvt, TypeVarTupleType) + fallback = tvt.tuple_fallback + s_prefix, s_middle, s_suffix = split_with_prefix_and_suffix( + s.args, prefix, suffix + ) + t_prefix, t_middle, t_suffix = split_with_prefix_and_suffix( + t.args, prefix, suffix + ) + s_args = s_prefix + (TupleType(list(s_middle), fallback),) + s_suffix + t_args = t_prefix + (TupleType(list(t_middle), fallback),) + t_suffix + else: + t_args = t.args + s_args = self.s.args + for ta, sa, tv in zip(t_args, s_args, t.type.defn.type_vars): + meet = self.meet(ta, sa) + if isinstance(tv, TypeVarTupleType): + # Correctly unpack possible outcomes of meets of tuples: it can be + # either another tuple type or Never (normalized as *tuple[Never, ...]) + if isinstance(meet, TupleType): + args.extend(meet.items) + continue + else: + assert isinstance(meet, UninhabitedType) + meet = UnpackType(tv.tuple_fallback.copy_modified(args=[meet])) + args.append(meet) return Instance(t.type, args) else: if state.strict_optional: @@ -811,11 +846,82 @@ def visit_overloaded(self, t: Overloaded) -> ProperType: return meet_types(t, call) return meet_types(t.fallback, s) + def meet_tuples(self, s: TupleType, t: TupleType) -> list[Type] | None: + """Meet two tuple types while handling variadic entries. + + This is surprisingly tricky, and we don't handle some tricky corner cases. + Most of the trickiness comes from the variadic tuple items like *tuple[X, ...] + since they can have arbitrary partial overlaps (while *Ts can't be split). This + function is roughly a mirror of join_tuples() w.r.t. to the fact that fixed + tuples are subtypes of variadic ones but not vice versa. 
+ """ + s_unpack_index = find_unpack_in_list(s.items) + t_unpack_index = find_unpack_in_list(t.items) + if s_unpack_index is None and t_unpack_index is None: + if s.length() == t.length(): + items: list[Type] = [] + for i in range(t.length()): + items.append(self.meet(t.items[i], s.items[i])) + return items + return None + if s_unpack_index is not None and t_unpack_index is not None: + # The only simple case we can handle if both tuples are variadic + # is when they are purely variadic. Other cases are tricky because + # a variadic item is effectively a union of tuples of all length, thus + # potentially causing overlap between a suffix in `s` and a prefix + # in `t` (see how this is handled in is_subtype() for details). + # TODO: handle more cases (like when both prefix/suffix are shorter in s or t). + if s.length() == 1 and t.length() == 1: + s_unpack = s.items[0] + assert isinstance(s_unpack, UnpackType) + s_unpacked = get_proper_type(s_unpack.type) + t_unpack = t.items[0] + assert isinstance(t_unpack, UnpackType) + t_unpacked = get_proper_type(t_unpack.type) + if not (isinstance(s_unpacked, Instance) and isinstance(t_unpacked, Instance)): + return None + meet = self.meet(s_unpacked, t_unpacked) + if not isinstance(meet, Instance): + return None + return [UnpackType(meet)] + return None + if s_unpack_index is not None: + variadic = s + unpack_index = s_unpack_index + fixed = t + else: + assert t_unpack_index is not None + variadic = t + unpack_index = t_unpack_index + fixed = s + # If one tuple is variadic one, and the other one is fixed, the meet will be fixed. 
+ unpack = variadic.items[unpack_index] + assert isinstance(unpack, UnpackType) + unpacked = get_proper_type(unpack.type) + if not isinstance(unpacked, Instance): + return None + if fixed.length() < variadic.length() - 1: + return None + prefix_len = unpack_index + suffix_len = variadic.length() - prefix_len - 1 + prefix, middle, suffix = split_with_prefix_and_suffix( + tuple(fixed.items), prefix_len, suffix_len + ) + items = [] + for fi, vi in zip(prefix, variadic.items[:prefix_len]): + items.append(self.meet(fi, vi)) + for mi in middle: + items.append(self.meet(mi, unpacked.args[0])) + if suffix_len: + for fi, vi in zip(suffix, variadic.items[-suffix_len:]): + items.append(self.meet(fi, vi)) + return items + def visit_tuple_type(self, t: TupleType) -> ProperType: - if isinstance(self.s, TupleType) and self.s.length() == t.length(): - items: list[Type] = [] - for i in range(t.length()): - items.append(self.meet(t.items[i], self.s.items[i])) + if isinstance(self.s, TupleType): + items = self.meet_tuples(self.s, t) + if items is None: + return self.default(self.s) # TODO: What if the fallbacks are different? return TupleType(items, tuple_fallback(t)) elif isinstance(self.s, Instance): @@ -825,6 +931,10 @@ def visit_tuple_type(self, t: TupleType) -> ProperType: elif is_proper_subtype(t, self.s): # A named tuple that inherits from a normal class return t + elif self.s.type.has_type_var_tuple_type and is_subtype(t, self.s): + # This is a bit ad-hoc but more principled handling is tricky, and this + # special case is important for type narrowing in binder to work. 
+ return t return self.default(self.s) def visit_typeddict_type(self, t: TypedDictType) -> ProperType: diff --git a/mypy/semanal.py b/mypy/semanal.py index ec4d32aefeb9..70403eed57ae 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -4414,7 +4414,8 @@ def process_typevartuple_declaration(self, s: AssignmentStmt) -> bool: typevartuple_var = TypeVarTupleExpr( name, self.qualified_name(name), - self.object_type(), + # Upper bound for *Ts is *tuple[object, ...], it can never be object. + tuple_fallback.copy_modified(), tuple_fallback, default, INVARIANT, diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py index ed04b30e90ba..a25bab8de054 100644 --- a/mypy/semanal_typeargs.py +++ b/mypy/semanal_typeargs.py @@ -207,7 +207,8 @@ def visit_unpack_type(self, typ: UnpackType) -> None: return if isinstance(proper_type, TypeVarTupleType): return - # TODO: this should probably be .has_base("builtins.tuple"), also elsewhere. + # TODO: this should probably be .has_base("builtins.tuple"), also elsewhere. This is + # tricky however, since this needs map_instance_to_supertype() available in many places. if isinstance(proper_type, Instance) and proper_type.type.fullname == "builtins.tuple": return if not isinstance(proper_type, (UnboundType, AnyType)): diff --git a/mypy/solve.py b/mypy/solve.py index 17e1ca047818..7cdf1c10c9b5 100644 --- a/mypy/solve.py +++ b/mypy/solve.py @@ -144,6 +144,8 @@ def solve_with_dependent( if all(not lowers[tv] and not uppers[tv] for tv in scc): best_free = choose_free([originals[tv] for tv in scc], original_vars) if best_free: + # TODO: failing to choose may cause leaking type variables, + # we need to fail gracefully instead. free_vars.append(best_free.id) free_solutions[best_free.id] = best_free @@ -323,13 +325,15 @@ def test(x: U) -> U: ... 
best = sorted(scc, key=lambda x: (x.id not in original_vars, x.id.raw_id))[0] if isinstance(best, TypeVarType): return best.copy_modified(values=values, upper_bound=common_upper_bound) - if is_trivial_bound(common_upper_bound_p): + if is_trivial_bound(common_upper_bound_p, allow_tuple=True): # TODO: support more cases for ParamSpecs/TypeVarTuples return best return None -def is_trivial_bound(tp: ProperType) -> bool: +def is_trivial_bound(tp: ProperType, allow_tuple: bool = False) -> bool: + if isinstance(tp, Instance) and tp.type.fullname == "builtins.tuple": + return allow_tuple and is_trivial_bound(get_proper_type(tp.args[0])) return isinstance(tp, Instance) and tp.type.fullname == "builtins.object" diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 58ae4efdf582..fdde1c24670e 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -58,13 +58,14 @@ UninhabitedType, UnionType, UnpackType, + find_unpack_in_list, get_proper_type, is_named_instance, + split_with_prefix_and_suffix, ) from mypy.types_utils import flatten_types from mypy.typestate import SubtypeKind, type_state from mypy.typevars import fill_typevars_with_any -from mypy.typevartuples import extract_unpack, fully_split_with_mapped_and_template # Flags for detected protocol members IS_SETTABLE: Final = 1 @@ -278,7 +279,13 @@ def _is_subtype( left = get_proper_type(left) right = get_proper_type(right) - if not proper_subtype and isinstance(right, (AnyType, UnboundType, ErasedType)): + # Note: Unpack type should not be a subtype of Any, since it may represent + # multiple types. This should always go through the visitor, to check arity. + if ( + not proper_subtype + and isinstance(right, (AnyType, UnboundType, ErasedType)) + and not isinstance(left, UnpackType) + ): # TODO: should we consider all types proper subtypes of UnboundType and/or # ErasedType as we do for non-proper subtyping. 
return True @@ -437,6 +444,34 @@ def visit_instance(self, left: Instance) -> bool: right = self.right if isinstance(right, TupleType) and right.partial_fallback.type.is_enum: return self._is_subtype(left, mypy.typeops.tuple_fallback(right)) + if isinstance(right, TupleType): + if len(right.items) == 1: + # Non-normalized Tuple type (may be left after semantic analysis + # because semanal_typearg visitor is not a type translator). + item = right.items[0] + if isinstance(item, UnpackType): + unpacked = get_proper_type(item.type) + if isinstance(unpacked, Instance): + return self._is_subtype(left, unpacked) + if left.type.has_base(right.partial_fallback.type.fullname): + # Special case to consider Foo[*tuple[Any, ...]] (i.e. bare Foo) a + # subtype of Foo[], when Foo is user defined variadic tuple type. + mapped = map_instance_to_supertype(left, right.partial_fallback.type) + if len(mapped.args) == 1 and isinstance(mapped.args[0], UnpackType): + unpacked = get_proper_type(mapped.args[0].type) + if isinstance(unpacked, Instance): + assert unpacked.type.fullname == "builtins.tuple" + if isinstance(get_proper_type(unpacked.args[0]), AnyType): + return not self.proper_subtype + # TODO: we need a special case similar to above to consider (something that maps to) + # tuple[Any, ...] a subtype of Tuple[]. + return False + if isinstance(right, TypeVarTupleType): + # tuple[Any, ...] is like Any in the world of tuples (see special case above). 
+ if left.type.has_base("builtins.tuple"): + mapped = map_instance_to_supertype(left, right.tuple_fallback.type) + if isinstance(get_proper_type(mapped.args[0]), AnyType): + return not self.proper_subtype if isinstance(right, Instance): if type_state.is_cached_subtype_check(self._subtype_kind, left, right): return True @@ -476,106 +511,37 @@ def visit_instance(self, left: Instance) -> bool: t = erased nominal = True if right.type.has_type_var_tuple_type: - assert left.type.type_var_tuple_prefix is not None - assert left.type.type_var_tuple_suffix is not None + # For variadic instances we simply find the correct type argument mappings, + # all the heavy lifting is done by the tuple subtyping. assert right.type.type_var_tuple_prefix is not None assert right.type.type_var_tuple_suffix is not None - split_result = fully_split_with_mapped_and_template( - left.args, - left.type.type_var_tuple_prefix, - left.type.type_var_tuple_suffix, - right.args, - right.type.type_var_tuple_prefix, - right.type.type_var_tuple_suffix, + prefix = right.type.type_var_tuple_prefix + suffix = right.type.type_var_tuple_suffix + tvt = right.type.defn.type_vars[prefix] + assert isinstance(tvt, TypeVarTupleType) + fallback = tvt.tuple_fallback + left_prefix, left_middle, left_suffix = split_with_prefix_and_suffix( + t.args, prefix, suffix ) - if split_result is None: - return False - - ( - left_prefix, - left_mprefix, - left_middle, - left_msuffix, - left_suffix, - right_prefix, - right_mprefix, - right_middle, - right_msuffix, - right_suffix, - ) = split_result - - left_unpacked = extract_unpack(left_middle) - right_unpacked = extract_unpack(right_middle) - - # Helper for case 2 below so we can treat them the same. - def check_mixed( - unpacked_type: ProperType, compare_to: tuple[Type, ...] 
- ) -> bool: - if ( - isinstance(unpacked_type, Instance) - and unpacked_type.type.fullname == "builtins.tuple" - ): - return all(is_equivalent(l, unpacked_type.args[0]) for l in compare_to) - if isinstance(unpacked_type, TypeVarTupleType): - return False - if isinstance(unpacked_type, AnyType): - return True - if isinstance(unpacked_type, TupleType): - if len(unpacked_type.items) != len(compare_to): - return False - for t1, t2 in zip(unpacked_type.items, compare_to): - if not is_equivalent(t1, t2): - return False - return True - return False - - # Case 1: Both are unpacks, in this case we check what is being - # unpacked is the same. - if left_unpacked is not None and right_unpacked is not None: - if not is_equivalent(left_unpacked, right_unpacked): - return False - - # Case 2: Only one of the types is an unpack. The equivalence - # case is mostly the same but we check some additional - # things when unpacking on the right. - elif left_unpacked is not None and right_unpacked is None: - if not check_mixed(left_unpacked, right_middle): - return False - elif left_unpacked is None and right_unpacked is not None: - if not check_mixed(right_unpacked, left_middle): - return False - - # Case 3: Neither type is an unpack. In this case we just compare - # the items themselves. 
- else: - if len(left_middle) != len(right_middle): - return False - for left_t, right_t in zip(left_middle, right_middle): - if not is_equivalent(left_t, right_t): - return False - - assert len(left_mprefix) == len(right_mprefix) - assert len(left_msuffix) == len(right_msuffix) - - for left_item, right_item in zip( - left_mprefix + left_msuffix, right_mprefix + right_msuffix - ): - if not is_equivalent(left_item, right_item): - return False - - left_items = t.args[: right.type.type_var_tuple_prefix] - right_items = right.args[: right.type.type_var_tuple_prefix] - if right.type.type_var_tuple_suffix: - left_items += t.args[-right.type.type_var_tuple_suffix :] - right_items += right.args[-right.type.type_var_tuple_suffix :] - unpack_index = right.type.type_var_tuple_prefix - assert unpack_index is not None - type_params = zip( - left_prefix + left_suffix, - right_prefix + right_suffix, - right.type.defn.type_vars[:unpack_index] - + right.type.defn.type_vars[unpack_index + 1 :], + right_prefix, right_middle, right_suffix = split_with_prefix_and_suffix( + right.args, prefix, suffix + ) + left_args = ( + left_prefix + (TupleType(list(left_middle), fallback),) + left_suffix ) + right_args = ( + right_prefix + (TupleType(list(right_middle), fallback),) + right_suffix + ) + if len(t.args) == 1 and isinstance(t.args[0], UnpackType): + unpacked = get_proper_type(t.args[0].type) + if isinstance(unpacked, Instance): + assert unpacked.type.fullname == "builtins.tuple" + if ( + isinstance(get_proper_type(unpacked.args[0]), AnyType) + and not self.proper_subtype + ): + return True + type_params = zip(left_args, right_args, right.type.defn.type_vars) else: type_params = zip(t.args, right.args, right.type.defn.type_vars) if not self.subtype_context.ignore_type_params: @@ -761,8 +727,12 @@ def visit_tuple_type(self, left: TupleType) -> bool: return True return False elif isinstance(right, TupleType): + # If right has a variadic unpack this needs special handling. 
If there is a TypeVarTuple + # unpack, item count must coincide. If the left has variadic unpack but right + # doesn't have one, we will fall through to False down the line. + if self.variadic_tuple_subtype(left, right): + return True if len(left.items) != len(right.items): - # TODO: handle tuple with variadic items better. return False if any(not self._is_subtype(l, r) for l, r in zip(left.items, right.items)): return False @@ -778,6 +748,79 @@ def visit_tuple_type(self, left: TupleType) -> bool: else: return False + def variadic_tuple_subtype(self, left: TupleType, right: TupleType) -> bool: + """Check subtyping between two potentially variadic tuples. + + Most non-trivial cases here are due to variadic unpacks like *tuple[X, ...], + we handle such unpacks as infinite unions Tuple[()] | Tuple[X] | Tuple[X, X] | ... + + Note: the cases where right is fixed or has *Ts unpack should be handled + by the caller. + """ + right_unpack_index = find_unpack_in_list(right.items) + if right_unpack_index is None: + # This case should be handled by the caller. + return False + right_unpack = right.items[right_unpack_index] + assert isinstance(right_unpack, UnpackType) + right_unpacked = get_proper_type(right_unpack.type) + if not isinstance(right_unpacked, Instance): + # This case should be handled by the caller. + return False + assert right_unpacked.type.fullname == "builtins.tuple" + right_item = right_unpacked.args[0] + right_prefix = right_unpack_index + right_suffix = len(right.items) - right_prefix - 1 + left_unpack_index = find_unpack_in_list(left.items) + if left_unpack_index is None: + # Simple case: left is fixed, simply find correct mapping to the right + # (effectively selecting item with matching length from an infinite union). 
+ if len(left.items) < right_prefix + right_suffix: + return False + prefix, middle, suffix = split_with_prefix_and_suffix( + tuple(left.items), right_prefix, right_suffix + ) + if not all( + self._is_subtype(li, ri) for li, ri in zip(prefix, right.items[:right_prefix]) + ): + return False + if right_suffix and not all( + self._is_subtype(li, ri) for li, ri in zip(suffix, right.items[-right_suffix:]) + ): + return False + return all(self._is_subtype(li, right_item) for li in middle) + else: + if len(left.items) < len(right.items): + # There are some items on the left that will never have a matching length + # on the right. + return False + left_unpack = left.items[left_unpack_index] + assert isinstance(left_unpack, UnpackType) + left_unpacked = get_proper_type(left_unpack.type) + if not isinstance(left_unpacked, Instance): + # *Ts unpacks can't be split. + return False + assert left_unpacked.type.fullname == "builtins.tuple" + left_item = left_unpacked.args[0] + + # The most tricky case with two variadic unpacks we handle similar to union + # subtyping: *each* item on the left, must be a subtype of *some* item on the right. + # For this we first check the "asymptotic case", i.e. that both unpacks a subtypes, + # and then check subtyping for all finite overlaps. 
+ if not self._is_subtype(left_item, right_item): + return False + left_prefix = left_unpack_index + left_suffix = len(left.items) - left_prefix - 1 + max_overlap = max(0, right_prefix - left_prefix, right_suffix - left_suffix) + for overlap in range(max_overlap + 1): + repr_items = left.items[:left_prefix] + [left_item] * overlap + if left_suffix: + repr_items += left.items[-left_suffix:] + left_repr = left.copy_modified(items=repr_items) + if not self._is_subtype(left_repr, right): + return False + return True + def visit_typeddict_type(self, left: TypedDictType) -> bool: right = self.right if isinstance(right, Instance): diff --git a/mypy/test/testconstraints.py b/mypy/test/testconstraints.py index f40996145cba..5ec292f07056 100644 --- a/mypy/test/testconstraints.py +++ b/mypy/test/testconstraints.py @@ -82,40 +82,11 @@ def test_unpack_homogenous_tuple_with_prefix_and_suffix(self) -> None: Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.d), } - def test_unpack_tuple(self) -> None: - fx = self.fx - assert set( - infer_constraints( - Instance( - fx.gvi, - [ - UnpackType( - TupleType([fx.t, fx.s], fallback=Instance(fx.std_tuplei, [fx.o])) - ) - ], - ), - Instance(fx.gvi, [fx.a, fx.b]), - SUPERTYPE_OF, - ) - ) == { - Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a), - Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.b), - } - def test_unpack_with_prefix_and_suffix(self) -> None: fx = self.fx assert set( infer_constraints( - Instance( - fx.gv2i, - [ - fx.u, - UnpackType( - TupleType([fx.t, fx.s], fallback=Instance(fx.std_tuplei, [fx.o])) - ), - fx.u, - ], - ), + Instance(fx.gv2i, [fx.u, fx.t, fx.s, fx.u]), Instance(fx.gv2i, [fx.a, fx.b, fx.c, fx.d]), SUPERTYPE_OF, ) @@ -130,16 +101,7 @@ def test_unpack_tuple_length_non_match(self) -> None: fx = self.fx assert set( infer_constraints( - Instance( - fx.gv2i, - [ - fx.u, - UnpackType( - TupleType([fx.t, fx.s], fallback=Instance(fx.std_tuplei, [fx.o])) - ), - fx.u, - ], - ), + Instance(fx.gv2i, [fx.u, 
fx.t, fx.s, fx.u]), Instance(fx.gv2i, [fx.a, fx.b, fx.d]), SUPERTYPE_OF, ) diff --git a/mypy/test/testsubtypes.py b/mypy/test/testsubtypes.py index 464f64d2b846..480fe38a90a7 100644 --- a/mypy/test/testsubtypes.py +++ b/mypy/test/testsubtypes.py @@ -4,7 +4,7 @@ from mypy.subtypes import is_subtype from mypy.test.helpers import Suite from mypy.test.typefixture import InterfaceTypeFixture, TypeFixture -from mypy.types import Instance, TupleType, Type, UnpackType +from mypy.types import Instance, Type, UnpackType class SubtypingSuite(Suite): @@ -221,10 +221,6 @@ def test_type_var_tuple(self) -> None: Instance(self.fx.gvi, [UnpackType(self.fx.us)]), ) - self.assert_subtype( - Instance(self.fx.gvi, [UnpackType(self.fx.anyt)]), - Instance(self.fx.gvi, [self.fx.anyt]), - ) self.assert_not_subtype( Instance(self.fx.gvi, [UnpackType(self.fx.ss)]), Instance(self.fx.gvi, []) ) @@ -272,83 +268,8 @@ def test_type_var_tuple_with_prefix_suffix(self) -> None: Instance(self.fx.gvi, [self.fx.a, UnpackType(self.fx.ss), self.fx.b, self.fx.c]), ) - def test_type_var_tuple_unpacked_varlength_tuple(self) -> None: - self.assert_subtype( - Instance( - self.fx.gvi, - [ - UnpackType( - TupleType( - [self.fx.a, self.fx.b], - fallback=Instance(self.fx.std_tuplei, [self.fx.o]), - ) - ) - ], - ), - Instance(self.fx.gvi, [self.fx.a, self.fx.b]), - ) - - def test_type_var_tuple_unpacked_tuple(self) -> None: - self.assert_subtype( - Instance( - self.fx.gvi, - [ - UnpackType( - TupleType( - [self.fx.a, self.fx.b], - fallback=Instance(self.fx.std_tuplei, [self.fx.o]), - ) - ) - ], - ), - Instance(self.fx.gvi, [self.fx.a, self.fx.b]), - ) - self.assert_subtype( - Instance( - self.fx.gvi, - [ - UnpackType( - TupleType( - [self.fx.a, self.fx.b], - fallback=Instance(self.fx.std_tuplei, [self.fx.o]), - ) - ) - ], - ), - Instance(self.fx.gvi, [self.fx.anyt, self.fx.anyt]), - ) - self.assert_not_subtype( - Instance( - self.fx.gvi, - [ - UnpackType( - TupleType( - [self.fx.a, self.fx.b], - 
fallback=Instance(self.fx.std_tuplei, [self.fx.o]), - ) - ) - ], - ), - Instance(self.fx.gvi, [self.fx.a]), - ) - self.assert_not_subtype( - Instance( - self.fx.gvi, - [ - UnpackType( - TupleType( - [self.fx.a, self.fx.b], - fallback=Instance(self.fx.std_tuplei, [self.fx.o]), - ) - ) - ], - ), - # Order flipped here. - Instance(self.fx.gvi, [self.fx.b, self.fx.a]), - ) - def test_type_var_tuple_unpacked_variable_length_tuple(self) -> None: - self.assert_equivalent( + self.assert_subtype( Instance(self.fx.gvi, [self.fx.a, self.fx.a]), Instance(self.fx.gvi, [UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))]), ) diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py index 59457dfa5d3b..e8dd623bec53 100644 --- a/mypy/test/testtypes.py +++ b/mypy/test/testtypes.py @@ -47,6 +47,7 @@ UnboundType, UninhabitedType, UnionType, + UnpackType, get_proper_type, has_recursive_types, ) @@ -986,6 +987,54 @@ def test_literal_type(self) -> None: UnionType([lit2, lit3]), UnionType([lit1, lit2]), UnionType([lit2, lit3, lit1]) ) + def test_variadic_tuple_joins(self) -> None: + # These tests really test just the "arity", to be sure it is handled correctly. 
+ self.assert_join( + self.tuple(self.fx.a, self.fx.a), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + ) + self.assert_join( + self.tuple(self.fx.a, self.fx.a), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), + ) + self.assert_join( + self.tuple(self.fx.a, self.fx.a), + self.tuple(self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + self.tuple(self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + ) + self.assert_join( + self.tuple( + self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a + ), + self.tuple( + self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a + ), + self.tuple( + self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a + ), + ) + self.assert_join( + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + self.tuple( + self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a + ), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + ) + self.assert_join( + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + ) + self.assert_join( + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), + self.tuple( + self.fx.b, UnpackType(Instance(self.fx.std_tuplei, [self.fx.b])), self.fx.b + ), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), + ) + # There are additional test cases in check-inference.test. # TODO: Function types + varargs and default args. 
@@ -1221,6 +1270,34 @@ def assert_meet_uninhabited(self, s: Type, t: Type) -> None: with state.strict_optional_set(True): self.assert_meet(s, t, self.fx.uninhabited) + def test_variadic_tuple_meets(self) -> None: + # These tests really test just the "arity", to be sure it is handled correctly. + self.assert_meet( + self.tuple(self.fx.a, self.fx.a), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + self.tuple(self.fx.a, self.fx.a), + ) + self.assert_meet( + self.tuple(self.fx.a, self.fx.a), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), + self.tuple(self.fx.a, self.fx.a), + ) + self.assert_meet( + self.tuple(self.fx.a, self.fx.a), + self.tuple(self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + self.tuple(self.fx.a, self.fx.a), + ) + self.assert_meet( + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), + ) + self.assert_meet( + self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), + self.tuple(self.fx.b, UnpackType(Instance(self.fx.std_tuplei, [self.fx.b]))), + self.tuple(self.fx.b, UnpackType(Instance(self.fx.std_tuplei, [self.fx.b]))), + ) + def assert_meet(self, s: Type, t: Type, meet: Type) -> None: self.assert_simple_meet(s, t, meet) self.assert_simple_meet(t, s, meet) diff --git a/mypy/test/typefixture.py b/mypy/test/typefixture.py index 81af765f8585..b7bde16e6be2 100644 --- a/mypy/test/typefixture.py +++ b/mypy/test/typefixture.py @@ -233,9 +233,10 @@ def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleTy AnyType(TypeOfAny.from_omitted_generics), ) - self.ts = make_type_var_tuple("Ts", 1, self.o) # Ts`1 (type var tuple) - self.ss = make_type_var_tuple("Ss", 2, self.o) # Ss`2 (type var tuple) - self.us = make_type_var_tuple("Us", 3, self.o) # Us`3 (type var tuple) + obj_tuple = 
self.std_tuple.copy_modified(args=[self.o]) + self.ts = make_type_var_tuple("Ts", 1, obj_tuple) # Ts`1 (type var tuple) + self.ss = make_type_var_tuple("Ss", 2, obj_tuple) # Ss`2 (type var tuple) + self.us = make_type_var_tuple("Us", 3, obj_tuple) # Us`3 (type var tuple) self.gvi = self.make_type_info("GV", mro=[self.oi], typevars=["Ts"], typevar_tuple_index=0) self.gv2i = self.make_type_info( @@ -325,8 +326,8 @@ def make_type_info( n, n, id, - self.o, - self.std_tuple, + self.std_tuple.copy_modified(args=[self.o]), + self.std_tuple.copy_modified(args=[self.o]), AnyType(TypeOfAny.from_omitted_generics), ) ) diff --git a/mypy/typeops.py b/mypy/typeops.py index 3efa3cc3e965..3f50232f04c1 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -104,8 +104,8 @@ def tuple_fallback(typ: TupleType) -> Instance: if isinstance(item, UnpackType): unpacked_type = get_proper_type(item.type) if isinstance(unpacked_type, TypeVarTupleType): - items.append(unpacked_type.upper_bound) - elif ( + unpacked_type = get_proper_type(unpacked_type.upper_bound) + if ( isinstance(unpacked_type, Instance) and unpacked_type.type.fullname == "builtins.tuple" ): @@ -654,8 +654,7 @@ def erase_def_to_union_or_bound(tdef: TypeVarLikeType) -> Type: # TODO(PEP612): fix for ParamSpecType if isinstance(tdef, ParamSpecType): return AnyType(TypeOfAny.from_error) - assert isinstance(tdef, TypeVarType) - if tdef.values: + if isinstance(tdef, TypeVarType) and tdef.values: return make_simplified_union(tdef.values) else: return tdef.upper_bound diff --git a/mypy/typevartuples.py b/mypy/typevartuples.py index bcb5e96b615c..af2effbd4035 100644 --- a/mypy/typevartuples.py +++ b/mypy/typevartuples.py @@ -9,7 +9,6 @@ ProperType, Type, UnpackType, - find_unpack_in_list, get_proper_type, split_with_prefix_and_suffix, ) @@ -25,139 +24,6 @@ def split_with_instance( ) -def split_with_mapped_and_template( - mapped: tuple[Type, ...], - mapped_prefix_len: int | None, - mapped_suffix_len: int | None, - template: tuple[Type, 
...], - template_prefix_len: int, - template_suffix_len: int, -) -> ( - tuple[ - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - ] - | None -): - split_result = fully_split_with_mapped_and_template( - mapped, - mapped_prefix_len, - mapped_suffix_len, - template, - template_prefix_len, - template_suffix_len, - ) - if split_result is None: - return None - - ( - mapped_prefix, - mapped_middle_prefix, - mapped_middle_middle, - mapped_middle_suffix, - mapped_suffix, - template_prefix, - template_middle_prefix, - template_middle_middle, - template_middle_suffix, - template_suffix, - ) = split_result - - return ( - mapped_prefix + mapped_middle_prefix, - mapped_middle_middle, - mapped_middle_suffix + mapped_suffix, - template_prefix + template_middle_prefix, - template_middle_middle, - template_middle_suffix + template_suffix, - ) - - -def fully_split_with_mapped_and_template( - mapped: tuple[Type, ...], - mapped_prefix_len: int | None, - mapped_suffix_len: int | None, - template: tuple[Type, ...], - template_prefix_len: int, - template_suffix_len: int, -) -> ( - tuple[ - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - ] - | None -): - if mapped_prefix_len is not None: - assert mapped_suffix_len is not None - mapped_prefix, mapped_middle, mapped_suffix = split_with_prefix_and_suffix( - tuple(mapped), mapped_prefix_len, mapped_suffix_len - ) - else: - mapped_prefix = tuple() - mapped_suffix = tuple() - mapped_middle = mapped - - template_prefix, template_middle, template_suffix = split_with_prefix_and_suffix( - tuple(template), template_prefix_len, template_suffix_len - ) - - unpack_prefix = find_unpack_in_list(template_middle) - if unpack_prefix is None: - return ( - mapped_prefix, - (), - mapped_middle, - (), - mapped_suffix, - template_prefix, - (), 
- template_middle, - (), - template_suffix, - ) - - unpack_suffix = len(template_middle) - unpack_prefix - 1 - # mapped_middle is too short to do the unpack - if unpack_prefix + unpack_suffix > len(mapped_middle): - return None - - ( - mapped_middle_prefix, - mapped_middle_middle, - mapped_middle_suffix, - ) = split_with_prefix_and_suffix(mapped_middle, unpack_prefix, unpack_suffix) - ( - template_middle_prefix, - template_middle_middle, - template_middle_suffix, - ) = split_with_prefix_and_suffix(template_middle, unpack_prefix, unpack_suffix) - - return ( - mapped_prefix, - mapped_middle_prefix, - mapped_middle_middle, - mapped_middle_suffix, - mapped_suffix, - template_prefix, - template_middle_prefix, - template_middle_middle, - template_middle_suffix, - template_suffix, - ) - - def extract_unpack(types: Sequence[Type]) -> ProperType | None: """Given a list of types, extracts either a single type from an unpack, or returns None.""" if len(types) == 1: diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index b4cd21aa552c..06f87a26e7a1 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -6460,3 +6460,22 @@ P = ParamSpec("P") class C(Generic[P]): def __init__(self, fn: Callable[P, int]) -> None: ... [builtins fixtures/dict.pyi] + +[case testVariadicTupleIncrementalUpdateNoCrash] +import m +[file m.py] +from typing import Any +from lib import C + +x: C[Any] +[file m.py.2] +from lib import C + +x: C[int] +[file lib.py] +from typing import Generic, Tuple, TypeVar +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class C(Tuple[Unpack[Ts]]): ... 
+[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index 2b47ff30cdfb..d38d492fe9b2 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -1221,7 +1221,7 @@ def foo(x: Tuple[Unpack[Ts]]) -> Tuple[Unpack[Ts]]: [case testTypeVarTupleWithIsInstance] # flags: --warn-unreachable -from typing import Tuple +from typing import Generic, Tuple from typing_extensions import TypeVarTuple, Unpack TP = TypeVarTuple("TP") @@ -1232,4 +1232,287 @@ def test(d: A[int, str]) -> None: reveal_type(d) # N: Revealed type is "Tuple[builtins.int, builtins.str, fallback=__main__.A[builtins.int, builtins.str]]" else: reveal_type(d) # E: Statement is unreachable + +class B(Generic[Unpack[TP]]): ... + +def test2(d: B[int, str]) -> None: + if isinstance(d, B): + reveal_type(d) # N: Revealed type is "__main__.B[builtins.int, builtins.str]" + else: + reveal_type(d) # E: Statement is unreachable [builtins fixtures/isinstancelist.pyi] + +[case testVariadicTupleSubtyping] +from typing import Tuple +from typing_extensions import Unpack + +def f1(x: Tuple[float, ...]) -> None: ... +def f2(x: Tuple[float, Unpack[Tuple[float, ...]]]) -> None: ... +def f3(x: Tuple[Unpack[Tuple[float, ...]], float]) -> None: ... +def f4(x: Tuple[float, Unpack[Tuple[float, ...]], float]) -> None: ... + +t1: Tuple[int, int] +t2: Tuple[int, Unpack[Tuple[int, ...]]] +t3: Tuple[Unpack[Tuple[int, ...]], int] +t4: Tuple[int, Unpack[Tuple[int, ...]], int] +t5: Tuple[int, ...] 
+ +tl: Tuple[int, int, Unpack[Tuple[int, ...]]] +tr: Tuple[Unpack[Tuple[int, ...]], int, int] + +f1(t1) +f1(t2) +f1(t3) +f1(t4) +f1(t5) + +f1(tl) +f1(tr) + +f2(t1) +f2(t2) +f2(t3) +f2(t4) +f2(t5) # E: Argument 1 to "f2" has incompatible type "Tuple[int, ...]"; expected "Tuple[float, Unpack[Tuple[float, ...]]]" + +f2(tl) +f2(tr) + +f3(t1) +f3(t2) +f3(t3) +f3(t4) +f3(t5) # E: Argument 1 to "f3" has incompatible type "Tuple[int, ...]"; expected "Tuple[Unpack[Tuple[float, ...]], float]" + +f3(tl) +f3(tr) + +f4(t1) +f4(t2) # E: Argument 1 to "f4" has incompatible type "Tuple[int, Unpack[Tuple[int, ...]]]"; expected "Tuple[float, Unpack[Tuple[float, ...]], float]" +f4(t3) # E: Argument 1 to "f4" has incompatible type "Tuple[Unpack[Tuple[int, ...]], int]"; expected "Tuple[float, Unpack[Tuple[float, ...]], float]" +f4(t4) +f4(t5) # E: Argument 1 to "f4" has incompatible type "Tuple[int, ...]"; expected "Tuple[float, Unpack[Tuple[float, ...]], float]" + +f4(tl) +f4(tr) + +t5_verbose: Tuple[Unpack[Tuple[int, ...]]] +t5 = t5_verbose # OK +[builtins fixtures/tuple.pyi] + +[case testVariadicTupleInference] +from typing import List, Tuple, TypeVar +from typing_extensions import TypeVarTuple, Unpack + +T = TypeVar("T") +def f(x: Tuple[int, Unpack[Tuple[T, ...]]]) -> T: ... + +vt0: Tuple[int, ...] +f(vt0) # E: Argument 1 to "f" has incompatible type "Tuple[int, ...]"; expected "Tuple[int, Unpack[Tuple[int, ...]]]" + +vt1: Tuple[Unpack[Tuple[int, ...]], int] +reveal_type(f(vt1)) # N: Revealed type is "builtins.int" + +S = TypeVar("S") +Ts = TypeVarTuple("Ts") +def g(x: Tuple[T, Unpack[Ts], S]) -> Tuple[T, Unpack[Ts], S]: ... +g(vt0) # E: Argument 1 to "g" has incompatible type "Tuple[int, ...]"; expected "Tuple[int, Unpack[Tuple[int, ...]], int]" + +U = TypeVar("U") +def h(x: List[Tuple[T, S, U]]) -> Tuple[T, S, U]: ... 
+vt2: Tuple[Unpack[Tuple[int, ...]], int] +vt2 = h(reveal_type([])) # N: Revealed type is "builtins.list[Tuple[builtins.int, builtins.int, builtins.int]]" +[builtins fixtures/tuple.pyi] + +[case testVariadicSelfTypeErasure] +from typing import Generic +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class Array(Generic[Unpack[Ts]]): + def _close(self) -> None: ... + + def close(self) -> None: + self._close() +[builtins fixtures/tuple.pyi] + +[case testVariadicSubclassFixed] +from typing import Generic, Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class B(Generic[Unpack[Ts]]): ... +class C(B[int, str]): ... +class D(B[Unpack[Tuple[int, ...]]]): ... + +def fii(x: B[int, int]) -> None: ... +def fis(x: B[int, str]) -> None: ... +def fiv(x: B[Unpack[Tuple[int, ...]]]) -> None: ... + +fii(C()) # E: Argument 1 to "fii" has incompatible type "C"; expected "B[int, int]" +fii(D()) # E: Argument 1 to "fii" has incompatible type "D"; expected "B[int, int]" +fis(C()) +fis(D()) # E: Argument 1 to "fis" has incompatible type "D"; expected "B[int, str]" +fiv(C()) # E: Argument 1 to "fiv" has incompatible type "C"; expected "B[Unpack[Tuple[int, ...]]]" +fiv(D()) +[builtins fixtures/tuple.pyi] + +[case testVariadicSubclassSame] +from typing import Generic, Tuple, TypeVar +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class B(Generic[Unpack[Ts]]): ... +class C(B[Unpack[Ts]]): ... + +def fii(x: B[int, int]) -> None: ... +def fis(x: B[int, str]) -> None: ... +def fiv(x: B[Unpack[Tuple[int, ...]]]) -> None: ... 
+ +cii: C[int, int] +cis: C[int, str] +civ: C[Unpack[Tuple[int, ...]]] + +fii(cii) +fii(cis) # E: Argument 1 to "fii" has incompatible type "C[int, str]"; expected "B[int, int]" +fii(civ) # E: Argument 1 to "fii" has incompatible type "C[Unpack[Tuple[int, ...]]]"; expected "B[int, int]" + +fis(cii) # E: Argument 1 to "fis" has incompatible type "C[int, int]"; expected "B[int, str]" +fis(cis) +fis(civ) # E: Argument 1 to "fis" has incompatible type "C[Unpack[Tuple[int, ...]]]"; expected "B[int, str]" + +fiv(cii) +fiv(cis) # E: Argument 1 to "fiv" has incompatible type "C[int, str]"; expected "B[Unpack[Tuple[int, ...]]]" +fiv(civ) +[builtins fixtures/tuple.pyi] + +[case testVariadicSubclassExtra] +from typing import Generic, Tuple, TypeVar +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class B(Generic[Unpack[Ts]]): ... + +T = TypeVar("T") +class C(B[int, Unpack[Ts], T]): ... + +def ff(x: B[int, int, int]) -> None: ... +def fv(x: B[Unpack[Tuple[int, ...]]]) -> None: ... + +cii: C[int, int] +cis: C[int, str] +civ: C[Unpack[Tuple[int, ...]]] + +ff(cii) +ff(cis) # E: Argument 1 to "ff" has incompatible type "C[int, str]"; expected "B[int, int, int]" +ff(civ) # E: Argument 1 to "ff" has incompatible type "C[Unpack[Tuple[int, ...]]]"; expected "B[int, int, int]" + +fv(cii) +fv(cis) # E: Argument 1 to "fv" has incompatible type "C[int, str]"; expected "B[Unpack[Tuple[int, ...]]]" +fv(civ) +[builtins fixtures/tuple.pyi] + +[case testVariadicSubclassVariadic] +from typing import Generic, Tuple, TypeVar +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class B(Generic[Unpack[Ts]]): ... +T = TypeVar("T") +class C(B[Unpack[Tuple[T, ...]]]): ... + +def ff(x: B[int, int]) -> None: ... +def fv(x: B[Unpack[Tuple[int, ...]]]) -> None: ... 
+ +ci: C[int] +ff(ci) # E: Argument 1 to "ff" has incompatible type "C[int]"; expected "B[int, int]" +fv(ci) +[builtins fixtures/tuple.pyi] + +[case testVariadicSubclassMethodAccess] +from typing import Generic, Tuple, TypeVar +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class B(Generic[Unpack[Ts]]): + def meth(self) -> Tuple[Unpack[Ts]]: ... + +class C1(B[int, str]): ... +class C2(B[Unpack[Ts]]): ... +T = TypeVar("T") +class C3(B[int, Unpack[Ts], T]): ... +class C4(B[Unpack[Tuple[T, ...]]]): ... + +c1: C1 +reveal_type(c1.meth()) # N: Revealed type is "Tuple[builtins.int, builtins.str]" + +c2f: C2[int, str] +c2v: C2[Unpack[Tuple[int, ...]]] +reveal_type(c2f.meth()) # N: Revealed type is "Tuple[builtins.int, builtins.str]" +reveal_type(c2v.meth()) # N: Revealed type is "builtins.tuple[builtins.int, ...]" + +c3f: C3[int, str] +c3v: C3[Unpack[Tuple[int, ...]]] +reveal_type(c3f.meth()) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.str]" +reveal_type(c3v.meth()) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.int, ...]], builtins.int]" + +c4: C4[int] +reveal_type(c4.meth()) # N: Revealed type is "builtins.tuple[builtins.int, ...]" +[builtins fixtures/tuple.pyi] + +[case testVariadicTupleAnySubtype] +from typing import Any, Generic, Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class B(Generic[Unpack[Ts]]): ... +class C1(B[Unpack[Tuple[Any, ...]]]): ... +c1 = C1() +class C2(B): ... +c2 = C2() +x: B[int, str] +x = c1 +x = c2 +[builtins fixtures/tuple.pyi] + +[case testVariadicTupleAnySubtypeTupleType] +from typing import Any, Generic, Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class B(Tuple[Unpack[Ts]]): ... +class C1(B[Unpack[Tuple[Any, ...]]]): ... +c1 = C1() +class C2(B): ... 
+c2 = C2() +x: B[int, str] +x = c1 +x = c2 +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleAnyOverload] +from typing import Any, Generic, overload, Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class Array(Generic[Unpack[Ts]]): ... + +class A: + @overload + def f(self, x: Tuple[Unpack[Ts]]) -> Array[Unpack[Ts]]: ... + @overload + def f(self, x: Any) -> Any: ... + def f(self, x: Any) -> Any: + ... +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleInferAgainstAny] +from typing import Any, Tuple, TypeVar +from typing_extensions import Unpack + +T = TypeVar("T") + +def test(x: int, t: Tuple[T, ...]) -> Tuple[int, Unpack[Tuple[T, ...]]]: + ... +a: Any = test(42, ()) +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test index 71a5c6dd87b5..5e05d099b958 100644 --- a/test-data/unit/semanal-types.test +++ b/test-data/unit/semanal-types.test @@ -1559,8 +1559,8 @@ MypyFile:1( ImportFrom:1(typing_extensions, [TypeVarTuple]) AssignmentStmt:2( NameExpr(TV* [__main__.TV]) - TypeVarTupleExpr:2())) - + TypeVarTupleExpr:2( + UpperBound(builtins.tuple[builtins.object, ...])))) [builtins fixtures/tuple.pyi] [case testTypeVarTupleCallable] @@ -1576,7 +1576,8 @@ MypyFile:1( ImportFrom:2(typing, [Callable]) AssignmentStmt:3( NameExpr(Ts* [__main__.Ts]) - TypeVarTupleExpr:3()) + TypeVarTupleExpr:3( + UpperBound(builtins.tuple[builtins.object, ...]))) FuncDef:5( foo Args( From f41e24c8b31a110c2f01a753acba458977e41bfc Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Thu, 14 Sep 2023 10:42:34 +0100 Subject: [PATCH 044/144] Lenient handling of trivial Callable suffixes (#15913) Fixes https://github.com/python/mypy/issues/15734 Fixes https://github.com/python/mypy/issues/15188 Fixes https://github.com/python/mypy/issues/14321 Fixes https://github.com/python/mypy/issues/13107 (plain Callable was already working, this fixes the protocol example) Fixes 
https://github.com/python/mypy/issues/16058 It looks like treating trivial suffixes (especially for erased callables) as "whatever works" is a right thing, because it reflects the whole idea of why we normally check subtyping with respect to an e.g. erased type. As you can see this fixes a bunch of issues. Note it was necessary to make couple more tweaks to make everything work smoothly: * Adjust self-type erasure level in `checker.py` to match other places. * Explicitly allow `Callable` as a `self`/`cls` annotation (actually I am not sure we need to keep this check at all, since we now have good inference for self-types, and we check they are safe either at definition site or at call site). --- mypy/checker.py | 4 +- mypy/checkmember.py | 2 + mypy/messages.py | 3 + mypy/subtypes.py | 19 ++- mypy/typeops.py | 4 + test-data/unit/check-callable.test | 31 ++++ test-data/unit/check-modules.test | 12 +- .../unit/check-parameter-specification.test | 139 +++++++++++++++++- test-data/unit/fixtures/paramspec.pyi | 1 + 9 files changed, 204 insertions(+), 11 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 5a74f019dcf4..95a65b0a8cd1 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -1208,7 +1208,9 @@ def check_func_def( ): if defn.is_class or defn.name == "__new__": ref_type = mypy.types.TypeType.make_normalized(ref_type) - erased = get_proper_type(erase_to_bound(arg_type)) + # This level of erasure matches the one in checkmember.check_self_arg(), + # better keep these two checks consistent. + erased = get_proper_type(erase_typevars(erase_to_bound(arg_type))) if not is_subtype(ref_type, erased, ignore_type_params=True): if ( isinstance(erased, Instance) diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 60430839ff62..59af0d402e14 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -896,6 +896,8 @@ def f(self: S) -> T: ... 
return functype else: selfarg = get_proper_type(item.arg_types[0]) + # This level of erasure matches the one in checker.check_func_def(), + # better keep these two checks consistent. if subtypes.is_subtype(dispatched_arg_type, erase_typevars(erase_to_bound(selfarg))): new_items.append(item) elif isinstance(selfarg, ParamSpecType): diff --git a/mypy/messages.py b/mypy/messages.py index b6fdaf06a8e0..8bc190b7d66d 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -2132,6 +2132,9 @@ def report_protocol_problems( not is_subtype(subtype, erase_type(supertype), options=self.options) or not subtype.type.defn.type_vars or not supertype.type.defn.type_vars + # Always show detailed message for ParamSpec + or subtype.type.has_param_spec_type + or supertype.type.has_param_spec_type ): type_name = format_type(subtype, self.options, module_names=True) self.note(f"Following member(s) of {type_name} have conflicts:", context, code=code) diff --git a/mypy/subtypes.py b/mypy/subtypes.py index fdde1c24670e..e8339a8c4d69 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -1519,6 +1519,18 @@ def are_trivial_parameters(param: Parameters | NormalizedCallableType) -> bool: ) +def is_trivial_suffix(param: Parameters | NormalizedCallableType) -> bool: + param_star = param.var_arg() + param_star2 = param.kw_arg() + return ( + param.arg_kinds[-2:] == [ARG_STAR, ARG_STAR2] + and param_star is not None + and isinstance(get_proper_type(param_star.typ), AnyType) + and param_star2 is not None + and isinstance(get_proper_type(param_star2.typ), AnyType) + ) + + def are_parameters_compatible( left: Parameters | NormalizedCallableType, right: Parameters | NormalizedCallableType, @@ -1540,6 +1552,7 @@ def are_parameters_compatible( # Treat "def _(*a: Any, **kw: Any) -> X" similarly to "Callable[..., X]" if are_trivial_parameters(right): return True + trivial_suffix = is_trivial_suffix(right) # Match up corresponding arguments and check them for compatibility. 
In # every pair (argL, argR) of corresponding arguments from L and R, argL must @@ -1570,7 +1583,7 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N if right_arg is None: return False if left_arg is None: - return not allow_partial_overlap + return not allow_partial_overlap and not trivial_suffix return not is_compat(right_arg.typ, left_arg.typ) if _incompatible(left_star, right_star) or _incompatible(left_star2, right_star2): @@ -1594,7 +1607,7 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N # arguments. Get all further positional args of left, and make sure # they're more general than the corresponding member in right. # TODO: are we handling UnpackType correctly here? - if right_star is not None: + if right_star is not None and not trivial_suffix: # Synthesize an anonymous formal argument for the right right_by_position = right.try_synthesizing_arg_from_vararg(None) assert right_by_position is not None @@ -1621,7 +1634,7 @@ def _incompatible(left_arg: FormalArgument | None, right_arg: FormalArgument | N # Phase 1d: Check kw args. Right has an infinite series of optional named # arguments. Get all further named args of left, and make sure # they're more general than the corresponding member in right. - if right_star2 is not None: + if right_star2 is not None and not trivial_suffix: right_names = {name for name in right.arg_names if name is not None} left_only_names = set() for name, kind in zip(left.arg_names, left.arg_kinds): diff --git a/mypy/typeops.py b/mypy/typeops.py index 3f50232f04c1..10efa32c4b91 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -251,6 +251,10 @@ def supported_self_type(typ: ProperType) -> bool: """ if isinstance(typ, TypeType): return supported_self_type(typ.item) + if isinstance(typ, CallableType): + # Special case: allow class callable instead of Type[...] as cls annotation, + # as well as callable self for callback protocols. 
+ return True return isinstance(typ, TypeVarType) or ( isinstance(typ, Instance) and typ != fill_typevars(typ.type) ) diff --git a/test-data/unit/check-callable.test b/test-data/unit/check-callable.test index 07c42de74bb3..8a611a689be5 100644 --- a/test-data/unit/check-callable.test +++ b/test-data/unit/check-callable.test @@ -598,3 +598,34 @@ a: A a() # E: Missing positional argument "other" in call to "__call__" of "A" a(a) a(lambda: None) + +[case testCallableSubtypingTrivialSuffix] +from typing import Any, Protocol + +class Call(Protocol): + def __call__(self, x: int, *args: Any, **kwargs: Any) -> None: ... + +def f1() -> None: ... +a1: Call = f1 # E: Incompatible types in assignment (expression has type "Callable[[], None]", variable has type "Call") \ + # N: "Call.__call__" has type "Callable[[Arg(int, 'x'), VarArg(Any), KwArg(Any)], None]" +def f2(x: str) -> None: ... +a2: Call = f2 # E: Incompatible types in assignment (expression has type "Callable[[str], None]", variable has type "Call") \ + # N: "Call.__call__" has type "Callable[[Arg(int, 'x'), VarArg(Any), KwArg(Any)], None]" +def f3(y: int) -> None: ... +a3: Call = f3 # E: Incompatible types in assignment (expression has type "Callable[[int], None]", variable has type "Call") \ + # N: "Call.__call__" has type "Callable[[Arg(int, 'x'), VarArg(Any), KwArg(Any)], None]" +def f4(x: int) -> None: ... +a4: Call = f4 + +def f5(x: int, y: int) -> None: ... +a5: Call = f5 + +def f6(x: int, y: int = 0) -> None: ... +a6: Call = f6 + +def f7(x: int, *, y: int) -> None: ... +a7: Call = f7 + +def f8(x: int, *args: int, **kwargs: str) -> None: ... 
+a8: Call = f8 +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index 3da5996ed274..94368f6c1113 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -3193,7 +3193,7 @@ from test1 import aaaa # E: Module "test1" has no attribute "aaaa" import b [file a.py] class Foo: - def frobnicate(self, x, *args, **kwargs): pass + def frobnicate(self, x: str, *args, **kwargs): pass [file b.py] from a import Foo class Bar(Foo): @@ -3201,21 +3201,21 @@ class Bar(Foo): [file b.py.2] from a import Foo class Bar(Foo): - def frobnicate(self, *args) -> None: pass + def frobnicate(self, *args: int) -> None: pass [file b.py.3] from a import Foo class Bar(Foo): - def frobnicate(self, *args) -> None: pass # type: ignore[override] # I know + def frobnicate(self, *args: int) -> None: pass # type: ignore[override] # I know [builtins fixtures/dict.pyi] [out1] tmp/b.py:3: error: Signature of "frobnicate" incompatible with supertype "Foo" tmp/b.py:3: note: Superclass: -tmp/b.py:3: note: def frobnicate(self, x: Any, *args: Any, **kwargs: Any) -> Any +tmp/b.py:3: note: def frobnicate(self, x: str, *args: Any, **kwargs: Any) -> Any tmp/b.py:3: note: Subclass: tmp/b.py:3: note: def frobnicate(self) -> None [out2] tmp/b.py:3: error: Signature of "frobnicate" incompatible with supertype "Foo" tmp/b.py:3: note: Superclass: -tmp/b.py:3: note: def frobnicate(self, x: Any, *args: Any, **kwargs: Any) -> Any +tmp/b.py:3: note: def frobnicate(self, x: str, *args: Any, **kwargs: Any) -> Any tmp/b.py:3: note: Subclass: -tmp/b.py:3: note: def frobnicate(self, *args: Any) -> None +tmp/b.py:3: note: def frobnicate(self, *args: int) -> None diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index d80069644194..da831d29dd43 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1729,7 
+1729,12 @@ class A(Protocol[P]): ... def bar(b: A[P]) -> A[Concatenate[int, P]]: - return b # E: Incompatible return value type (got "A[P]", expected "A[[int, **P]]") + return b # E: Incompatible return value type (got "A[P]", expected "A[[int, **P]]") \ + # N: Following member(s) of "A[P]" have conflicts: \ + # N: Expected: \ + # N: def foo(self, int, /, *args: P.args, **kwargs: P.kwargs) -> Any \ + # N: Got: \ + # N: def foo(self, *args: P.args, **kwargs: P.kwargs) -> Any [builtins fixtures/paramspec.pyi] [case testParamSpecPrefixSubtypingValidNonStrict] @@ -1825,6 +1830,138 @@ c: C[int, [int, str], str] # E: Nested parameter specifications are not allowed reveal_type(c) # N: Revealed type is "__main__.C[Any]" [builtins fixtures/paramspec.pyi] +[case testParamSpecConcatenateSelfType] +from typing import Callable +from typing_extensions import ParamSpec, Concatenate + +P = ParamSpec("P") +class A: + def __init__(self, a_param_1: str) -> None: ... + + @classmethod + def add_params(cls: Callable[P, A]) -> Callable[Concatenate[float, P], A]: + def new_constructor(i: float, *args: P.args, **kwargs: P.kwargs) -> A: + return cls(*args, **kwargs) + return new_constructor + + @classmethod + def remove_params(cls: Callable[Concatenate[str, P], A]) -> Callable[P, A]: + def new_constructor(*args: P.args, **kwargs: P.kwargs) -> A: + return cls("my_special_str", *args, **kwargs) + return new_constructor + +reveal_type(A.add_params()) # N: Revealed type is "def (builtins.float, a_param_1: builtins.str) -> __main__.A" +reveal_type(A.remove_params()) # N: Revealed type is "def () -> __main__.A" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecConcatenateCallbackProtocol] +from typing import Protocol, TypeVar +from typing_extensions import ParamSpec, Concatenate + +P = ParamSpec("P") +R = TypeVar("R", covariant=True) + +class Path: ... + +class Function(Protocol[P, R]): + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R: ... 
+ +def file_cache(fn: Function[Concatenate[Path, P], R]) -> Function[P, R]: + def wrapper(*args: P.args, **kw: P.kwargs) -> R: + return fn(Path(), *args, **kw) + return wrapper + +@file_cache +def get_thing(path: Path, *, some_arg: int) -> int: ... +reveal_type(get_thing) # N: Revealed type is "__main__.Function[[*, some_arg: builtins.int], builtins.int]" +get_thing(some_arg=1) # OK +[builtins fixtures/paramspec.pyi] + +[case testParamSpecConcatenateKeywordOnly] +from typing import Callable, TypeVar +from typing_extensions import ParamSpec, Concatenate + +P = ParamSpec("P") +R = TypeVar("R") + +class Path: ... + +def file_cache(fn: Callable[Concatenate[Path, P], R]) -> Callable[P, R]: + def wrapper(*args: P.args, **kw: P.kwargs) -> R: + return fn(Path(), *args, **kw) + return wrapper + +@file_cache +def get_thing(path: Path, *, some_arg: int) -> int: ... +reveal_type(get_thing) # N: Revealed type is "def (*, some_arg: builtins.int) -> builtins.int" +get_thing(some_arg=1) # OK +[builtins fixtures/paramspec.pyi] + +[case testParamSpecConcatenateCallbackApply] +from typing import Callable, Protocol +from typing_extensions import ParamSpec, Concatenate + +P = ParamSpec("P") + +class FuncType(Protocol[P]): + def __call__(self, x: int, s: str, *args: P.args, **kw_args: P.kwargs) -> str: ... + +def forwarder1(fp: FuncType[P], *args: P.args, **kw_args: P.kwargs) -> str: + return fp(0, '', *args, **kw_args) + +def forwarder2(fp: Callable[Concatenate[int, str, P], str], *args: P.args, **kw_args: P.kwargs) -> str: + return fp(0, '', *args, **kw_args) + +def my_f(x: int, s: str, d: bool) -> str: ... 
+forwarder1(my_f, True) # OK +forwarder2(my_f, True) # OK +forwarder1(my_f, 1.0) # E: Argument 2 to "forwarder1" has incompatible type "float"; expected "bool" +forwarder2(my_f, 1.0) # E: Argument 2 to "forwarder2" has incompatible type "float"; expected "bool" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecCallbackProtocolSelf] +from typing import Callable, Protocol, TypeVar +from typing_extensions import ParamSpec, Concatenate + +Params = ParamSpec("Params") +Result = TypeVar("Result", covariant=True) + +class FancyMethod(Protocol): + def __call__(self, arg1: int, arg2: str) -> bool: ... + def return_me(self: Callable[Params, Result]) -> Callable[Params, Result]: ... + def return_part(self: Callable[Concatenate[int, Params], Result]) -> Callable[Params, Result]: ... + +m: FancyMethod +reveal_type(m.return_me()) # N: Revealed type is "def (arg1: builtins.int, arg2: builtins.str) -> builtins.bool" +reveal_type(m.return_part()) # N: Revealed type is "def (arg2: builtins.str) -> builtins.bool" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecInferenceCallableAgainstAny] +from typing import Callable, TypeVar, Any +from typing_extensions import ParamSpec, Concatenate + +_P = ParamSpec("_P") +_R = TypeVar("_R") + +class A: ... +a = A() + +def a_func( + func: Callable[Concatenate[A, _P], _R], +) -> Callable[Concatenate[Any, _P], _R]: + def wrapper(__a: Any, *args: _P.args, **kwargs: _P.kwargs) -> _R: + return func(a, *args, **kwargs) + return wrapper + +def test(a, *args): ... +x: Any +y: object + +a_func(test) +x = a_func(test) +y = a_func(test) +[builtins fixtures/paramspec.pyi] + [case testParamSpecInferenceWithCallbackProtocol] from typing import Protocol, Callable, ParamSpec diff --git a/test-data/unit/fixtures/paramspec.pyi b/test-data/unit/fixtures/paramspec.pyi index 9b0089f6a7e9..dfb5e126f242 100644 --- a/test-data/unit/fixtures/paramspec.pyi +++ b/test-data/unit/fixtures/paramspec.pyi @@ -16,6 +16,7 @@ class object: class function: ... 
class ellipsis: ... +class classmethod: ... class type: def __init__(self, *a: object) -> None: ... From 2c2d126cc742f2467045d36780c33bb8fb77a614 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Thu, 14 Sep 2023 14:27:54 -0700 Subject: [PATCH 045/144] Fix tuple[Any, ...] subtyping (#16108) Follow up to #16073 and #16076 Fix needed for https://github.com/python/mypy/pull/16053/files#r1316481395 I add test cases that would have caught my previous incorrect PR. I add an explicit case for the new desirable behaviour we see with zip. --- mypy/main.py | 2 +- mypy/subtypes.py | 6 +- test-data/unit/check-tuples.test | 164 +++++++++++++++++++++++++++++-- 3 files changed, 160 insertions(+), 12 deletions(-) diff --git a/mypy/main.py b/mypy/main.py index a4357dca7890..3eb8a76a6de3 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -1359,7 +1359,7 @@ def set_strict_flags() -> None: parser.error("Can only find occurrences of class members.") if len(_find_occurrences) != 2: parser.error("Can only find occurrences of non-nested class members.") - state.find_occurrences = _find_occurrences # type: ignore[assignment] + state.find_occurrences = _find_occurrences # Set reports. for flag, val in vars(special_opts).items(): diff --git a/mypy/subtypes.py b/mypy/subtypes.py index e8339a8c4d69..9ed2e4af4051 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -463,8 +463,10 @@ def visit_instance(self, left: Instance) -> bool: assert unpacked.type.fullname == "builtins.tuple" if isinstance(get_proper_type(unpacked.args[0]), AnyType): return not self.proper_subtype - # TODO: we need a special case similar to above to consider (something that maps to) - # tuple[Any, ...] a subtype of Tuple[]. + if mapped.type.fullname == "builtins.tuple" and isinstance( + get_proper_type(mapped.args[0]), AnyType + ): + return not self.proper_subtype return False if isinstance(right, TypeVarTupleType): # tuple[Any, ...] 
is like Any in the world of tuples (see special case above). diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index 0e7c81edc498..391fa20db738 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -107,19 +107,147 @@ class A: pass class B(A): pass [builtins fixtures/tuple.pyi] -[case testSubtypingWithNamedTupleType] -from typing import Tuple -t1: Tuple[A, A] -t2: tuple - -if int(): - t1 = t2 # E: Incompatible types in assignment (expression has type "Tuple[Any, ...]", variable has type "Tuple[A, A]") -if int(): - t2 = t1 +[case testSubtypingWithTupleType] +from __future__ import annotations +from typing import Any, Tuple + +tuple_aa: tuple[A, A] +Tuple_aa: Tuple[A, A] + +tuple_obj: tuple[object, ...] +Tuple_obj: Tuple[object, ...] + +tuple_obj_one: tuple[object] +Tuple_obj_one: Tuple[object] + +tuple_obj_two: tuple[object, object] +Tuple_obj_two: Tuple[object, object] + +tuple_any_implicit: tuple +Tuple_any_implicit: Tuple + +tuple_any: tuple[Any, ...] +Tuple_any: Tuple[Any, ...] + +tuple_any_one: tuple[Any] +Tuple_any_one: Tuple[Any] + +tuple_any_two: tuple[Any, Any] +Tuple_any_two: Tuple[Any, Any] + +def takes_tuple_aa(t: tuple[A, A]): ... 
+ +takes_tuple_aa(tuple_aa) +takes_tuple_aa(Tuple_aa) +takes_tuple_aa(tuple_obj) # E: Argument 1 to "takes_tuple_aa" has incompatible type "Tuple[object, ...]"; expected "Tuple[A, A]" +takes_tuple_aa(Tuple_obj) # E: Argument 1 to "takes_tuple_aa" has incompatible type "Tuple[object, ...]"; expected "Tuple[A, A]" +takes_tuple_aa(tuple_obj_one) # E: Argument 1 to "takes_tuple_aa" has incompatible type "Tuple[object]"; expected "Tuple[A, A]" +takes_tuple_aa(Tuple_obj_one) # E: Argument 1 to "takes_tuple_aa" has incompatible type "Tuple[object]"; expected "Tuple[A, A]" +takes_tuple_aa(tuple_obj_two) # E: Argument 1 to "takes_tuple_aa" has incompatible type "Tuple[object, object]"; expected "Tuple[A, A]" +takes_tuple_aa(Tuple_obj_two) # E: Argument 1 to "takes_tuple_aa" has incompatible type "Tuple[object, object]"; expected "Tuple[A, A]" +takes_tuple_aa(tuple_any_implicit) +takes_tuple_aa(Tuple_any_implicit) +takes_tuple_aa(tuple_any) +takes_tuple_aa(Tuple_any) +takes_tuple_aa(tuple_any_one) # E: Argument 1 to "takes_tuple_aa" has incompatible type "Tuple[Any]"; expected "Tuple[A, A]" +takes_tuple_aa(Tuple_any_one) # E: Argument 1 to "takes_tuple_aa" has incompatible type "Tuple[Any]"; expected "Tuple[A, A]" +takes_tuple_aa(tuple_any_two) +takes_tuple_aa(Tuple_any_two) + +def takes_tuple_any_implicit(t: tuple): ... 
+ +takes_tuple_any_implicit(tuple_aa) +takes_tuple_any_implicit(Tuple_aa) +takes_tuple_any_implicit(tuple_obj) +takes_tuple_any_implicit(Tuple_obj) +takes_tuple_any_implicit(tuple_obj_one) +takes_tuple_any_implicit(Tuple_obj_one) +takes_tuple_any_implicit(tuple_obj_two) +takes_tuple_any_implicit(Tuple_obj_two) +takes_tuple_any_implicit(tuple_any_implicit) +takes_tuple_any_implicit(Tuple_any_implicit) +takes_tuple_any_implicit(tuple_any) +takes_tuple_any_implicit(Tuple_any) +takes_tuple_any_implicit(tuple_any_one) +takes_tuple_any_implicit(Tuple_any_one) +takes_tuple_any_implicit(tuple_any_two) +takes_tuple_any_implicit(Tuple_any_two) + +def takes_tuple_any_one(t: tuple[Any]): ... + +takes_tuple_any_one(tuple_aa) # E: Argument 1 to "takes_tuple_any_one" has incompatible type "Tuple[A, A]"; expected "Tuple[Any]" +takes_tuple_any_one(Tuple_aa) # E: Argument 1 to "takes_tuple_any_one" has incompatible type "Tuple[A, A]"; expected "Tuple[Any]" +takes_tuple_any_one(tuple_obj) # E: Argument 1 to "takes_tuple_any_one" has incompatible type "Tuple[object, ...]"; expected "Tuple[Any]" +takes_tuple_any_one(Tuple_obj) # E: Argument 1 to "takes_tuple_any_one" has incompatible type "Tuple[object, ...]"; expected "Tuple[Any]" +takes_tuple_any_one(tuple_obj_one) +takes_tuple_any_one(Tuple_obj_one) +takes_tuple_any_one(tuple_obj_two) # E: Argument 1 to "takes_tuple_any_one" has incompatible type "Tuple[object, object]"; expected "Tuple[Any]" +takes_tuple_any_one(Tuple_obj_two) # E: Argument 1 to "takes_tuple_any_one" has incompatible type "Tuple[object, object]"; expected "Tuple[Any]" +takes_tuple_any_one(tuple_any_implicit) +takes_tuple_any_one(Tuple_any_implicit) +takes_tuple_any_one(tuple_any) +takes_tuple_any_one(Tuple_any) +takes_tuple_any_one(tuple_any_one) +takes_tuple_any_one(Tuple_any_one) +takes_tuple_any_one(tuple_any_two) # E: Argument 1 to "takes_tuple_any_one" has incompatible type "Tuple[Any, Any]"; expected "Tuple[Any]" +takes_tuple_any_one(Tuple_any_two) # E: 
Argument 1 to "takes_tuple_any_one" has incompatible type "Tuple[Any, Any]"; expected "Tuple[Any]" class A: pass [builtins fixtures/tuple.pyi] +[case testSubtypingWithTupleTypeSubclass] +from __future__ import annotations +from typing import Any, Tuple + +class A: ... + +inst_tuple_aa: Tuple[A, A] + +class tuple_aa_subclass(Tuple[A, A]): ... +inst_tuple_aa_subclass: tuple_aa_subclass + +class tuple_any_subclass(Tuple[Any, ...]): ... +inst_tuple_any_subclass: tuple_any_subclass + +class tuple_any_one_subclass(Tuple[Any]): ... +inst_tuple_any_one_subclass: tuple_any_one_subclass + +class tuple_any_two_subclass(Tuple[Any, Any]): ... +inst_tuple_any_two_subclass: tuple_any_two_subclass + +class tuple_obj_subclass(Tuple[object, ...]): ... +inst_tuple_obj_subclass: tuple_obj_subclass + +class tuple_obj_one_subclass(Tuple[object]): ... +inst_tuple_obj_one_subclass: tuple_obj_one_subclass + +class tuple_obj_two_subclass(Tuple[object, object]): ... +inst_tuple_obj_two_subclass: tuple_obj_two_subclass + +def takes_tuple_aa(t: Tuple[A, A]): ... + +takes_tuple_aa(inst_tuple_aa) +takes_tuple_aa(inst_tuple_aa_subclass) +takes_tuple_aa(inst_tuple_any_subclass) +takes_tuple_aa(inst_tuple_any_one_subclass) # E: Argument 1 to "takes_tuple_aa" has incompatible type "tuple_any_one_subclass"; expected "Tuple[A, A]" +takes_tuple_aa(inst_tuple_any_two_subclass) +takes_tuple_aa(inst_tuple_obj_subclass) # E: Argument 1 to "takes_tuple_aa" has incompatible type "tuple_obj_subclass"; expected "Tuple[A, A]" +takes_tuple_aa(inst_tuple_obj_one_subclass) # E: Argument 1 to "takes_tuple_aa" has incompatible type "tuple_obj_one_subclass"; expected "Tuple[A, A]" +takes_tuple_aa(inst_tuple_obj_two_subclass) # E: Argument 1 to "takes_tuple_aa" has incompatible type "tuple_obj_two_subclass"; expected "Tuple[A, A]" + +def takes_tuple_aa_subclass(t: tuple_aa_subclass): ... 
+ +takes_tuple_aa_subclass(inst_tuple_aa) # E: Argument 1 to "takes_tuple_aa_subclass" has incompatible type "Tuple[A, A]"; expected "tuple_aa_subclass" +takes_tuple_aa_subclass(inst_tuple_aa_subclass) +takes_tuple_aa_subclass(inst_tuple_any_subclass) # E: Argument 1 to "takes_tuple_aa_subclass" has incompatible type "tuple_any_subclass"; expected "tuple_aa_subclass" +takes_tuple_aa_subclass(inst_tuple_any_one_subclass) # E: Argument 1 to "takes_tuple_aa_subclass" has incompatible type "tuple_any_one_subclass"; expected "tuple_aa_subclass" +takes_tuple_aa_subclass(inst_tuple_any_two_subclass) # E: Argument 1 to "takes_tuple_aa_subclass" has incompatible type "tuple_any_two_subclass"; expected "tuple_aa_subclass" +takes_tuple_aa_subclass(inst_tuple_obj_subclass) # E: Argument 1 to "takes_tuple_aa_subclass" has incompatible type "tuple_obj_subclass"; expected "tuple_aa_subclass" +takes_tuple_aa_subclass(inst_tuple_obj_one_subclass) # E: Argument 1 to "takes_tuple_aa_subclass" has incompatible type "tuple_obj_one_subclass"; expected "tuple_aa_subclass" +takes_tuple_aa_subclass(inst_tuple_obj_two_subclass) # E: Argument 1 to "takes_tuple_aa_subclass" has incompatible type "tuple_obj_two_subclass"; expected "tuple_aa_subclass" + +[builtins fixtures/tuple.pyi] + [case testTupleInitializationWithNone] # flags: --no-strict-optional from typing import Tuple @@ -1522,3 +1650,21 @@ class Bar(aaaaaaaaaa): # E: Name "aaaaaaaaaa" is not defined class FooBarTuple(Tuple[Foo, Bar]): ... [builtins fixtures/tuple.pyi] + + +[case testTupleOverloadZipAny] +from typing import Any, Iterable, Iterator, Tuple, TypeVar, overload + +T = TypeVar("T") + +@overload +def zip(__i: Iterable[T]) -> Iterator[Tuple[T]]: ... +@overload +def zip(*i: Iterable[Any]) -> Iterator[Tuple[Any, ...]]: ... +def zip(i): ... 
+ +def g(t: Tuple): + # Ideally, we'd infer that these are iterators of tuples + reveal_type(zip(*t)) # N: Revealed type is "typing.Iterator[Any]" + reveal_type(zip(t)) # N: Revealed type is "typing.Iterator[Any]" +[builtins fixtures/tuple.pyi] From d77310ae61e8e784aae46b2011f35900b9392e15 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 15 Sep 2023 00:17:32 -0700 Subject: [PATCH 046/144] Better diffs in tests (#16112) It's annoying that one line change causes everything else to show up as a diff. Just use difflib instead. I also highlight the changed lines. We can't use FancyFormatter because it doesn't work well with pytest. --- mypy/test/helpers.py | 128 +++++++++++++++++++++++-------------------- 1 file changed, 68 insertions(+), 60 deletions(-) diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py index 7447391593d5..a53e16e27dfa 100644 --- a/mypy/test/helpers.py +++ b/mypy/test/helpers.py @@ -1,6 +1,7 @@ from __future__ import annotations import contextlib +import difflib import os import pathlib import re @@ -43,64 +44,81 @@ def run_mypy(args: list[str]) -> None: pytest.fail(msg="Sample check failed", pytrace=False) -def assert_string_arrays_equal(expected: list[str], actual: list[str], msg: str) -> None: - """Assert that two string arrays are equal. +def diff_ranges( + left: list[str], right: list[str] +) -> tuple[list[tuple[int, int]], list[tuple[int, int]]]: + seq = difflib.SequenceMatcher(None, left, right) + # note last triple is a dummy, so don't need to worry + blocks = seq.get_matching_blocks() - Display any differences in a human-readable form. 
- """ - actual = clean_up(actual) - if actual != expected: - num_skip_start = num_skipped_prefix_lines(expected, actual) - num_skip_end = num_skipped_suffix_lines(expected, actual) + i = 0 + j = 0 + left_ranges = [] + right_ranges = [] + for block in blocks: + # mismatched range + left_ranges.append((i, block.a)) + right_ranges.append((j, block.b)) - sys.stderr.write("Expected:\n") + i = block.a + block.size + j = block.b + block.size - # If omit some lines at the beginning, indicate it by displaying a line - # with '...'. - if num_skip_start > 0: - sys.stderr.write(" ...\n") + # matched range + left_ranges.append((block.a, i)) + right_ranges.append((block.b, j)) + return left_ranges, right_ranges - # Keep track of the first different line. - first_diff = -1 - # Display only this many first characters of identical lines. - width = 75 +def render_diff_range( + ranges: list[tuple[int, int]], content: list[str], colour: str | None = None +) -> None: + for i, line_range in enumerate(ranges): + is_matching = i % 2 == 1 + lines = content[line_range[0] : line_range[1]] + for j, line in enumerate(lines): + if ( + is_matching + # elide the middle of matching blocks + and j >= 3 + and j < len(lines) - 3 + ): + if j == 3: + sys.stderr.write(" ...\n") + continue - for i in range(num_skip_start, len(expected) - num_skip_end): - if i >= len(actual) or expected[i] != actual[i]: - if first_diff < 0: - first_diff = i - sys.stderr.write(f" {expected[i]:<45} (diff)") - else: - e = expected[i] - sys.stderr.write(" " + e[:width]) - if len(e) > width: - sys.stderr.write("...") - sys.stderr.write("\n") - if num_skip_end > 0: - sys.stderr.write(" ...\n") + if not is_matching and colour: + sys.stderr.write(colour) - sys.stderr.write("Actual:\n") + sys.stderr.write(" " + line) - if num_skip_start > 0: - sys.stderr.write(" ...\n") + if not is_matching: + if colour: + sys.stderr.write("\033[0m") + sys.stderr.write(" (diff)") - for j in range(num_skip_start, len(actual) - num_skip_end): - if j 
>= len(expected) or expected[j] != actual[j]: - sys.stderr.write(f" {actual[j]:<45} (diff)") - else: - a = actual[j] - sys.stderr.write(" " + a[:width]) - if len(a) > width: - sys.stderr.write("...") sys.stderr.write("\n") - if not actual: - sys.stderr.write(" (empty)\n") - if num_skip_end > 0: - sys.stderr.write(" ...\n") - sys.stderr.write("\n") +def assert_string_arrays_equal(expected: list[str], actual: list[str], msg: str) -> None: + """Assert that two string arrays are equal. + + Display any differences in a human-readable form. + """ + actual = clean_up(actual) + if expected != actual: + expected_ranges, actual_ranges = diff_ranges(expected, actual) + sys.stderr.write("Expected:\n") + red = "\033[31m" if sys.platform != "win32" else None + render_diff_range(expected_ranges, expected, colour=red) + sys.stderr.write("Actual:\n") + green = "\033[32m" if sys.platform != "win32" else None + render_diff_range(actual_ranges, actual, colour=green) + + sys.stderr.write("\n") + first_diff = next( + (i for i, (a, b) in enumerate(zip(expected, actual)) if a != b), + max(len(expected), len(actual)), + ) if 0 <= first_diff < len(actual) and ( len(expected[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT or len(actual[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT @@ -109,6 +127,10 @@ def assert_string_arrays_equal(expected: list[str], actual: list[str], msg: str) # long lines. 
show_align_message(expected[first_diff], actual[first_diff]) + sys.stderr.write( + "Update the test output using --update-data -n0 " + "(you can additionally use the -k selector to update only specific tests)" + ) pytest.fail(msg, pytrace=False) @@ -226,20 +248,6 @@ def local_sys_path_set() -> Iterator[None]: sys.path = old_sys_path -def num_skipped_prefix_lines(a1: list[str], a2: list[str]) -> int: - num_eq = 0 - while num_eq < min(len(a1), len(a2)) and a1[num_eq] == a2[num_eq]: - num_eq += 1 - return max(0, num_eq - 4) - - -def num_skipped_suffix_lines(a1: list[str], a2: list[str]) -> int: - num_eq = 0 - while num_eq < min(len(a1), len(a2)) and a1[-num_eq - 1] == a2[-num_eq - 1]: - num_eq += 1 - return max(0, num_eq - 4) - - def testfile_pyversion(path: str) -> tuple[int, int]: if path.endswith("python312.test"): return 3, 12 From 402c8ffa821d35a68dfe010a59f1dd9ea3dbb02a Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 15 Sep 2023 09:42:20 +0100 Subject: [PATCH 047/144] Fix crash on malformed TypedDict in incremental mode (#16115) Fixes https://github.com/python/mypy/issues/15557 FWIW I simply copy the logic for handling malformed definitions from named tuples, that seems to be much more robust. --- mypy/semanal_typeddict.py | 14 ++++++++++---- test-data/unit/check-incremental.test | 25 +++++++++++++++++++++++++ 2 files changed, 35 insertions(+), 4 deletions(-) diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py index fb3fa713e3fb..a9a4cd868f27 100644 --- a/mypy/semanal_typeddict.py +++ b/mypy/semanal_typeddict.py @@ -366,7 +366,13 @@ def check_typeddict( name, items, types, total, tvar_defs, ok = res if not ok: # Error. Construct dummy return value. 
- info = self.build_typeddict_typeinfo("TypedDict", [], [], set(), call.line, None) + if var_name: + name = var_name + if is_func_scope: + name += "@" + str(call.line) + else: + name = var_name = "TypedDict@" + str(call.line) + info = self.build_typeddict_typeinfo(name, [], [], set(), call.line, None) else: if var_name is not None and name != var_name: self.fail( @@ -395,9 +401,9 @@ def check_typeddict( name, items, types, required_keys, call.line, existing_info ) info.line = node.line - # Store generated TypeInfo under both names, see semanal_namedtuple for more details. - if name != var_name or is_func_scope: - self.api.add_symbol_skip_local(name, info) + # Store generated TypeInfo under both names, see semanal_namedtuple for more details. + if name != var_name or is_func_scope: + self.api.add_symbol_skip_local(name, info) if var_name: self.api.add_symbol(var_name, info, node) call.analyzed = TypedDictExpr(info) diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index 06f87a26e7a1..801bbd4e77b4 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -6479,3 +6479,28 @@ from typing_extensions import TypeVarTuple, Unpack Ts = TypeVarTuple("Ts") class C(Tuple[Unpack[Ts]]): ... 
[builtins fixtures/tuple.pyi] + +[case testNoIncrementalCrashOnInvalidTypedDict] +import m +[file m.py] +import counts +[file m.py.2] +import counts +# touch +[file counts.py] +from typing_extensions import TypedDict +Counts = TypedDict("Counts", {k: int for k in "abc"}) # type: ignore +[builtins fixtures/dict.pyi] + +[case testNoIncrementalCrashOnInvalidTypedDictFunc] +import m +[file m.py] +import counts +[file m.py.2] +import counts +# touch +[file counts.py] +from typing_extensions import TypedDict +def test() -> None: + Counts = TypedDict("Counts", {k: int for k in "abc"}) # type: ignore +[builtins fixtures/dict.pyi] From 2bbc42f898031d2aa3e26f1272604ce879ff57dd Mon Sep 17 00:00:00 2001 From: Ali Hamdan Date: Fri, 15 Sep 2023 10:44:31 +0200 Subject: [PATCH 048/144] stubgen: generate valid dataclass stubs (#15625) Fixes #12441 Fixes #9986 Fixes #15966 --- mypy/stubgen.py | 57 +++++++++-- mypy/test/teststubgen.py | 11 +++ test-data/unit/stubgen.test | 182 ++++++++++++++++++++++++++++++++++++ 3 files changed, 244 insertions(+), 6 deletions(-) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index aca836c52ce8..ca7249465746 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -657,6 +657,7 @@ def __init__( self.defined_names: set[str] = set() # Short names of methods defined in the body of the current class self.method_names: set[str] = set() + self.processing_dataclass = False def visit_mypy_file(self, o: MypyFile) -> None: self.module = o.fullname # Current module being processed @@ -706,6 +707,12 @@ def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: self.clear_decorators() def visit_func_def(self, o: FuncDef) -> None: + is_dataclass_generated = ( + self.analyzed and self.processing_dataclass and o.info.names[o.name].plugin_generated + ) + if is_dataclass_generated and o.name != "__init__": + # Skip methods generated by the @dataclass decorator (except for __init__) + return if ( self.is_private_name(o.name, o.fullname) or 
self.is_not_in_all(o.name) @@ -771,6 +778,12 @@ def visit_func_def(self, o: FuncDef) -> None: else: arg = name + annotation args.append(arg) + if o.name == "__init__" and is_dataclass_generated and "**" in args: + # The dataclass plugin generates invalid nameless "*" and "**" arguments + new_name = "".join(a.split(":", 1)[0] for a in args).replace("*", "") + args[args.index("*")] = f"*{new_name}_" # this name is guaranteed to be unique + args[args.index("**")] = f"**{new_name}__" # same here + retname = None if o.name != "__init__" and isinstance(o.unanalyzed_type, CallableType): if isinstance(get_proper_type(o.unanalyzed_type.ret_type), AnyType): @@ -899,6 +912,9 @@ def visit_class_def(self, o: ClassDef) -> None: if not self._indent and self._state != EMPTY: sep = len(self._output) self.add("\n") + decorators = self.get_class_decorators(o) + for d in decorators: + self.add(f"{self._indent}@{d}\n") self.add(f"{self._indent}class {o.name}") self.record_name(o.name) base_types = self.get_base_types(o) @@ -934,6 +950,7 @@ def visit_class_def(self, o: ClassDef) -> None: else: self._state = CLASS self.method_names = set() + self.processing_dataclass = False self._current_class = None def get_base_types(self, cdef: ClassDef) -> list[str]: @@ -979,6 +996,21 @@ def get_base_types(self, cdef: ClassDef) -> list[str]: base_types.append(f"{name}={value.accept(p)}") return base_types + def get_class_decorators(self, cdef: ClassDef) -> list[str]: + decorators: list[str] = [] + p = AliasPrinter(self) + for d in cdef.decorators: + if self.is_dataclass(d): + decorators.append(d.accept(p)) + self.import_tracker.require_name(get_qualified_name(d)) + self.processing_dataclass = True + return decorators + + def is_dataclass(self, expr: Expression) -> bool: + if isinstance(expr, CallExpr): + expr = expr.callee + return self.get_fullname(expr) == "dataclasses.dataclass" + def visit_block(self, o: Block) -> None: # Unreachable statements may be partially uninitialized and that may # cause 
trouble. @@ -1336,6 +1368,9 @@ def get_init( # Final without type argument is invalid in stubs. final_arg = self.get_str_type_of_node(rvalue) typename += f"[{final_arg}]" + elif self.processing_dataclass: + # attribute without annotation is not a dataclass field, don't add annotation. + return f"{self._indent}{lvalue} = ...\n" else: typename = self.get_str_type_of_node(rvalue) initializer = self.get_assign_initializer(rvalue) @@ -1343,12 +1378,20 @@ def get_init( def get_assign_initializer(self, rvalue: Expression) -> str: """Does this rvalue need some special initializer value?""" - if self._current_class and self._current_class.info: - # Current rules - # 1. Return `...` if we are dealing with `NamedTuple` and it has an existing default value - if self._current_class.info.is_named_tuple and not isinstance(rvalue, TempNode): - return " = ..." - # TODO: support other possible cases, where initializer is important + if not self._current_class: + return "" + # Current rules + # 1. Return `...` if we are dealing with `NamedTuple` or `dataclass` field and + # it has an existing default value + if ( + self._current_class.info + and self._current_class.info.is_named_tuple + and not isinstance(rvalue, TempNode) + ): + return " = ..." + if self.processing_dataclass and not (isinstance(rvalue, TempNode) and rvalue.no_rhs): + return " = ..." 
+ # TODO: support other possible cases, where initializer is important # By default, no initializer is required: return "" @@ -1410,6 +1453,8 @@ def is_private_name(self, name: str, fullname: str | None = None) -> bool: return False if fullname in EXTRA_EXPORTED: return False + if name == "_": + return False return name.startswith("_") and (not name.endswith("__") or name in IGNORED_DUNDERS) def is_private_member(self, fullname: str) -> bool: diff --git a/mypy/test/teststubgen.py b/mypy/test/teststubgen.py index 79d380785a39..7e30515ac892 100644 --- a/mypy/test/teststubgen.py +++ b/mypy/test/teststubgen.py @@ -724,11 +724,22 @@ def run_case_inner(self, testcase: DataDrivenTestCase) -> None: def parse_flags(self, program_text: str, extra: list[str]) -> Options: flags = re.search("# flags: (.*)$", program_text, flags=re.MULTILINE) + pyversion = None if flags: flag_list = flags.group(1).split() + for i, flag in enumerate(flag_list): + if flag.startswith("--python-version="): + pyversion = flag.split("=", 1)[1] + del flag_list[i] + break else: flag_list = [] options = parse_options(flag_list + extra) + if pyversion: + # A hack to allow testing old python versions with new language constructs + # This should be rarely used in general as stubgen output should not be version-specific + major, minor = pyversion.split(".", 1) + options.pyversion = (int(major), int(minor)) if "--verbose" not in flag_list: options.quiet = True else: diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index 774a17b76161..828680fadcf2 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -3512,3 +3512,185 @@ def gen2() -> _Generator[_Incomplete, _Incomplete, _Incomplete]: ... class X(_Incomplete): ... class Y(_Incomplete): ... 
+ +[case testDataclass] +import dataclasses +import dataclasses as dcs +from dataclasses import dataclass, InitVar, KW_ONLY +from dataclasses import dataclass as dc +from typing import ClassVar + +@dataclasses.dataclass +class X: + a: int + b: str = "hello" + c: ClassVar + d: ClassVar = 200 + f: list[int] = field(init=False, default_factory=list) + g: int = field(default=2, kw_only=True) + _: KW_ONLY + h: int = 1 + i: InitVar[str] + j: InitVar = 100 + non_field = None + +@dcs.dataclass +class Y: ... + +@dataclass +class Z: ... + +@dc +class W: ... + +@dataclass(init=False, repr=False) +class V: ... + +[out] +import dataclasses +import dataclasses as dcs +from dataclasses import InitVar, KW_ONLY, dataclass, dataclass as dc +from typing import ClassVar + +@dataclasses.dataclass +class X: + a: int + b: str = ... + c: ClassVar + d: ClassVar = ... + f: list[int] = ... + g: int = ... + _: KW_ONLY + h: int = ... + i: InitVar[str] + j: InitVar = ... + non_field = ... + +@dcs.dataclass +class Y: ... +@dataclass +class Z: ... +@dc +class W: ... +@dataclass(init=False, repr=False) +class V: ... + +[case testDataclass_semanal] +from dataclasses import dataclass, InitVar +from typing import ClassVar + +@dataclass +class X: + a: int + b: str = "hello" + c: ClassVar + d: ClassVar = 200 + f: list[int] = field(init=False, default_factory=list) + g: int = field(default=2, kw_only=True) + h: int = 1 + i: InitVar[str] + j: InitVar = 100 + non_field = None + +@dataclass(init=False, repr=False, frozen=True) +class Y: ... + +[out] +from dataclasses import InitVar, dataclass +from typing import ClassVar + +@dataclass +class X: + a: int + b: str = ... + c: ClassVar + d: ClassVar = ... + f: list[int] = ... + g: int = ... + h: int = ... + i: InitVar[str] + j: InitVar = ... + non_field = ... + def __init__(self, a, b, f, g, h, i, j) -> None: ... + +@dataclass(init=False, repr=False, frozen=True) +class Y: ... 
+ +[case testDataclassWithKwOnlyField_semanal] +# flags: --python-version=3.10 +from dataclasses import dataclass, InitVar, KW_ONLY +from typing import ClassVar + +@dataclass +class X: + a: int + b: str = "hello" + c: ClassVar + d: ClassVar = 200 + f: list[int] = field(init=False, default_factory=list) + g: int = field(default=2, kw_only=True) + _: KW_ONLY + h: int = 1 + i: InitVar[str] + j: InitVar = 100 + non_field = None + +@dataclass(init=False, repr=False, frozen=True) +class Y: ... + +[out] +from dataclasses import InitVar, KW_ONLY, dataclass +from typing import ClassVar + +@dataclass +class X: + a: int + b: str = ... + c: ClassVar + d: ClassVar = ... + f: list[int] = ... + g: int = ... + _: KW_ONLY + h: int = ... + i: InitVar[str] + j: InitVar = ... + non_field = ... + def __init__(self, a, b, f, g, *, h, i, j) -> None: ... + +@dataclass(init=False, repr=False, frozen=True) +class Y: ... + +[case testDataclassWithExplicitGeneratedMethodsOverrides_semanal] +from dataclasses import dataclass + +@dataclass +class X: + a: int + def __init__(self, a: int, b: str = ...) -> None: ... + def __post_init__(self) -> None: ... + +[out] +from dataclasses import dataclass + +@dataclass +class X: + a: int + def __init__(self, a: int, b: str = ...) -> None: ... + def __post_init__(self) -> None: ... + +[case testDataclassInheritsFromAny_semanal] +from dataclasses import dataclass +import missing + +@dataclass +class X(missing.Base): + a: int + +[out] +import missing +from dataclasses import dataclass + +@dataclass +class X(missing.Base): + a: int + def __init__(self, *selfa_, a, **selfa__) -> None: ... 
From 88ae1e4c1541e5b03d695cf63d1265b972e427d9 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 15 Sep 2023 21:53:35 +0100 Subject: [PATCH 049/144] Fix crash on star unpack in TypedDict (#16116) Fixes https://github.com/python/mypy/issues/16107 Fixes https://github.com/python/mypy/issues/15891 I only vaguely remember why I added those context managers, it seemed to me giving full TypedDict as context may cause false positives. But since the current way causes crashes, let's just not do this (we will see if there will be actual false positives). --- mypy/checkexpr.py | 3 +-- test-data/unit/check-typeddict.test | 12 ++++++++++ test-data/unit/reports.test | 34 +++++++++++++++++++++++++++++ 3 files changed, 47 insertions(+), 2 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 22a9852545b7..f46c8cb15c6f 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -813,8 +813,7 @@ def validate_star_typeddict_item( Note `result` and `always_present_keys` are updated in place. Return true if the expression `item_arg` may valid in `callee` TypedDict context. """ - with self.chk.local_type_map(), self.msg.filter_errors(): - inferred = get_proper_type(self.accept(item_arg, type_context=callee)) + inferred = get_proper_type(self.accept(item_arg, type_context=callee)) possible_tds = [] if isinstance(inferred, TypedDictType): possible_tds = [inferred] diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index b8953f05b6a5..7ee9ef0b708b 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -3224,3 +3224,15 @@ t2: Foo = {**y} # E: Missing key "a" for TypedDict "Foo" t3: Foo = {**z} # E: Missing key "a" for TypedDict "Foo" [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] + +[case testTypedDictUnpackError] +from typing import TypedDict + +class Foo(TypedDict): + a: int + +def foo(x: int) -> Foo: ... 
+ +f: Foo = {**foo("no")} # E: Argument 1 to "foo" has incompatible type "str"; expected "int" +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] diff --git a/test-data/unit/reports.test b/test-data/unit/reports.test index a6cde503ca09..16061d9c32bf 100644 --- a/test-data/unit/reports.test +++ b/test-data/unit/reports.test @@ -69,6 +69,40 @@ def untyped_function(): +[case testCoberturaStarUnpacking] +# cmd: mypy --cobertura-xml-report build a.py +[file a.py] +from typing import TypedDict + +class MyDict(TypedDict): + a: int + +def foo(a: int) -> MyDict: + return {"a": a} +md: MyDict = MyDict(**foo(42)) +[outfile build/cobertura.xml] + + + $PWD + + + + + + + + + + + + + + + + + + + [case testAnyExprReportDivisionByZero] # cmd: mypy --any-exprs-report=out -c 'pass' From 80232b0cd6305b848c0d454bac04a5fb30578766 Mon Sep 17 00:00:00 2001 From: AlexWaygood Date: Fri, 15 Sep 2023 20:36:25 +0100 Subject: [PATCH 050/144] Sync typeshed Source commit: https://github.com/python/typeshed/commit/0ea043253e70d0304478a6d0b58bcda4cc583d08 --- mypy/typeshed/stdlib/_ctypes.pyi | 18 ++- mypy/typeshed/stdlib/asyncio/tasks.pyi | 105 ++++++++++++++---- mypy/typeshed/stdlib/builtins.pyi | 105 +++++++++++++++++- mypy/typeshed/stdlib/collections/__init__.pyi | 17 ++- mypy/typeshed/stdlib/csv.pyi | 8 +- mypy/typeshed/stdlib/ctypes/wintypes.pyi | 89 ++++++++------- mypy/typeshed/stdlib/enum.pyi | 22 +++- mypy/typeshed/stdlib/functools.pyi | 40 ++++--- mypy/typeshed/stdlib/http/client.pyi | 3 + mypy/typeshed/stdlib/http/cookies.pyi | 6 +- mypy/typeshed/stdlib/imaplib.pyi | 11 +- mypy/typeshed/stdlib/importlib/__init__.pyi | 6 +- mypy/typeshed/stdlib/importlib/abc.pyi | 60 ++++++---- mypy/typeshed/stdlib/importlib/machinery.pyi | 33 ++++-- .../stdlib/importlib/metadata/__init__.pyi | 12 +- .../stdlib/importlib/metadata/_meta.pyi | 38 +++++-- .../stdlib/importlib/resources/__init__.pyi | 11 +- mypy/typeshed/stdlib/importlib/util.pyi | 11 +- mypy/typeshed/stdlib/pathlib.pyi | 
19 +++- mypy/typeshed/stdlib/poplib.pyi | 29 +++-- mypy/typeshed/stdlib/smtplib.pyi | 42 +++++-- mypy/typeshed/stdlib/sqlite3/dbapi2.pyi | 2 +- mypy/typeshed/stdlib/tkinter/__init__.pyi | 7 +- mypy/typeshed/stdlib/turtle.pyi | 6 + mypy/typeshed/stdlib/typing.pyi | 9 +- mypy/typeshed/stdlib/unittest/mock.pyi | 20 +++- mypy/typeshed/stdlib/urllib/request.pyi | 18 ++- mypy/typeshed/stdlib/weakref.pyi | 8 ++ mypy/typeshed/stdlib/zipfile.pyi | 14 ++- 29 files changed, 574 insertions(+), 195 deletions(-) diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi index 165bb5337784..1f15ac057988 100644 --- a/mypy/typeshed/stdlib/_ctypes.pyi +++ b/mypy/typeshed/stdlib/_ctypes.pyi @@ -122,15 +122,23 @@ class CFuncPtr(_PointerLike, _CData): def __call__(self, *args: Any, **kwargs: Any) -> Any: ... -class _CField: +_GetT = TypeVar("_GetT") +_SetT = TypeVar("_SetT") + +class _CField(Generic[_CT, _GetT, _SetT]): offset: int size: int + @overload + def __get__(self, __instance: None, __owner: type[Any] | None) -> Self: ... + @overload + def __get__(self, __instance: Any, __owner: type[Any] | None) -> _GetT: ... + def __set__(self, __instance: Any, __value: _SetT) -> None: ... class _StructUnionMeta(_CDataMeta): _fields_: Sequence[tuple[str, type[_CData]] | tuple[str, type[_CData], int]] _pack_: int _anonymous_: Sequence[str] - def __getattr__(self, name: str) -> _CField: ... + def __getattr__(self, name: str) -> _CField[Any, Any, Any]: ... class _StructUnionBase(_CData, metaclass=_StructUnionMeta): def __init__(self, *args: Any, **kw: Any) -> None: ... @@ -151,7 +159,11 @@ class Array(_CData, Generic[_CT]): def _type_(self) -> type[_CT]: ... @_type_.setter def _type_(self, value: type[_CT]) -> None: ... - raw: bytes # Note: only available if _CT == c_char + # Note: only available if _CT == c_char + @property + def raw(self) -> bytes: ... + @raw.setter + def raw(self, value: ReadableBuffer) -> None: ... 
value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise # TODO These methods cannot be annotated correctly at the moment. # All of these "Any"s stand for the array's element type, but it's not possible to use _CT diff --git a/mypy/typeshed/stdlib/asyncio/tasks.pyi b/mypy/typeshed/stdlib/asyncio/tasks.pyi index 3bc65e3703c5..b6929deb0fae 100644 --- a/mypy/typeshed/stdlib/asyncio/tasks.pyi +++ b/mypy/typeshed/stdlib/asyncio/tasks.pyi @@ -2,7 +2,7 @@ import concurrent.futures import sys from collections.abc import Awaitable, Coroutine, Generator, Iterable, Iterator from types import FrameType -from typing import Any, Generic, TextIO, TypeVar, overload +from typing import Any, Generic, Protocol, TextIO, TypeVar, overload from typing_extensions import Literal, TypeAlias from . import _CoroutineLike @@ -14,27 +14,52 @@ if sys.version_info >= (3, 9): if sys.version_info >= (3, 11): from contextvars import Context -__all__ = ( - "Task", - "create_task", - "FIRST_COMPLETED", - "FIRST_EXCEPTION", - "ALL_COMPLETED", - "wait", - "wait_for", - "as_completed", - "sleep", - "gather", - "shield", - "ensure_future", - "run_coroutine_threadsafe", - "current_task", - "all_tasks", - "_register_task", - "_unregister_task", - "_enter_task", - "_leave_task", -) +if sys.version_info >= (3, 12): + __all__ = ( + "Task", + "create_task", + "FIRST_COMPLETED", + "FIRST_EXCEPTION", + "ALL_COMPLETED", + "wait", + "wait_for", + "as_completed", + "sleep", + "gather", + "shield", + "ensure_future", + "run_coroutine_threadsafe", + "current_task", + "all_tasks", + "create_eager_task_factory", + "eager_task_factory", + "_register_task", + "_unregister_task", + "_enter_task", + "_leave_task", + ) +else: + __all__ = ( + "Task", + "create_task", + "FIRST_COMPLETED", + "FIRST_EXCEPTION", + "ALL_COMPLETED", + "wait", + "wait_for", + "as_completed", + "sleep", + "gather", + "shield", + "ensure_future", + "run_coroutine_threadsafe", + "current_task", + "all_tasks", + 
"_register_task", + "_unregister_task", + "_enter_task", + "_leave_task", + ) _T = TypeVar("_T") _T_co = TypeVar("_T_co", covariant=True) @@ -356,5 +381,41 @@ else: def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: ... def _enter_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... def _leave_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... + +if sys.version_info >= (3, 12): + _TaskT_co = TypeVar("_TaskT_co", bound=Task[Any], covariant=True) + + class _CustomTaskConstructor(Protocol[_TaskT_co]): + def __call__( + self, + __coro: _TaskCompatibleCoro[Any], + *, + loop: AbstractEventLoop, + name: str | None, + context: Context | None, + eager_start: bool, + ) -> _TaskT_co: ... + + class _EagerTaskFactoryType(Protocol[_TaskT_co]): + def __call__( + self, + loop: AbstractEventLoop, + coro: _TaskCompatibleCoro[Any], + *, + name: str | None = None, + context: Context | None = None, + ) -> _TaskT_co: ... + + def create_eager_task_factory( + custom_task_constructor: _CustomTaskConstructor[_TaskT_co], + ) -> _EagerTaskFactoryType[_TaskT_co]: ... + def eager_task_factory( + loop: AbstractEventLoop | None, + coro: _TaskCompatibleCoro[_T_co], + *, + name: str | None = None, + context: Context | None = None, + ) -> Task[_T_co]: ... + def _register_task(task: Task[Any]) -> None: ... def _unregister_task(task: Task[Any]) -> None: ... diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index 66c644d09a4d..cf4f857c5524 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -56,6 +56,7 @@ from typing import ( # noqa: Y022 from typing_extensions import ( Concatenate, Literal, + LiteralString, ParamSpec, Self, SupportsIndex, @@ -441,8 +442,17 @@ class str(Sequence[str]): def __new__(cls, object: object = ...) -> Self: ... @overload def __new__(cls, object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ... 
+ @overload + def capitalize(self: LiteralString) -> LiteralString: ... + @overload def capitalize(self) -> str: ... # type: ignore[misc] + @overload + def casefold(self: LiteralString) -> LiteralString: ... + @overload def casefold(self) -> str: ... # type: ignore[misc] + @overload + def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... + @overload def center(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... @@ -450,11 +460,20 @@ class str(Sequence[str]): self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... if sys.version_info >= (3, 8): + @overload + def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: ... + @overload def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ... # type: ignore[misc] else: + @overload + def expandtabs(self: LiteralString, tabsize: int = 8) -> LiteralString: ... + @overload def expandtabs(self, tabsize: int = 8) -> str: ... # type: ignore[misc] def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... + @overload + def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ... + @overload def format(self, *args: object, **kwargs: object) -> str: ... def format_map(self, map: _FormatMapMapping) -> str: ... def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... @@ -470,32 +489,91 @@ class str(Sequence[str]): def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... + @overload + def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ... 
+ @overload def join(self, __iterable: Iterable[str]) -> str: ... # type: ignore[misc] + @overload + def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... + @overload def ljust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] + @overload + def lower(self: LiteralString) -> LiteralString: ... + @overload def lower(self) -> str: ... # type: ignore[misc] + @overload + def lstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... + @overload def lstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + @overload + def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... + @overload def partition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] + @overload + def replace( + self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = -1 + ) -> LiteralString: ... + @overload def replace(self, __old: str, __new: str, __count: SupportsIndex = -1) -> str: ... # type: ignore[misc] if sys.version_info >= (3, 9): + @overload + def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ... + @overload def removeprefix(self, __prefix: str) -> str: ... # type: ignore[misc] + @overload + def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ... + @overload def removesuffix(self, __suffix: str) -> str: ... # type: ignore[misc] def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... + @overload + def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... + @overload def rjust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... 
# type: ignore[misc] + @overload + def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... + @overload def rpartition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] + @overload + def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... + @overload def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] + @overload + def rstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... + @overload def rstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + @overload + def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... + @overload def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] + @overload + def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: ... + @overload def splitlines(self, keepends: bool = False) -> list[str]: ... # type: ignore[misc] def startswith( self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... + @overload + def strip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... + @overload def strip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + @overload + def swapcase(self: LiteralString) -> LiteralString: ... + @overload def swapcase(self) -> str: ... # type: ignore[misc] + @overload + def title(self: LiteralString) -> LiteralString: ... + @overload def title(self) -> str: ... # type: ignore[misc] def translate(self, __table: _TranslateTable) -> str: ... + @overload + def upper(self: LiteralString) -> LiteralString: ... + @overload def upper(self) -> str: ... 
# type: ignore[misc] + @overload + def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ... + @overload def zfill(self, __width: SupportsIndex) -> str: ... # type: ignore[misc] @staticmethod @overload @@ -506,6 +584,9 @@ class str(Sequence[str]): @staticmethod @overload def maketrans(__x: str, __y: str, __z: str) -> dict[int, int | None]: ... + @overload + def __add__(self: LiteralString, __value: LiteralString) -> LiteralString: ... + @overload def __add__(self, __value: str) -> str: ... # type: ignore[misc] # Incompatible with Sequence.__contains__ def __contains__(self, __key: str) -> bool: ... # type: ignore[override] @@ -514,13 +595,25 @@ class str(Sequence[str]): def __getitem__(self, __key: SupportsIndex | slice) -> str: ... def __gt__(self, __value: str) -> bool: ... def __hash__(self) -> int: ... + @overload + def __iter__(self: LiteralString) -> Iterator[LiteralString]: ... + @overload def __iter__(self) -> Iterator[str]: ... # type: ignore[misc] def __le__(self, __value: str) -> bool: ... def __len__(self) -> int: ... def __lt__(self, __value: str) -> bool: ... + @overload + def __mod__(self: LiteralString, __value: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ... + @overload def __mod__(self, __value: Any) -> str: ... + @overload + def __mul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ... + @overload def __mul__(self, __value: SupportsIndex) -> str: ... # type: ignore[misc] def __ne__(self, __value: object) -> bool: ... + @overload + def __rmul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ... + @overload def __rmul__(self, __value: SupportsIndex) -> str: ... # type: ignore[misc] def __getnewargs__(self) -> tuple[str]: ... @@ -1027,13 +1120,13 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): if sys.version_info >= (3, 9): def __class_getitem__(cls, __item: Any) -> GenericAlias: ... @overload - def __or__(self, __value: Mapping[_KT, _VT]) -> dict[_KT, _VT]: ... 
+ def __or__(self, __value: dict[_KT, _VT]) -> dict[_KT, _VT]: ... @overload - def __or__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ... + def __or__(self, __value: dict[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ... @overload - def __ror__(self, __value: Mapping[_KT, _VT]) -> dict[_KT, _VT]: ... + def __ror__(self, __value: dict[_KT, _VT]) -> dict[_KT, _VT]: ... @overload - def __ror__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ... + def __ror__(self, __value: dict[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ... # dict.__ior__ should be kept roughly in line with MutableMapping.update() @overload # type: ignore[misc] def __ior__(self, __value: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... @@ -1698,11 +1791,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit # Instead, we special-case the most common examples of this: bool and literal integers. if sys.version_info >= (3, 8): @overload - def sum(__iterable: Iterable[bool], start: int = 0) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = 0) -> int: ... # type: ignore[misc] else: @overload - def sum(__iterable: Iterable[bool], __start: int = 0) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = 0) -> int: ... # type: ignore[misc] @overload def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ... diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi index 8ceecd1f354e..3b8d92f78612 100644 --- a/mypy/typeshed/stdlib/collections/__init__.pyi +++ b/mypy/typeshed/stdlib/collections/__init__.pyi @@ -96,6 +96,11 @@ class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... @overload def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... 
+ if sys.version_info >= (3, 12): + @overload + def get(self, key: _KT, default: None = None) -> _VT | None: ... + @overload + def get(self, key: _KT, default: _T) -> _VT | _T: ... class UserList(MutableSequence[_T]): data: list[_T] @@ -402,13 +407,13 @@ class defaultdict(dict[_KT, _VT], Generic[_KT, _VT]): def copy(self) -> Self: ... if sys.version_info >= (3, 9): @overload - def __or__(self, __value: Mapping[_KT, _VT]) -> Self: ... + def __or__(self, __value: dict[_KT, _VT]) -> Self: ... @overload - def __or__(self, __value: Mapping[_T1, _T2]) -> defaultdict[_KT | _T1, _VT | _T2]: ... + def __or__(self, __value: dict[_T1, _T2]) -> defaultdict[_KT | _T1, _VT | _T2]: ... @overload - def __ror__(self, __value: Mapping[_KT, _VT]) -> Self: ... + def __ror__(self, __value: dict[_KT, _VT]) -> Self: ... @overload - def __ror__(self, __value: Mapping[_T1, _T2]) -> defaultdict[_KT | _T1, _VT | _T2]: ... + def __ror__(self, __value: dict[_T1, _T2]) -> defaultdict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): maps: list[MutableMapping[_KT, _VT]] @@ -422,6 +427,10 @@ class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __iter__(self) -> Iterator[_KT]: ... def __len__(self) -> int: ... def __contains__(self, key: object) -> bool: ... + @overload + def get(self, key: _KT, default: None = None) -> _VT | None: ... + @overload + def get(self, key: _KT, default: _T) -> _VT | _T: ... def __missing__(self, key: _KT) -> _VT: ... # undocumented def __bool__(self) -> bool: ... # Keep ChainMap.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. diff --git a/mypy/typeshed/stdlib/csv.pyi b/mypy/typeshed/stdlib/csv.pyi index a9c7fe0492c8..53425fbcccb1 100644 --- a/mypy/typeshed/stdlib/csv.pyi +++ b/mypy/typeshed/stdlib/csv.pyi @@ -71,8 +71,8 @@ class unix_dialect(Dialect): ... 
class DictReader(Iterator[_DictReadMapping[_T | Any, str | Any]], Generic[_T]): fieldnames: Sequence[_T] | None - restkey: str | None - restval: str | None + restkey: _T | None + restval: str | Any | None reader: _reader dialect: _DialectLike line_num: int @@ -81,8 +81,8 @@ class DictReader(Iterator[_DictReadMapping[_T | Any, str | Any]], Generic[_T]): self, f: Iterable[str], fieldnames: Sequence[_T], - restkey: str | None = None, - restval: str | None = None, + restkey: _T | None = None, + restval: str | Any | None = None, dialect: _DialectLike = "excel", *, delimiter: str = ",", diff --git a/mypy/typeshed/stdlib/ctypes/wintypes.pyi b/mypy/typeshed/stdlib/ctypes/wintypes.pyi index 3bd27934750a..59c7ae3e599f 100644 --- a/mypy/typeshed/stdlib/ctypes/wintypes.pyi +++ b/mypy/typeshed/stdlib/ctypes/wintypes.pyi @@ -1,6 +1,7 @@ from ctypes import ( Array, Structure, + _CField, _Pointer, _SimpleCData, c_byte, @@ -20,6 +21,7 @@ from ctypes import ( c_wchar, c_wchar_p, ) +from typing import TypeVar from typing_extensions import TypeAlias BYTE = c_byte @@ -101,39 +103,42 @@ HWND = HANDLE SC_HANDLE = HANDLE SERVICE_STATUS_HANDLE = HANDLE +_CIntLikeT = TypeVar("_CIntLikeT", bound=_SimpleCData[int]) +_CIntLikeField: TypeAlias = _CField[_CIntLikeT, int, _CIntLikeT | int] + class RECT(Structure): - left: LONG - top: LONG - right: LONG - bottom: LONG + left: _CIntLikeField[LONG] + top: _CIntLikeField[LONG] + right: _CIntLikeField[LONG] + bottom: _CIntLikeField[LONG] RECTL = RECT _RECTL = RECT tagRECT = RECT class _SMALL_RECT(Structure): - Left: SHORT - Top: SHORT - Right: SHORT - Bottom: SHORT + Left: _CIntLikeField[SHORT] + Top: _CIntLikeField[SHORT] + Right: _CIntLikeField[SHORT] + Bottom: _CIntLikeField[SHORT] SMALL_RECT = _SMALL_RECT class _COORD(Structure): - X: SHORT - Y: SHORT + X: _CIntLikeField[SHORT] + Y: _CIntLikeField[SHORT] class POINT(Structure): - x: LONG - y: LONG + x: _CIntLikeField[LONG] + y: _CIntLikeField[LONG] POINTL = POINT _POINTL = POINT tagPOINT = POINT 
class SIZE(Structure): - cx: LONG - cy: LONG + cx: _CIntLikeField[LONG] + cy: _CIntLikeField[LONG] SIZEL = SIZE tagSIZE = SIZE @@ -141,45 +146,45 @@ tagSIZE = SIZE def RGB(red: int, green: int, blue: int) -> int: ... class FILETIME(Structure): - dwLowDateTime: DWORD - dwHighDateTime: DWORD + dwLowDateTime: _CIntLikeField[DWORD] + dwHighDateTime: _CIntLikeField[DWORD] _FILETIME = FILETIME class MSG(Structure): - hWnd: HWND - message: UINT - wParam: WPARAM - lParam: LPARAM - time: DWORD - pt: POINT + hWnd: _CField[HWND, int | None, HWND | int | None] + message: _CIntLikeField[UINT] + wParam: _CIntLikeField[WPARAM] + lParam: _CIntLikeField[LPARAM] + time: _CIntLikeField[DWORD] + pt: _CField[POINT, POINT, POINT] tagMSG = MSG MAX_PATH: int class WIN32_FIND_DATAA(Structure): - dwFileAttributes: DWORD - ftCreationTime: FILETIME - ftLastAccessTime: FILETIME - ftLastWriteTime: FILETIME - nFileSizeHigh: DWORD - nFileSizeLow: DWORD - dwReserved0: DWORD - dwReserved1: DWORD - cFileName: Array[CHAR] - cAlternateFileName: Array[CHAR] + dwFileAttributes: _CIntLikeField[DWORD] + ftCreationTime: _CField[FILETIME, FILETIME, FILETIME] + ftLastAccessTime: _CField[FILETIME, FILETIME, FILETIME] + ftLastWriteTime: _CField[FILETIME, FILETIME, FILETIME] + nFileSizeHigh: _CIntLikeField[DWORD] + nFileSizeLow: _CIntLikeField[DWORD] + dwReserved0: _CIntLikeField[DWORD] + dwReserved1: _CIntLikeField[DWORD] + cFileName: _CField[Array[CHAR], bytes, bytes] + cAlternateFileName: _CField[Array[CHAR], bytes, bytes] class WIN32_FIND_DATAW(Structure): - dwFileAttributes: DWORD - ftCreationTime: FILETIME - ftLastAccessTime: FILETIME - ftLastWriteTime: FILETIME - nFileSizeHigh: DWORD - nFileSizeLow: DWORD - dwReserved0: DWORD - dwReserved1: DWORD - cFileName: Array[WCHAR] - cAlternateFileName: Array[WCHAR] + dwFileAttributes: _CIntLikeField[DWORD] + ftCreationTime: _CField[FILETIME, FILETIME, FILETIME] + ftLastAccessTime: _CField[FILETIME, FILETIME, FILETIME] + ftLastWriteTime: _CField[FILETIME, 
FILETIME, FILETIME] + nFileSizeHigh: _CIntLikeField[DWORD] + nFileSizeLow: _CIntLikeField[DWORD] + dwReserved0: _CIntLikeField[DWORD] + dwReserved1: _CIntLikeField[DWORD] + cFileName: _CField[Array[WCHAR], str, str] + cAlternateFileName: _CField[Array[WCHAR], str, str] # These pointer type definitions use _Pointer[...] instead of POINTER(...), to allow them # to be used in type annotations. diff --git a/mypy/typeshed/stdlib/enum.pyi b/mypy/typeshed/stdlib/enum.pyi index e6eaf6c413dc..10ea19257144 100644 --- a/mypy/typeshed/stdlib/enum.pyi +++ b/mypy/typeshed/stdlib/enum.pyi @@ -119,10 +119,12 @@ class EnumMeta(type): def __len__(self) -> int: ... def __bool__(self) -> Literal[True]: ... def __dir__(self) -> list[str]: ... - # Simple value lookup + + # Overload 1: Value lookup on an already existing enum class (simple case) @overload def __call__(cls: type[_EnumMemberT], value: Any, names: None = None) -> _EnumMemberT: ... - # Functional Enum API + + # Overload 2: Functional API for constructing new enum classes. if sys.version_info >= (3, 11): @overload def __call__( @@ -148,6 +150,18 @@ class EnumMeta(type): type: type | None = None, start: int = 1, ) -> type[Enum]: ... + + # Overload 3 (py312+ only): Value lookup on an already existing enum class (complex case) + # + # >>> class Foo(enum.Enum): + # ... X = 1, 2, 3 + # >>> Foo(1, 2, 3) + # + # + if sys.version_info >= (3, 12): + @overload + def __call__(cls: type[_EnumMemberT], value: Any, *values: Any) -> _EnumMemberT: ... + _member_names_: list[str] # undocumented _member_map_: dict[str, Enum] # undocumented _value2member_map_: dict[Any, Enum] # undocumented @@ -160,6 +174,7 @@ if sys.version_info >= (3, 11): def __set_name__(self, ownerclass: type[Enum], name: str) -> None: ... 
name: str clsname: str + member: Enum | None _magic_enum_attr = property else: _magic_enum_attr = types.DynamicClassAttribute @@ -191,6 +206,9 @@ class Enum(metaclass=EnumMeta): if sys.version_info >= (3, 11): def __copy__(self) -> Self: ... def __deepcopy__(self, memo: Any) -> Self: ... + if sys.version_info >= (3, 12): + @classmethod + def __signature__(cls) -> str: ... if sys.version_info >= (3, 11): class ReprEnum(Enum): ... diff --git a/mypy/typeshed/stdlib/functools.pyi b/mypy/typeshed/stdlib/functools.pyi index 1b4e59b7c120..0d08cdb19e3f 100644 --- a/mypy/typeshed/stdlib/functools.pyi +++ b/mypy/typeshed/stdlib/functools.pyi @@ -1,9 +1,9 @@ import sys import types -from _typeshed import IdentityFunction, SupportsAllComparisons, SupportsItems +from _typeshed import SupportsAllComparisons, SupportsItems from collections.abc import Callable, Hashable, Iterable, Sequence, Sized from typing import Any, Generic, NamedTuple, TypeVar, overload -from typing_extensions import Literal, Self, TypeAlias, TypedDict, final +from typing_extensions import Literal, ParamSpec, Self, TypeAlias, TypedDict, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -28,10 +28,12 @@ if sys.version_info >= (3, 8): if sys.version_info >= (3, 9): __all__ += ["cache"] -_AnyCallable: TypeAlias = Callable[..., object] - _T = TypeVar("_T") _S = TypeVar("_S") +_PWrapped = ParamSpec("_PWrapped") +_RWrapped = TypeVar("_RWrapped") +_PWrapper = ParamSpec("_PWrapper") +_RWrapper = TypeVar("_RWrapper") @overload def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ... @@ -85,31 +87,41 @@ else: ] WRAPPER_UPDATES: tuple[Literal["__dict__"]] +class _Wrapped(Generic[_PWrapped, _RWrapped, _PWrapper, _RWrapper]): + __wrapped__: Callable[_PWrapped, _RWrapped] + def __call__(self, *args: _PWrapper.args, **kwargs: _PWrapper.kwargs) -> _RWrapper: ... 
+ # as with ``Callable``, we'll assume that these attributes exist + __name__: str + __qualname__: str + +class _Wrapper(Generic[_PWrapped, _RWrapped]): + def __call__(self, f: Callable[_PWrapper, _RWrapper]) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... + if sys.version_info >= (3, 12): def update_wrapper( - wrapper: _T, - wrapped: _AnyCallable, + wrapper: Callable[_PWrapper, _RWrapper], + wrapped: Callable[_PWrapped, _RWrapped], assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), updated: Sequence[str] = ("__dict__",), - ) -> _T: ... + ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... def wraps( - wrapped: _AnyCallable, + wrapped: Callable[_PWrapped, _RWrapped], assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), updated: Sequence[str] = ("__dict__",), - ) -> IdentityFunction: ... + ) -> _Wrapper[_PWrapped, _RWrapped]: ... else: def update_wrapper( - wrapper: _T, - wrapped: _AnyCallable, + wrapper: Callable[_PWrapper, _RWrapper], + wrapped: Callable[_PWrapped, _RWrapped], assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), updated: Sequence[str] = ("__dict__",), - ) -> _T: ... + ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... def wraps( - wrapped: _AnyCallable, + wrapped: Callable[_PWrapped, _RWrapped], assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), updated: Sequence[str] = ("__dict__",), - ) -> IdentityFunction: ... + ) -> _Wrapper[_PWrapped, _RWrapped]: ... def total_ordering(cls: type[_T]) -> type[_T]: ... def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ... 
diff --git a/mypy/typeshed/stdlib/http/client.pyi b/mypy/typeshed/stdlib/http/client.pyi index 4b5ed3d8bda0..3e5e496ab501 100644 --- a/mypy/typeshed/stdlib/http/client.pyi +++ b/mypy/typeshed/stdlib/http/client.pyi @@ -169,6 +169,9 @@ class HTTPConnection: ) -> None: ... def getresponse(self) -> HTTPResponse: ... def set_debuglevel(self, level: int) -> None: ... + if sys.version_info >= (3, 12): + def get_proxy_response_headers(self) -> HTTPMessage | None: ... + def set_tunnel(self, host: str, port: int | None = None, headers: Mapping[str, str] | None = None) -> None: ... def connect(self) -> None: ... def close(self) -> None: ... diff --git a/mypy/typeshed/stdlib/http/cookies.pyi b/mypy/typeshed/stdlib/http/cookies.pyi index e24ef9cbdd2e..3d19bb108c2d 100644 --- a/mypy/typeshed/stdlib/http/cookies.pyi +++ b/mypy/typeshed/stdlib/http/cookies.pyi @@ -49,12 +49,12 @@ class Morsel(dict[str, Any], Generic[_T]): class BaseCookie(dict[str, Morsel[_T]], Generic[_T]): def __init__(self, input: _DataType | None = None) -> None: ... - def value_decode(self, val: str) -> _T: ... - def value_encode(self, val: _T) -> str: ... + def value_decode(self, val: str) -> tuple[_T, str]: ... + def value_encode(self, val: _T) -> tuple[_T, str]: ... def output(self, attrs: list[str] | None = None, header: str = "Set-Cookie:", sep: str = "\r\n") -> str: ... __str__ = output def js_output(self, attrs: list[str] | None = None) -> str: ... def load(self, rawdata: _DataType) -> None: ... def __setitem__(self, key: str, value: str | Morsel[_T]) -> None: ... -class SimpleCookie(BaseCookie[_T], Generic[_T]): ... +class SimpleCookie(BaseCookie[str]): ... diff --git a/mypy/typeshed/stdlib/imaplib.pyi b/mypy/typeshed/stdlib/imaplib.pyi index 7781559c3888..a61848c9af13 100644 --- a/mypy/typeshed/stdlib/imaplib.pyi +++ b/mypy/typeshed/stdlib/imaplib.pyi @@ -108,9 +108,14 @@ class IMAP4: def print_log(self) -> None: ... 
class IMAP4_SSL(IMAP4): - keyfile: str - certfile: str - if sys.version_info >= (3, 9): + if sys.version_info < (3, 12): + keyfile: str + certfile: str + if sys.version_info >= (3, 12): + def __init__( + self, host: str = "", port: int = 993, *, ssl_context: SSLContext | None = None, timeout: float | None = None + ) -> None: ... + elif sys.version_info >= (3, 9): def __init__( self, host: str = "", diff --git a/mypy/typeshed/stdlib/importlib/__init__.pyi b/mypy/typeshed/stdlib/importlib/__init__.pyi index 8d73319f8c3d..8506efc01171 100644 --- a/mypy/typeshed/stdlib/importlib/__init__.pyi +++ b/mypy/typeshed/stdlib/importlib/__init__.pyi @@ -1,3 +1,4 @@ +import sys from collections.abc import Mapping, Sequence from importlib.abc import Loader from types import ModuleType @@ -15,6 +16,9 @@ def __import__( # `importlib.import_module` return type should be kept the same as `builtins.__import__` def import_module(name: str, package: str | None = None) -> ModuleType: ... -def find_loader(name: str, path: str | None = None) -> Loader | None: ... + +if sys.version_info < (3, 12): + def find_loader(name: str, path: str | None = None) -> Loader | None: ... + def invalidate_caches() -> None: ... def reload(module: ModuleType) -> ModuleType: ... diff --git a/mypy/typeshed/stdlib/importlib/abc.pyi b/mypy/typeshed/stdlib/importlib/abc.pyi index 4bf46104ba6d..28c33205a4df 100644 --- a/mypy/typeshed/stdlib/importlib/abc.pyi +++ b/mypy/typeshed/stdlib/importlib/abc.pyi @@ -20,7 +20,6 @@ from typing_extensions import Literal if sys.version_info >= (3, 11): __all__ = [ "Loader", - "Finder", "MetaPathFinder", "PathEntryFinder", "ResourceLoader", @@ -28,16 +27,19 @@ if sys.version_info >= (3, 11): "ExecutionLoader", "FileLoader", "SourceLoader", - "ResourceReader", - "Traversable", - "TraversableResources", ] -class Finder(metaclass=ABCMeta): ... 
+ if sys.version_info < (3, 12): + __all__ += ["Finder", "ResourceReader", "Traversable", "TraversableResources"] + +if sys.version_info < (3, 12): + class Finder(metaclass=ABCMeta): ... class Loader(metaclass=ABCMeta): def load_module(self, fullname: str) -> types.ModuleType: ... - def module_repr(self, module: types.ModuleType) -> str: ... + if sys.version_info < (3, 12): + def module_repr(self, module: types.ModuleType) -> str: ... + def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: ... # Not defined on the actual class for backwards-compatibility reasons, # but expected in new code. @@ -68,21 +70,37 @@ class SourceLoader(ResourceLoader, ExecutionLoader, metaclass=ABCMeta): def get_source(self, fullname: str) -> str | None: ... def path_stats(self, path: str) -> Mapping[str, Any]: ... -# Please keep in sync with sys._MetaPathFinder -class MetaPathFinder(Finder): - def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: ... - def invalidate_caches(self) -> None: ... - # Not defined on the actual class, but expected to exist. - def find_spec( - self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ... - ) -> ModuleSpec | None: ... - -class PathEntryFinder(Finder): - def find_module(self, fullname: str) -> Loader | None: ... - def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: ... - def invalidate_caches(self) -> None: ... - # Not defined on the actual class, but expected to exist. - def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ... +# The base classes differ on 3.12: +if sys.version_info >= (3, 12): + # Please keep in sync with sys._MetaPathFinder + class MetaPathFinder(metaclass=ABCMeta): + def invalidate_caches(self) -> None: ... + # Not defined on the actual class, but expected to exist. + def find_spec( + self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ... 
+ ) -> ModuleSpec | None: ... + + class PathEntryFinder(metaclass=ABCMeta): + def invalidate_caches(self) -> None: ... + # Not defined on the actual class, but expected to exist. + def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ... + +else: + # Please keep in sync with sys._MetaPathFinder + class MetaPathFinder(Finder): + def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: ... + def invalidate_caches(self) -> None: ... + # Not defined on the actual class, but expected to exist. + def find_spec( + self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ... + ) -> ModuleSpec | None: ... + + class PathEntryFinder(Finder): + def find_module(self, fullname: str) -> Loader | None: ... + def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: ... + def invalidate_caches(self) -> None: ... + # Not defined on the actual class, but expected to exist. + def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ... class FileLoader(ResourceLoader, ExecutionLoader, metaclass=ABCMeta): name: str diff --git a/mypy/typeshed/stdlib/importlib/machinery.pyi b/mypy/typeshed/stdlib/importlib/machinery.pyi index f5037da00d5f..1a9680ab3c46 100644 --- a/mypy/typeshed/stdlib/importlib/machinery.pyi +++ b/mypy/typeshed/stdlib/importlib/machinery.pyi @@ -31,8 +31,10 @@ class ModuleSpec: class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # MetaPathFinder - @classmethod - def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... + if sys.version_info < (3, 12): + @classmethod + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... 
+ @classmethod def find_spec( cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None @@ -47,8 +49,9 @@ class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader) @classmethod def get_source(cls, fullname: str) -> None: ... # Loader - @staticmethod - def module_repr(module: types.ModuleType) -> str: ... + if sys.version_info < (3, 12): + @staticmethod + def module_repr(module: types.ModuleType) -> str: ... if sys.version_info >= (3, 10): @staticmethod def create_module(spec: ModuleSpec) -> types.ModuleType | None: ... @@ -62,8 +65,10 @@ class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader) class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # MetaPathFinder - @classmethod - def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... + if sys.version_info < (3, 12): + @classmethod + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... + @classmethod def find_spec( cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None @@ -78,8 +83,9 @@ class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): @classmethod def get_source(cls, fullname: str) -> None: ... # Loader - @staticmethod - def module_repr(m: types.ModuleType) -> str: ... + if sys.version_info < (3, 12): + @staticmethod + def module_repr(m: types.ModuleType) -> str: ... if sys.version_info >= (3, 10): @staticmethod def create_module(spec: ModuleSpec) -> types.ModuleType | None: ... @@ -91,8 +97,10 @@ class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): def exec_module(module: types.ModuleType) -> None: ... class WindowsRegistryFinder(importlib.abc.MetaPathFinder): - @classmethod - def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... 
+ if sys.version_info < (3, 12): + @classmethod + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... + @classmethod def find_spec( cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None @@ -116,8 +124,9 @@ class PathFinder: def find_spec( cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None ) -> ModuleSpec | None: ... - @classmethod - def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... + if sys.version_info < (3, 12): + @classmethod + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... SOURCE_SUFFIXES: list[str] DEBUG_BYTECODE_SUFFIXES: list[str] diff --git a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi index 0f8a6f56cf88..e52756544e9a 100644 --- a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi +++ b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi @@ -88,6 +88,7 @@ if sys.version_info >= (3, 10): @property def groups(self) -> set[str]: ... +if sys.version_info >= (3, 10) and sys.version_info < (3, 12): class SelectableGroups(dict[str, EntryPoints]): # use as dict is deprecated since 3.10 @classmethod def load(cls, eps: Iterable[EntryPoint]) -> Self: ... @@ -195,6 +196,16 @@ def distributions( if sys.version_info >= (3, 10): def metadata(distribution_name: str) -> PackageMetadata: ... + +else: + def metadata(distribution_name: str) -> Message: ... + +if sys.version_info >= (3, 12): + def entry_points( + *, name: str = ..., value: str = ..., group: str = ..., module: str = ..., attr: str = ..., extras: list[str] = ... + ) -> EntryPoints: ... + +elif sys.version_info >= (3, 10): @overload def entry_points() -> SelectableGroups: ... # type: ignore[misc] @overload @@ -203,7 +214,6 @@ if sys.version_info >= (3, 10): ) -> EntryPoints: ... 
else: - def metadata(distribution_name: str) -> Message: ... def entry_points() -> dict[str, list[EntryPoint]]: ... def version(distribution_name: str) -> str: ... diff --git a/mypy/typeshed/stdlib/importlib/metadata/_meta.pyi b/mypy/typeshed/stdlib/importlib/metadata/_meta.pyi index e3504fe4036a..64fefa9a84e2 100644 --- a/mypy/typeshed/stdlib/importlib/metadata/_meta.pyi +++ b/mypy/typeshed/stdlib/importlib/metadata/_meta.pyi @@ -1,5 +1,6 @@ +import sys from collections.abc import Iterator -from typing import Any, Protocol, TypeVar +from typing import Any, Protocol, TypeVar, overload _T = TypeVar("_T") @@ -8,15 +9,32 @@ class PackageMetadata(Protocol): def __contains__(self, item: str) -> bool: ... def __getitem__(self, key: str) -> str: ... def __iter__(self) -> Iterator[str]: ... - def get_all(self, name: str, failobj: _T = ...) -> list[Any] | _T: ... @property def json(self) -> dict[str, str | list[str]]: ... + @overload + def get_all(self, name: str, failobj: None = None) -> list[Any] | None: ... + @overload + def get_all(self, name: str, failobj: _T) -> list[Any] | _T: ... + if sys.version_info >= (3, 12): + @overload + def get(self, name: str, failobj: None = None) -> str | None: ... + @overload + def get(self, name: str, failobj: _T) -> _T | str: ... -class SimplePath(Protocol): - def joinpath(self) -> SimplePath: ... - def parent(self) -> SimplePath: ... - def read_text(self) -> str: ... - # There was a bug in `SimplePath` definition in cpython, see #8451 - # Strictly speaking `__div__` was defined in 3.10, not __truediv__, - # but it should have always been `__truediv__`. - def __truediv__(self) -> SimplePath: ... +if sys.version_info >= (3, 12): + class SimplePath(Protocol[_T]): + def joinpath(self) -> _T: ... + @property + def parent(self) -> _T: ... + def read_text(self) -> str: ... + def __truediv__(self, other: _T | str) -> _T: ... + +else: + class SimplePath(Protocol): + def joinpath(self) -> SimplePath: ... + def parent(self) -> SimplePath: ... 
+ def read_text(self) -> str: ... + # There was a bug in `SimplePath` definition in cpython, see #8451 + # Strictly speaking `__div__` was defined in 3.10, not __truediv__, + # but it should have always been `__truediv__`. + def __truediv__(self) -> SimplePath: ... diff --git a/mypy/typeshed/stdlib/importlib/resources/__init__.pyi b/mypy/typeshed/stdlib/importlib/resources/__init__.pyi index ba3d9b087754..8d656563772c 100644 --- a/mypy/typeshed/stdlib/importlib/resources/__init__.pyi +++ b/mypy/typeshed/stdlib/importlib/resources/__init__.pyi @@ -7,6 +7,9 @@ from types import ModuleType from typing import Any, BinaryIO, TextIO from typing_extensions import TypeAlias +if sys.version_info >= (3, 9): + from importlib.abc import Traversable + __all__ = ["Package", "Resource", "contents", "is_resource", "open_binary", "open_text", "path", "read_binary", "read_text"] if sys.version_info >= (3, 9): @@ -31,9 +34,13 @@ def is_resource(package: Package, name: str) -> bool: ... def contents(package: Package) -> Iterator[str]: ... if sys.version_info >= (3, 9): - from importlib.abc import Traversable - def files(package: Package) -> Traversable: ... def as_file(path: Traversable) -> AbstractContextManager[Path]: ... +if sys.version_info >= (3, 12): + def files(anchor: Package | None = ...) -> Traversable: ... + +elif sys.version_info >= (3, 9): + def files(package: Package) -> Traversable: ... 
+ if sys.version_info >= (3, 10): from importlib.abc import ResourceReader as ResourceReader diff --git a/mypy/typeshed/stdlib/importlib/util.pyi b/mypy/typeshed/stdlib/importlib/util.pyi index f988eb270a26..6608f70d4469 100644 --- a/mypy/typeshed/stdlib/importlib/util.pyi +++ b/mypy/typeshed/stdlib/importlib/util.pyi @@ -1,5 +1,6 @@ import importlib.abc import importlib.machinery +import sys import types from _typeshed import ReadableBuffer, StrOrBytesPath from collections.abc import Callable @@ -8,9 +9,11 @@ from typing_extensions import ParamSpec _P = ParamSpec("_P") -def module_for_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... -def set_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... -def set_package(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... +if sys.version_info < (3, 12): + def module_for_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... + def set_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... + def set_package(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... + def resolve_name(name: str, package: str | None) -> str: ... MAGIC_NUMBER: bytes @@ -37,4 +40,4 @@ class LazyLoader(importlib.abc.Loader): def factory(cls, loader: importlib.abc.Loader) -> Callable[..., LazyLoader]: ... def exec_module(self, module: types.ModuleType) -> None: ... -def source_hash(source_bytes: ReadableBuffer) -> int: ... +def source_hash(source_bytes: ReadableBuffer) -> bytes: ... 
diff --git a/mypy/typeshed/stdlib/pathlib.pyi b/mypy/typeshed/stdlib/pathlib.pyi index a509ec3af9f2..10ffa4a778e8 100644 --- a/mypy/typeshed/stdlib/pathlib.pyi +++ b/mypy/typeshed/stdlib/pathlib.pyi @@ -8,6 +8,7 @@ from _typeshed import ( ReadableBuffer, StrOrBytesPath, StrPath, + Unused, ) from collections.abc import Callable, Generator, Iterator, Sequence from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper @@ -38,9 +39,13 @@ class PurePath(PathLike[str]): def suffixes(self) -> list[str]: ... @property def stem(self) -> str: ... - def __new__(cls, *args: StrPath) -> Self: ... + if sys.version_info >= (3, 12): + def __new__(cls, *args: StrPath, **kwargs: Unused) -> Self: ... + def __init__(self, *args: StrPath) -> None: ... + else: + def __new__(cls, *args: StrPath) -> Self: ... + def __hash__(self) -> int: ... - def __eq__(self, other: object) -> bool: ... def __fspath__(self) -> str: ... def __lt__(self, other: PurePath) -> bool: ... def __le__(self, other: PurePath) -> bool: ... @@ -53,7 +58,9 @@ class PurePath(PathLike[str]): def as_uri(self) -> str: ... def is_absolute(self) -> bool: ... def is_reserved(self) -> bool: ... - if sys.version_info >= (3, 9): + if sys.version_info >= (3, 12): + def is_relative_to(self, __other: StrPath, *_deprecated: StrPath) -> bool: ... + elif sys.version_info >= (3, 9): def is_relative_to(self, *other: StrPath) -> bool: ... if sys.version_info >= (3, 12): @@ -61,7 +68,11 @@ class PurePath(PathLike[str]): else: def match(self, path_pattern: str) -> bool: ... - def relative_to(self, *other: StrPath) -> Self: ... + if sys.version_info >= (3, 12): + def relative_to(self, __other: StrPath, *_deprecated: StrPath, walk_up: bool = False) -> Self: ... + else: + def relative_to(self, *other: StrPath) -> Self: ... + def with_name(self, name: str) -> Self: ... if sys.version_info >= (3, 9): def with_stem(self, stem: str) -> Self: ... 
diff --git a/mypy/typeshed/stdlib/poplib.pyi b/mypy/typeshed/stdlib/poplib.pyi index c64e47e8ef72..808e7e5222af 100644 --- a/mypy/typeshed/stdlib/poplib.pyi +++ b/mypy/typeshed/stdlib/poplib.pyi @@ -1,5 +1,6 @@ import socket import ssl +import sys from builtins import list as _list # conflicts with a method named "list" from re import Pattern from typing import Any, BinaryIO, NoReturn, overload @@ -51,14 +52,20 @@ class POP3: def stls(self, context: ssl.SSLContext | None = None) -> bytes: ... class POP3_SSL(POP3): - def __init__( - self, - host: str, - port: int = 995, - keyfile: str | None = None, - certfile: str | None = None, - timeout: float = ..., - context: ssl.SSLContext | None = None, - ) -> None: ... - # "context" is actually the last argument, but that breaks LSP and it doesn't really matter because all the arguments are ignored - def stls(self, context: Any = None, keyfile: Any = None, certfile: Any = None) -> NoReturn: ... + if sys.version_info >= (3, 12): + def __init__( + self, host: str, port: int = 995, *, timeout: float = ..., context: ssl.SSLContext | None = None + ) -> None: ... + def stls(self, context: Any = None) -> NoReturn: ... + else: + def __init__( + self, + host: str, + port: int = 995, + keyfile: str | None = None, + certfile: str | None = None, + timeout: float = ..., + context: ssl.SSLContext | None = None, + ) -> None: ... + # "context" is actually the last argument, but that breaks LSP and it doesn't really matter because all the arguments are ignored + def stls(self, context: Any = None, keyfile: Any = None, certfile: Any = None) -> NoReturn: ... diff --git a/mypy/typeshed/stdlib/smtplib.pyi b/mypy/typeshed/stdlib/smtplib.pyi index 584fa164fec9..e584d7f571a7 100644 --- a/mypy/typeshed/stdlib/smtplib.pyi +++ b/mypy/typeshed/stdlib/smtplib.pyi @@ -128,7 +128,13 @@ class SMTP: def auth_plain(self, challenge: ReadableBuffer | None = None) -> str: ... def auth_login(self, challenge: ReadableBuffer | None = None) -> str: ... 
def login(self, user: str, password: str, *, initial_response_ok: bool = True) -> _Reply: ... - def starttls(self, keyfile: str | None = None, certfile: str | None = None, context: SSLContext | None = None) -> _Reply: ... + if sys.version_info >= (3, 12): + def starttls(self, *, context: SSLContext | None = None) -> _Reply: ... + else: + def starttls( + self, keyfile: str | None = None, certfile: str | None = None, context: SSLContext | None = None + ) -> _Reply: ... + def sendmail( self, from_addr: str, @@ -152,17 +158,29 @@ class SMTP_SSL(SMTP): keyfile: str | None certfile: str | None context: SSLContext - def __init__( - self, - host: str = "", - port: int = 0, - local_hostname: str | None = None, - keyfile: str | None = None, - certfile: str | None = None, - timeout: float = ..., - source_address: _SourceAddress | None = None, - context: SSLContext | None = None, - ) -> None: ... + if sys.version_info >= (3, 12): + def __init__( + self, + host: str = "", + port: int = 0, + local_hostname: str | None = None, + *, + timeout: float = ..., + source_address: _SourceAddress | None = None, + context: SSLContext | None = None, + ) -> None: ... + else: + def __init__( + self, + host: str = "", + port: int = 0, + local_hostname: str | None = None, + keyfile: str | None = None, + certfile: str | None = None, + timeout: float = ..., + source_address: _SourceAddress | None = None, + context: SSLContext | None = None, + ) -> None: ... LMTP_PORT: int diff --git a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi index 41f731e21e26..e85f49207763 100644 --- a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi +++ b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi @@ -351,7 +351,7 @@ class Connection: @overload def cursor(self, cursorClass: None = None) -> Cursor: ... @overload - def cursor(self, cursorClass: Callable[[], _CursorT]) -> _CursorT: ... + def cursor(self, cursorClass: Callable[[Connection], _CursorT]) -> _CursorT: ... 
def execute(self, sql: str, parameters: _Parameters = ...) -> Cursor: ... def executemany(self, __sql: str, __parameters: Iterable[_Parameters]) -> Cursor: ... def executescript(self, __sql_script: str) -> Cursor: ... diff --git a/mypy/typeshed/stdlib/tkinter/__init__.pyi b/mypy/typeshed/stdlib/tkinter/__init__.pyi index a03c48c039dd..a0a88a8ac82e 100644 --- a/mypy/typeshed/stdlib/tkinter/__init__.pyi +++ b/mypy/typeshed/stdlib/tkinter/__init__.pyi @@ -6,7 +6,7 @@ from enum import Enum from tkinter.constants import * from tkinter.font import _FontDescription from types import TracebackType -from typing import Any, Generic, NamedTuple, Protocol, TypeVar, overload, type_check_only +from typing import Any, Generic, NamedTuple, TypeVar, overload, type_check_only from typing_extensions import Literal, TypeAlias, TypedDict if sys.version_info >= (3, 9): @@ -720,9 +720,6 @@ class Wm: def wm_withdraw(self) -> None: ... withdraw = wm_withdraw -class _ExceptionReportingCallback(Protocol): - def __call__(self, __exc: type[BaseException], __val: BaseException, __tb: TracebackType | None) -> object: ... - class Tk(Misc, Wm): master: None def __init__( @@ -764,7 +761,7 @@ class Tk(Misc, Wm): config = configure def destroy(self) -> None: ... def readprofile(self, baseName: str, className: str) -> None: ... - report_callback_exception: _ExceptionReportingCallback + report_callback_exception: Callable[[type[BaseException], BaseException, TracebackType | None], object] # Tk has __getattr__ so that tk_instance.foo falls back to tk_instance.tk.foo # Please keep in sync with _tkinter.TkappType. # Some methods are intentionally missing because they are inherited from Misc instead. 
diff --git a/mypy/typeshed/stdlib/turtle.pyi b/mypy/typeshed/stdlib/turtle.pyi index 80ea40879dee..36cd5f1f6e9d 100644 --- a/mypy/typeshed/stdlib/turtle.pyi +++ b/mypy/typeshed/stdlib/turtle.pyi @@ -129,6 +129,9 @@ __all__ = [ "Terminator", ] +if sys.version_info >= (3, 12): + __all__ += ["teleport"] + # Note: '_Color' is the alias we use for arguments and _AnyColor is the # alias we use for return types. Really, these two aliases should be the # same, but as per the "no union returns" typeshed policy, we'll return @@ -648,6 +651,9 @@ def shape(name: None = None) -> str: ... @overload def shape(name: str) -> None: ... +if sys.version_info >= (3, 12): + def teleport(x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: ... + # Unsafely overlaps when no arguments are provided @overload def shapesize() -> tuple[float, float, float]: ... # type: ignore[misc] diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi index a9bffdf5214f..2c1ebe6d7f95 100644 --- a/mypy/typeshed/stdlib/typing.pyi +++ b/mypy/typeshed/stdlib/typing.pyi @@ -126,6 +126,9 @@ if sys.version_info >= (3, 11): "reveal_type", ] +if sys.version_info >= (3, 12): + __all__ += ["TypeAliasType", "override"] + ContextManager = AbstractContextManager AsyncContextManager = AbstractAsyncContextManager @@ -323,7 +326,9 @@ AnyStr = TypeVar("AnyStr", str, bytes) # noqa: Y001 # Technically in 3.7 this inherited from GenericMeta. But let's not reflect that, since # type checkers tend to assume that Protocols all have the ABCMeta metaclass. -class _ProtocolMeta(ABCMeta): ... +class _ProtocolMeta(ABCMeta): + if sys.version_info >= (3, 12): + def __init__(cls, *args: Any, **kwargs: Any) -> None: ... # Abstract base classes. @@ -945,7 +950,7 @@ if sys.version_info >= (3, 10): def _type_repr(obj: object) -> str: ... if sys.version_info >= (3, 12): - def override(__arg: _F) -> _F: ... + def override(__method: _F) -> _F: ... 
@_final class TypeAliasType: def __init__( diff --git a/mypy/typeshed/stdlib/unittest/mock.pyi b/mypy/typeshed/stdlib/unittest/mock.pyi index 66120197b269..baf025bdeb5a 100644 --- a/mypy/typeshed/stdlib/unittest/mock.pyi +++ b/mypy/typeshed/stdlib/unittest/mock.pyi @@ -106,7 +106,25 @@ class Base: # We subclass with "Any" because mocks are explicitly designed to stand in for other types, # something that can't be expressed with our static type system. class NonCallableMock(Base, Any): - def __new__(__cls, *args: Any, **kw: Any) -> Self: ... + if sys.version_info >= (3, 12): + def __new__( + cls, + spec: list[str] | object | type[object] | None = None, + wraps: Any | None = None, + name: str | None = None, + spec_set: list[str] | object | type[object] | None = None, + parent: NonCallableMock | None = None, + _spec_state: Any | None = None, + _new_name: str = "", + _new_parent: NonCallableMock | None = None, + _spec_as_instance: bool = False, + _eat_self: bool | None = None, + unsafe: bool = False, + **kwargs: Any, + ) -> Self: ... + else: + def __new__(__cls, *args: Any, **kw: Any) -> Self: ... + def __init__( self, spec: list[str] | object | type[object] | None = None, diff --git a/mypy/typeshed/stdlib/urllib/request.pyi b/mypy/typeshed/stdlib/urllib/request.pyi index 079c9755528c..a4849dfa2e6e 100644 --- a/mypy/typeshed/stdlib/urllib/request.pyi +++ b/mypy/typeshed/stdlib/urllib/request.pyi @@ -235,7 +235,11 @@ class _HTTPConnectionProtocol(Protocol): ) -> HTTPConnection: ... class AbstractHTTPHandler(BaseHandler): # undocumented - def __init__(self, debuglevel: int = 0) -> None: ... + if sys.version_info >= (3, 12): + def __init__(self, debuglevel: int | None = None) -> None: ... + else: + def __init__(self, debuglevel: int = 0) -> None: ... + def set_http_debuglevel(self, level: int) -> None: ... def do_request_(self, request: Request) -> Request: ... def do_open(self, http_class: _HTTPConnectionProtocol, req: Request, **http_conn_args: Any) -> HTTPResponse: ... 
@@ -245,9 +249,15 @@ class HTTPHandler(AbstractHTTPHandler): def http_request(self, request: Request) -> Request: ... # undocumented class HTTPSHandler(AbstractHTTPHandler): - def __init__( - self, debuglevel: int = 0, context: ssl.SSLContext | None = None, check_hostname: bool | None = None - ) -> None: ... + if sys.version_info >= (3, 12): + def __init__( + self, debuglevel: int | None = None, context: ssl.SSLContext | None = None, check_hostname: bool | None = None + ) -> None: ... + else: + def __init__( + self, debuglevel: int = 0, context: ssl.SSLContext | None = None, check_hostname: bool | None = None + ) -> None: ... + def https_open(self, req: Request) -> HTTPResponse: ... def https_request(self, request: Request) -> Request: ... # undocumented diff --git a/mypy/typeshed/stdlib/weakref.pyi b/mypy/typeshed/stdlib/weakref.pyi index ecb98d4269d5..ca5366602ceb 100644 --- a/mypy/typeshed/stdlib/weakref.pyi +++ b/mypy/typeshed/stdlib/weakref.pyi @@ -65,6 +65,10 @@ class WeakValueDictionary(MutableMapping[_KT, _VT]): def copy(self) -> WeakValueDictionary[_KT, _VT]: ... __copy__ = copy def __deepcopy__(self, memo: Any) -> Self: ... + @overload + def get(self, key: _KT, default: None = None) -> _VT | None: ... + @overload + def get(self, key: _KT, default: _T) -> _VT | _T: ... # These are incompatible with Mapping def keys(self) -> Iterator[_KT]: ... # type: ignore[override] def values(self) -> Iterator[_VT]: ... # type: ignore[override] @@ -107,6 +111,10 @@ class WeakKeyDictionary(MutableMapping[_KT, _VT]): def copy(self) -> WeakKeyDictionary[_KT, _VT]: ... __copy__ = copy def __deepcopy__(self, memo: Any) -> Self: ... + @overload + def get(self, key: _KT, default: None = None) -> _VT | None: ... + @overload + def get(self, key: _KT, default: _T) -> _VT | _T: ... # These are incompatible with Mapping def keys(self) -> Iterator[_KT]: ... # type: ignore[override] def values(self) -> Iterator[_VT]: ... 
# type: ignore[override] diff --git a/mypy/typeshed/stdlib/zipfile.pyi b/mypy/typeshed/stdlib/zipfile.pyi index abda7a3b9625..dc07eb3f2a38 100644 --- a/mypy/typeshed/stdlib/zipfile.pyi +++ b/mypy/typeshed/stdlib/zipfile.pyi @@ -257,7 +257,11 @@ if sys.version_info >= (3, 8): @property def open(self) -> _PathOpenProtocol: ... - def iterdir(self) -> Iterator[Path]: ... + if sys.version_info >= (3, 10): + def iterdir(self) -> Iterator[Self]: ... + else: + def iterdir(self) -> Iterator[Path]: ... + def is_dir(self) -> bool: ... def is_file(self) -> bool: ... def exists(self) -> bool: ... @@ -274,6 +278,14 @@ if sys.version_info >= (3, 8): def joinpath(self, *other: StrPath) -> Path: ... else: def joinpath(self, add: StrPath) -> Path: ... # undocumented + if sys.version_info >= (3, 12): + def glob(self, pattern: str) -> Iterator[Self]: ... + def rglob(self, pattern: str) -> Iterator[Self]: ... + def is_symlink(self) -> Literal[False]: ... + def relative_to(self, other: Path, *extra: StrPath) -> str: ... + def match(self, path_pattern: str) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... def __truediv__(self, add: StrPath) -> Path: ... From 9859fe7ba3ae046ed3aa822f869ec4866978eac8 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Mon, 26 Sep 2022 12:55:07 -0700 Subject: [PATCH 051/144] Remove use of LiteralString in builtins (#13743) --- mypy/typeshed/stdlib/builtins.pyi | 93 ------------------------------- 1 file changed, 93 deletions(-) diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index cf4f857c5524..2c7331e87b1a 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -56,7 +56,6 @@ from typing import ( # noqa: Y022 from typing_extensions import ( Concatenate, Literal, - LiteralString, ParamSpec, Self, SupportsIndex, @@ -442,17 +441,8 @@ class str(Sequence[str]): def __new__(cls, object: object = ...) 
-> Self: ... @overload def __new__(cls, object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ... - @overload - def capitalize(self: LiteralString) -> LiteralString: ... - @overload def capitalize(self) -> str: ... # type: ignore[misc] - @overload - def casefold(self: LiteralString) -> LiteralString: ... - @overload def casefold(self) -> str: ... # type: ignore[misc] - @overload - def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... - @overload def center(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... @@ -460,20 +450,11 @@ class str(Sequence[str]): self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... if sys.version_info >= (3, 8): - @overload - def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: ... - @overload def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ... # type: ignore[misc] else: - @overload - def expandtabs(self: LiteralString, tabsize: int = 8) -> LiteralString: ... - @overload def expandtabs(self, tabsize: int = 8) -> str: ... # type: ignore[misc] def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - @overload - def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ... - @overload def format(self, *args: object, **kwargs: object) -> str: ... def format_map(self, map: _FormatMapMapping) -> str: ... def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... @@ -489,91 +470,32 @@ class str(Sequence[str]): def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... 
- @overload - def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ... - @overload def join(self, __iterable: Iterable[str]) -> str: ... # type: ignore[misc] - @overload - def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... - @overload def ljust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] - @overload - def lower(self: LiteralString) -> LiteralString: ... - @overload def lower(self) -> str: ... # type: ignore[misc] - @overload - def lstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... - @overload def lstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] - @overload - def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... - @overload def partition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] - @overload - def replace( - self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = -1 - ) -> LiteralString: ... - @overload def replace(self, __old: str, __new: str, __count: SupportsIndex = -1) -> str: ... # type: ignore[misc] if sys.version_info >= (3, 9): - @overload - def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ... - @overload def removeprefix(self, __prefix: str) -> str: ... # type: ignore[misc] - @overload - def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ... - @overload def removesuffix(self, __suffix: str) -> str: ... # type: ignore[misc] def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - @overload - def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... 
- @overload def rjust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] - @overload - def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... - @overload def rpartition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] - @overload - def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... - @overload def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] - @overload - def rstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... - @overload def rstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] - @overload - def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... - @overload def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] - @overload - def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: ... - @overload def splitlines(self, keepends: bool = False) -> list[str]: ... # type: ignore[misc] def startswith( self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... - @overload - def strip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... - @overload def strip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] - @overload - def swapcase(self: LiteralString) -> LiteralString: ... - @overload def swapcase(self) -> str: ... # type: ignore[misc] - @overload - def title(self: LiteralString) -> LiteralString: ... - @overload def title(self) -> str: ... # type: ignore[misc] def translate(self, __table: _TranslateTable) -> str: ... - @overload - def upper(self: LiteralString) -> LiteralString: ... 
- @overload def upper(self) -> str: ... # type: ignore[misc] - @overload - def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ... - @overload def zfill(self, __width: SupportsIndex) -> str: ... # type: ignore[misc] @staticmethod @overload @@ -584,9 +506,6 @@ class str(Sequence[str]): @staticmethod @overload def maketrans(__x: str, __y: str, __z: str) -> dict[int, int | None]: ... - @overload - def __add__(self: LiteralString, __value: LiteralString) -> LiteralString: ... - @overload def __add__(self, __value: str) -> str: ... # type: ignore[misc] # Incompatible with Sequence.__contains__ def __contains__(self, __key: str) -> bool: ... # type: ignore[override] @@ -595,25 +514,13 @@ class str(Sequence[str]): def __getitem__(self, __key: SupportsIndex | slice) -> str: ... def __gt__(self, __value: str) -> bool: ... def __hash__(self) -> int: ... - @overload - def __iter__(self: LiteralString) -> Iterator[LiteralString]: ... - @overload def __iter__(self) -> Iterator[str]: ... # type: ignore[misc] def __le__(self, __value: str) -> bool: ... def __len__(self) -> int: ... def __lt__(self, __value: str) -> bool: ... - @overload - def __mod__(self: LiteralString, __value: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ... - @overload def __mod__(self, __value: Any) -> str: ... - @overload - def __mul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ... - @overload def __mul__(self, __value: SupportsIndex) -> str: ... # type: ignore[misc] def __ne__(self, __value: object) -> bool: ... - @overload - def __rmul__(self: LiteralString, __value: SupportsIndex) -> LiteralString: ... - @overload def __rmul__(self, __value: SupportsIndex) -> str: ... # type: ignore[misc] def __getnewargs__(self) -> tuple[str]: ... 
From 378a866e90f534a0f0e9bbbca1324317ba784bbb Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 29 Oct 2022 12:47:21 -0700 Subject: [PATCH 052/144] Revert sum literal integer change (#13961) This is allegedly causing large performance problems, see 13821 typeshed/8231 had zero hits on mypy_primer, so it's not the worst thing to undo. Patching this in typeshed also feels weird, since there's a more general soundness issue. If a typevar has a bound or constraint, we might not want to solve it to a Literal. If we can confirm the performance regression or fix the unsoundness within mypy, I might pursue upstreaming this in typeshed. (Reminder: add this to the sync_typeshed script once merged) --- mypy/typeshed/stdlib/builtins.pyi | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index 2c7331e87b1a..9e413579e0fb 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -1698,11 +1698,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit # Instead, we special-case the most common examples of this: bool and literal integers. if sys.version_info >= (3, 8): @overload - def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = 0) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool], start: int = 0) -> int: ... # type: ignore[misc] else: @overload - def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = 0) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool], __start: int = 0) -> int: ... # type: ignore[misc] @overload def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ... 
From 2816b97d5c61355d089b291b861df4c64f4aa96a Mon Sep 17 00:00:00 2001 From: AlexWaygood Date: Mon, 1 May 2023 20:34:55 +0100 Subject: [PATCH 053/144] Revert typeshed ctypes change Since the plugin provides superior type checking: https://github.com/python/mypy/pull/13987#issuecomment-1310863427 A manual cherry-pick of e437cdf. --- mypy/typeshed/stdlib/_ctypes.pyi | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi index 1f15ac057988..538c07d54aad 100644 --- a/mypy/typeshed/stdlib/_ctypes.pyi +++ b/mypy/typeshed/stdlib/_ctypes.pyi @@ -159,11 +159,7 @@ class Array(_CData, Generic[_CT]): def _type_(self) -> type[_CT]: ... @_type_.setter def _type_(self, value: type[_CT]) -> None: ... - # Note: only available if _CT == c_char - @property - def raw(self) -> bytes: ... - @raw.setter - def raw(self, value: ReadableBuffer) -> None: ... + raw: bytes # Note: only available if _CT == c_char value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise # TODO These methods cannot be annotated correctly at the moment. 
# All of these "Any"s stand for the array's element type, but it's not possible to use _CT From 7d987a1056e4c6ee6f75aa4841f7e0c73ca9b496 Mon Sep 17 00:00:00 2001 From: AlexWaygood Date: Sat, 4 Mar 2023 13:14:11 +0000 Subject: [PATCH 054/144] Revert use of `ParamSpec` for `functools.wraps` --- mypy/typeshed/stdlib/functools.pyi | 40 +++++++++++------------------- 1 file changed, 14 insertions(+), 26 deletions(-) diff --git a/mypy/typeshed/stdlib/functools.pyi b/mypy/typeshed/stdlib/functools.pyi index 0d08cdb19e3f..1b4e59b7c120 100644 --- a/mypy/typeshed/stdlib/functools.pyi +++ b/mypy/typeshed/stdlib/functools.pyi @@ -1,9 +1,9 @@ import sys import types -from _typeshed import SupportsAllComparisons, SupportsItems +from _typeshed import IdentityFunction, SupportsAllComparisons, SupportsItems from collections.abc import Callable, Hashable, Iterable, Sequence, Sized from typing import Any, Generic, NamedTuple, TypeVar, overload -from typing_extensions import Literal, ParamSpec, Self, TypeAlias, TypedDict, final +from typing_extensions import Literal, Self, TypeAlias, TypedDict, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -28,12 +28,10 @@ if sys.version_info >= (3, 8): if sys.version_info >= (3, 9): __all__ += ["cache"] +_AnyCallable: TypeAlias = Callable[..., object] + _T = TypeVar("_T") _S = TypeVar("_S") -_PWrapped = ParamSpec("_PWrapped") -_RWrapped = TypeVar("_RWrapped") -_PWrapper = ParamSpec("_PWrapper") -_RWrapper = TypeVar("_RWrapper") @overload def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ... @@ -87,41 +85,31 @@ else: ] WRAPPER_UPDATES: tuple[Literal["__dict__"]] -class _Wrapped(Generic[_PWrapped, _RWrapped, _PWrapper, _RWrapper]): - __wrapped__: Callable[_PWrapped, _RWrapped] - def __call__(self, *args: _PWrapper.args, **kwargs: _PWrapper.kwargs) -> _RWrapper: ... 
- # as with ``Callable``, we'll assume that these attributes exist - __name__: str - __qualname__: str - -class _Wrapper(Generic[_PWrapped, _RWrapped]): - def __call__(self, f: Callable[_PWrapper, _RWrapper]) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... - if sys.version_info >= (3, 12): def update_wrapper( - wrapper: Callable[_PWrapper, _RWrapper], - wrapped: Callable[_PWrapped, _RWrapped], + wrapper: _T, + wrapped: _AnyCallable, assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), updated: Sequence[str] = ("__dict__",), - ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... + ) -> _T: ... def wraps( - wrapped: Callable[_PWrapped, _RWrapped], + wrapped: _AnyCallable, assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), updated: Sequence[str] = ("__dict__",), - ) -> _Wrapper[_PWrapped, _RWrapped]: ... + ) -> IdentityFunction: ... else: def update_wrapper( - wrapper: Callable[_PWrapper, _RWrapper], - wrapped: Callable[_PWrapped, _RWrapped], + wrapper: _T, + wrapped: _AnyCallable, assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), updated: Sequence[str] = ("__dict__",), - ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... + ) -> _T: ... def wraps( - wrapped: Callable[_PWrapped, _RWrapped], + wrapped: _AnyCallable, assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), updated: Sequence[str] = ("__dict__",), - ) -> _Wrapper[_PWrapped, _RWrapped]: ... + ) -> IdentityFunction: ... def total_ordering(cls: type[_T]) -> type[_T]: ... def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ... 
From ec665cc8b4f59e81ec28ea946bc673cb20028751 Mon Sep 17 00:00:00 2001 From: AlexWaygood Date: Fri, 15 Sep 2023 20:50:25 +0100 Subject: [PATCH 055/144] Fix the newly-uncovered stubtest bug --- mypy/stubtest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index a804835a632b..a5028581f7a1 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -1686,7 +1686,7 @@ def get_importable_stdlib_modules() -> set[str]: modules_by_finder[m.module_finder].add(m.name) for finder, module_group in modules_by_finder.items(): if ( - "site-packages" not in Path(finder.path).parents + "site-packages" not in Path(finder.path).parts # if "_queue" is present, it's most likely the module finder # for stdlib extension modules; # if "queue" is present, it's most likely the module finder From 0222bf492e035ab1062a6d6fc38abc249a8ae211 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Sat, 16 Sep 2023 21:57:54 +0100 Subject: [PATCH 056/144] Update hashes in `sync-typeshed.py` following typeshed sync (#16126) Followup to #16121 --- misc/sync-typeshed.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/misc/sync-typeshed.py b/misc/sync-typeshed.py index 36967f86262e..77f921a89b1b 100644 --- a/misc/sync-typeshed.py +++ b/misc/sync-typeshed.py @@ -179,10 +179,10 @@ def main() -> None: print("Created typeshed sync commit.") commits_to_cherry_pick = [ - "2f6b6e66c", # LiteralString reverts - "120af30e7", # sum reverts - "1866d28f1", # ctypes reverts - "3240da455", # ParamSpec for functools.wraps + "9859fe7ba", # LiteralString reverts + "378a866e9", # sum reverts + "2816b97d5", # ctypes reverts + "7d987a105", # ParamSpec for functools.wraps ] for commit in commits_to_cherry_pick: try: From b65cd9ae6e1ae4b25e4af4f0e855646bbe382b29 Mon Sep 17 00:00:00 2001 From: Ilya Priven Date: Sun, 17 Sep 2023 05:39:48 -0400 Subject: [PATCH 057/144] dataclass.replace: allow transformed classes (#15915) We [already 
synthesize](https://github.com/python/mypy/issues/15843#issuecomment-1685159995) `__dataclass_fields__` for all classes including `@dataclass_transform`'d ones, thus assume more than PEP-681 says. We might as well assume `dataclasses.replace` applies on all same classes. This way we risk false positive since it'll raise in runtime. Fixes #15843. --- mypy/plugins/dataclasses.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 99f079705c3f..f2ae3fd3d01e 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -375,9 +375,7 @@ def transform(self) -> bool: add_attribute_to_class(self._api, self._cls, "__match_args__", match_args_type) self._add_dataclass_fields_magic_attribute() - - if self._spec is _TRANSFORM_SPEC_FOR_DATACLASSES: - self._add_internal_replace_method(attributes) + self._add_internal_replace_method(attributes) if "__post_init__" in info.names: self._add_internal_post_init_method(attributes) From c99b93646c7edb5ae33a84c9b322b289b97e0117 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sun, 17 Sep 2023 05:22:59 -0700 Subject: [PATCH 058/144] Fix mypyc regression with pretty (#16124) Fixes #15877 Regression was introduced by #15070. 
Previously Errors objects created in mypyc build would just use all the default values, now they use the actual options object involved --- mypy/errors.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/mypy/errors.py b/mypy/errors.py index a678b790cb8c..4e62a48aeb27 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -909,8 +909,7 @@ def file_messages(self, path: str) -> list[str]: return [] self.flushed_files.add(path) source_lines = None - if self.options.pretty: - assert self.read_source + if self.options.pretty and self.read_source: source_lines = self.read_source(path) return self.format_messages(self.error_info_map[path], source_lines) From bf7eab682a2bc63ec90e868610231a105a9b415f Mon Sep 17 00:00:00 2001 From: Hamir Mahal Date: Sun, 17 Sep 2023 13:30:30 -0700 Subject: [PATCH 059/144] Use comments in issue template (#15742) Fixes #15741 --- .github/ISSUE_TEMPLATE/feature.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/feature.md b/.github/ISSUE_TEMPLATE/feature.md index 135bc2bd3b94..984e552e51b1 100644 --- a/.github/ISSUE_TEMPLATE/feature.md +++ b/.github/ISSUE_TEMPLATE/feature.md @@ -6,8 +6,8 @@ labels: "feature" **Feature** -(A clear and concise description of your feature proposal.) + **Pitch** -(Please explain why this feature should be implemented and how it would be used. Add examples, if applicable.) + From 9b9152484c6b1ba3934373ca0c7600f71392fb06 Mon Sep 17 00:00:00 2001 From: Hamir Mahal Date: Sun, 17 Sep 2023 13:32:20 -0700 Subject: [PATCH 060/144] Make it easier to copy commands from docs README (#16133) Fixes #16132. --- docs/README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/README.md b/docs/README.md index 0d574c9213a5..e72164c78560 100644 --- a/docs/README.md +++ b/docs/README.md @@ -15,13 +15,13 @@ Install Sphinx and other dependencies (i.e. theme) needed for the documentation. 
From the `docs` directory, use `pip`: ``` -$ pip install -r requirements-docs.txt +pip install -r requirements-docs.txt ``` Build the documentation like this: ``` -$ make html +make html ``` The built documentation will be placed in the `docs/build` directory. Open @@ -33,13 +33,13 @@ Helpful documentation build commands Clean the documentation build: ``` -$ make clean +make clean ``` Test and check the links found in the documentation: ``` -$ make linkcheck +make linkcheck ``` Documentation on Read The Docs From 1dcff0d2235ba6570f290a126f1bdd762f2d4991 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Mon, 18 Sep 2023 20:50:32 -0700 Subject: [PATCH 061/144] Preserve implicitly exported types via attribute access (#16129) Resolves #13965. Follow up to #13967. Unblocks #14086 --- mypy/checkmember.py | 15 ++++++++++++++- test-data/unit/check-flags.test | 26 +++++++++++++++++++------- test-data/unit/check-modules.test | 3 ++- 3 files changed, 35 insertions(+), 9 deletions(-) diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 59af0d402e14..4316a59281c3 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -24,6 +24,7 @@ FuncDef, IndexExpr, MypyFile, + NameExpr, OverloadedFuncDef, SymbolNode, SymbolTable, @@ -608,7 +609,19 @@ def analyze_member_var_access( mx.msg.undefined_in_superclass(name, mx.context) return AnyType(TypeOfAny.from_error) else: - return report_missing_attribute(mx.original_type, itype, name, mx) + ret = report_missing_attribute(mx.original_type, itype, name, mx) + # Avoid paying double jeopardy if we can't find the member due to --no-implicit-reexport + if ( + mx.module_symbol_table is not None + and name in mx.module_symbol_table + and not mx.module_symbol_table[name].module_public + ): + v = mx.module_symbol_table[name].node + e = NameExpr(name) + e.set_line(mx.context) + e.node = v + return mx.chk.expr_checker.analyze_ref_expr(e, lvalue=mx.is_lvalue) + return ret def 
check_final_member(name: str, info: TypeInfo, msg: MessageBuilder, ctx: Context) -> None: diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index 96f78d81dd16..06b7cab8391b 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -1611,14 +1611,22 @@ from other_module_2 import a # E: Module "other_module_2" does not explicitly e reveal_type(a) # N: Revealed type is "builtins.int" import other_module_2 -# TODO: this should also reveal builtins.int, see #13965 -reveal_type(other_module_2.a) # E: "object" does not explicitly export attribute "a" [attr-defined] \ - # N: Revealed type is "Any" +reveal_type(other_module_2.a) # E: Module "other_module_2" does not explicitly export attribute "a" [attr-defined] \ + # N: Revealed type is "builtins.int" + +from other_module_2 import b # E: Module "other_module_2" does not explicitly export attribute "b" [attr-defined] +reveal_type(b) # N: Revealed type is "def (a: builtins.int) -> builtins.str" + +import other_module_2 +reveal_type(other_module_2.b) # E: Module "other_module_2" does not explicitly export attribute "b" [attr-defined] \ + # N: Revealed type is "def (a: builtins.int) -> builtins.str" [file other_module_1.py] a = 5 +def b(a: int) -> str: ... [file other_module_2.py] -from other_module_1 import a +from other_module_1 import a, b +[builtins fixtures/module.pyi] [case testNoImplicitReexportRespectsAll] # flags: --no-implicit-reexport @@ -1649,11 +1657,15 @@ __all__ = ('b',) [case testNoImplicitReexportGetAttr] # flags: --no-implicit-reexport --python-version 3.7 from other_module_2 import a # E: Module "other_module_2" does not explicitly export attribute "a" +reveal_type(a) # N: Revealed type is "builtins.int" +from other_module_2 import b # E: Module "other_module_2" does not explicitly export attribute "b" +reveal_type(b) # N: Revealed type is "builtins.str" [file other_module_1.py] -from typing import Any -def __getattr__(name: str) -> Any: ... 
+b: str = "asdf" +def __getattr__(name: str) -> int: ... [file other_module_2.py] -from other_module_1 import a +from other_module_1 import a, b +def __getattr__(name: str) -> bytes: ... [builtins fixtures/tuple.pyi] [case textNoImplicitReexportSuggestions] diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index 94368f6c1113..abbdf4987c46 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -1862,7 +1862,8 @@ import stub reveal_type(stub.y) # N: Revealed type is "builtins.int" reveal_type(stub.z) # E: Module "stub" does not explicitly export attribute "z" \ - # N: Revealed type is "Any" + # N: Revealed type is "builtins.int" + [file stub.pyi] from substub import y as y From ba978f461e1f88327f9caa2e83774caaaeee1a6a Mon Sep 17 00:00:00 2001 From: Petter Friberg Date: Tue, 19 Sep 2023 08:10:31 +0200 Subject: [PATCH 062/144] Call dynamic class hook on generic classes (#16052) Fixes: #8359 CC @sobolevn `get_dynamic_class_hook()` will now additionally be called for generic classes with parameters. e.g. ```python y = SomeGenericClass[type, ...].method() ``` --- mypy/semanal.py | 7 ++++ test-data/unit/check-custom-plugin.test | 12 +++++- .../unit/plugins/dyn_class_from_method.py | 40 ++++++++++++++++++- 3 files changed, 57 insertions(+), 2 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index 70403eed57ae..e19cd86d5e89 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -3205,6 +3205,13 @@ def apply_dynamic_class_hook(self, s: AssignmentStmt) -> None: if isinstance(callee_expr, RefExpr) and callee_expr.fullname: method_name = call.callee.name fname = callee_expr.fullname + "." + method_name + elif ( + isinstance(callee_expr, IndexExpr) + and isinstance(callee_expr.base, RefExpr) + and isinstance(callee_expr.analyzed, TypeApplication) + ): + method_name = call.callee.name + fname = callee_expr.base.fullname + "." 
+ method_name elif isinstance(callee_expr, CallExpr): # check if chain call call = callee_expr diff --git a/test-data/unit/check-custom-plugin.test b/test-data/unit/check-custom-plugin.test index 22374d09cf9f..63529cf165ce 100644 --- a/test-data/unit/check-custom-plugin.test +++ b/test-data/unit/check-custom-plugin.test @@ -684,12 +684,16 @@ plugins=/test-data/unit/plugins/dyn_class.py [case testDynamicClassHookFromClassMethod] # flags: --config-file tmp/mypy.ini -from mod import QuerySet, Manager +from mod import QuerySet, Manager, GenericQuerySet MyManager = Manager.from_queryset(QuerySet) +ManagerFromGenericQuerySet = GenericQuerySet[int].as_manager() reveal_type(MyManager()) # N: Revealed type is "__main__.MyManager" reveal_type(MyManager().attr) # N: Revealed type is "builtins.str" +reveal_type(ManagerFromGenericQuerySet()) # N: Revealed type is "__main__.ManagerFromGenericQuerySet" +reveal_type(ManagerFromGenericQuerySet().attr) # N: Revealed type is "builtins.int" +queryset: GenericQuerySet[int] = ManagerFromGenericQuerySet() def func(manager: MyManager) -> None: reveal_type(manager) # N: Revealed type is "__main__.MyManager" @@ -704,6 +708,12 @@ class QuerySet: class Manager: @classmethod def from_queryset(cls, queryset_cls: Type[QuerySet]): ... +T = TypeVar("T") +class GenericQuerySet(Generic[T]): + attr: T + + @classmethod + def as_manager(cls): ... 
[builtins fixtures/classmethod.pyi] [file mypy.ini] diff --git a/test-data/unit/plugins/dyn_class_from_method.py b/test-data/unit/plugins/dyn_class_from_method.py index b84754654084..2630b16be66e 100644 --- a/test-data/unit/plugins/dyn_class_from_method.py +++ b/test-data/unit/plugins/dyn_class_from_method.py @@ -2,7 +2,19 @@ from typing import Callable -from mypy.nodes import GDEF, Block, ClassDef, RefExpr, SymbolTable, SymbolTableNode, TypeInfo +from mypy.nodes import ( + GDEF, + Block, + ClassDef, + IndexExpr, + MemberExpr, + NameExpr, + RefExpr, + SymbolTable, + SymbolTableNode, + TypeApplication, + TypeInfo, +) from mypy.plugin import DynamicClassDefContext, Plugin from mypy.types import Instance @@ -13,6 +25,8 @@ def get_dynamic_class_hook( ) -> Callable[[DynamicClassDefContext], None] | None: if "from_queryset" in fullname: return add_info_hook + if "as_manager" in fullname: + return as_manager_hook return None @@ -34,5 +48,29 @@ def add_info_hook(ctx: DynamicClassDefContext) -> None: ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info)) +def as_manager_hook(ctx: DynamicClassDefContext) -> None: + class_def = ClassDef(ctx.name, Block([])) + class_def.fullname = ctx.api.qualified_name(ctx.name) + + info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id) + class_def.info = info + assert isinstance(ctx.call.callee, MemberExpr) + assert isinstance(ctx.call.callee.expr, IndexExpr) + assert isinstance(ctx.call.callee.expr.analyzed, TypeApplication) + assert isinstance(ctx.call.callee.expr.analyzed.expr, NameExpr) + + queryset_type_fullname = ctx.call.callee.expr.analyzed.expr.fullname + queryset_node = ctx.api.lookup_fully_qualified_or_none(queryset_type_fullname) + assert queryset_node is not None + queryset_info = queryset_node.node + assert isinstance(queryset_info, TypeInfo) + parameter_type = ctx.call.callee.expr.analyzed.types[0] + + obj = ctx.api.named_type("builtins.object") + info.mro = [info, queryset_info, obj.type] + info.bases = 
[Instance(queryset_info, [parameter_type])] + ctx.api.add_symbol_table_node(ctx.name, SymbolTableNode(GDEF, info)) + + def plugin(version: str) -> type[DynPlugin]: return DynPlugin From 249f3f8285d9d2a0f77273ace805dac0eef684c6 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Mon, 18 Sep 2023 23:53:31 -0700 Subject: [PATCH 063/144] Fix inference for overloaded __call__ with generic self (#16053) Fixes #8283 Co-authored-by: ilevkivskyi --- mypy/checkexpr.py | 4 ++- mypy/checkmember.py | 13 ++++--- mypy/subtypes.py | 51 +++++++++++++++------------ test-data/unit/check-overloading.test | 24 +++++++++++++ test-data/unit/check-tuples.test | 14 ++++++++ 5 files changed, 76 insertions(+), 30 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index f46c8cb15c6f..7b9b84938930 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -1475,6 +1475,7 @@ def check_call( callable_node: Expression | None = None, callable_name: str | None = None, object_type: Type | None = None, + original_type: Type | None = None, ) -> tuple[Type, Type]: """Type check a call. @@ -1537,7 +1538,7 @@ def check_call( is_super=False, is_operator=True, msg=self.msg, - original_type=callee, + original_type=original_type or callee, chk=self.chk, in_literal_context=self.is_literal_context(), ) @@ -1578,6 +1579,7 @@ def check_call( callable_node, callable_name, object_type, + original_type=callee, ) else: return self.msg.not_callable(callee, context), AnyType(TypeOfAny.from_error) diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 4316a59281c3..1557b62917dc 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -331,13 +331,12 @@ def analyze_instance_member_access( signature = method.type signature = freshen_all_functions_type_vars(signature) if not method.is_static: - if name != "__call__": - # TODO: use proper treatment of special methods on unions instead - # of this hack here and below (i.e. mx.self_type). 
- dispatched_type = meet.meet_types(mx.original_type, typ) - signature = check_self_arg( - signature, dispatched_type, method.is_class, mx.context, name, mx.msg - ) + # TODO: use proper treatment of special methods on unions instead + # of this hack here and below (i.e. mx.self_type). + dispatched_type = meet.meet_types(mx.original_type, typ) + signature = check_self_arg( + signature, dispatched_type, method.is_class, mx.context, name, mx.msg + ) signature = bind_self(signature, mx.self_type, is_classmethod=method.is_class) # TODO: should we skip these steps for static methods as well? # Since generic static methods should not be allowed. diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 9ed2e4af4051..c5399db0a494 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -454,19 +454,22 @@ def visit_instance(self, left: Instance) -> bool: if isinstance(unpacked, Instance): return self._is_subtype(left, unpacked) if left.type.has_base(right.partial_fallback.type.fullname): - # Special case to consider Foo[*tuple[Any, ...]] (i.e. bare Foo) a - # subtype of Foo[], when Foo is user defined variadic tuple type. - mapped = map_instance_to_supertype(left, right.partial_fallback.type) - if len(mapped.args) == 1 and isinstance(mapped.args[0], UnpackType): - unpacked = get_proper_type(mapped.args[0].type) - if isinstance(unpacked, Instance): - assert unpacked.type.fullname == "builtins.tuple" - if isinstance(get_proper_type(unpacked.args[0]), AnyType): - return not self.proper_subtype - if mapped.type.fullname == "builtins.tuple" and isinstance( - get_proper_type(mapped.args[0]), AnyType - ): - return not self.proper_subtype + if not self.proper_subtype: + # Special case to consider Foo[*tuple[Any, ...]] (i.e. bare Foo) a + # subtype of Foo[], when Foo is user defined variadic tuple type. 
+ mapped = map_instance_to_supertype(left, right.partial_fallback.type) + for arg in map(get_proper_type, mapped.args): + if isinstance(arg, UnpackType): + unpacked = get_proper_type(arg.type) + if not isinstance(unpacked, Instance): + break + assert unpacked.type.fullname == "builtins.tuple" + if not isinstance(get_proper_type(unpacked.args[0]), AnyType): + break + elif not isinstance(arg, AnyType): + break + else: + return True return False if isinstance(right, TypeVarTupleType): # tuple[Any, ...] is like Any in the world of tuples (see special case above). @@ -534,15 +537,19 @@ def visit_instance(self, left: Instance) -> bool: right_args = ( right_prefix + (TupleType(list(right_middle), fallback),) + right_suffix ) - if len(t.args) == 1 and isinstance(t.args[0], UnpackType): - unpacked = get_proper_type(t.args[0].type) - if isinstance(unpacked, Instance): - assert unpacked.type.fullname == "builtins.tuple" - if ( - isinstance(get_proper_type(unpacked.args[0]), AnyType) - and not self.proper_subtype - ): - return True + if not self.proper_subtype: + for arg in map(get_proper_type, t.args): + if isinstance(arg, UnpackType): + unpacked = get_proper_type(arg.type) + if not isinstance(unpacked, Instance): + break + assert unpacked.type.fullname == "builtins.tuple" + if not isinstance(get_proper_type(unpacked.args[0]), AnyType): + break + elif not isinstance(arg, AnyType): + break + else: + return True type_params = zip(left_args, right_args, right.type.defn.type_vars) else: type_params = zip(t.args, right.args, right.type.defn.type_vars) diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index 4546c7171856..443a6fb5cb10 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -6650,3 +6650,27 @@ def d(x: int) -> int: ... def d(f: int, *, x: int) -> str: ... def d(*args, **kwargs): ... 
[builtins fixtures/tuple.pyi] + +[case testOverloadCallableGenericSelf] +from typing import Any, TypeVar, Generic, overload, reveal_type + +T = TypeVar("T") + +class MyCallable(Generic[T]): + def __init__(self, t: T): + self.t = t + + @overload + def __call__(self: "MyCallable[int]") -> str: ... + @overload + def __call__(self: "MyCallable[str]") -> int: ... + def __call__(self): ... + +c = MyCallable(5) +reveal_type(c) # N: Revealed type is "__main__.MyCallable[builtins.int]" +reveal_type(c()) # N: Revealed type is "builtins.str" + +c2 = MyCallable("test") +reveal_type(c2) # N: Revealed type is "__main__.MyCallable[builtins.str]" +reveal_type(c2()) # should be int # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index 391fa20db738..ed2c3550a04e 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -1434,7 +1434,21 @@ def foo(o: CallableTuple) -> int: class CallableTuple(Tuple[str, int]): def __call__(self, n: int, m: int) -> int: return n +[builtins fixtures/tuple.pyi] + +[case testTypeTupleGenericCall] +from typing import Generic, Tuple, TypeVar + +T = TypeVar('T') +def foo(o: CallableTuple[int]) -> int: + reveal_type(o) # N: Revealed type is "Tuple[builtins.str, builtins.int, fallback=__main__.CallableTuple[builtins.int]]" + reveal_type(o.count(3)) # N: Revealed type is "builtins.int" + return o(1, 2) + +class CallableTuple(Tuple[str, T]): + def __call__(self, n: int, m: int) -> int: + return n [builtins fixtures/tuple.pyi] [case testTupleCompatibleWithSequence] From c9929e2c906d377ca7026c4be10f88a1bd7ecff1 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Tue, 19 Sep 2023 18:42:06 +0300 Subject: [PATCH 064/144] Fix crash on dataclass field / property collision (#16147) I think the current error message is enough: https://github.com/python/mypy/issues/16141 CC @ikonst and @hauntsaninja --- mypy/plugins/dataclasses.py | 5 +++++ 
test-data/unit/check-dataclasses.test | 12 ++++++++++++ 2 files changed, 17 insertions(+) diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index f2ae3fd3d01e..a51b393fcbc4 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -23,6 +23,7 @@ ClassDef, Context, DataclassTransformSpec, + Decorator, Expression, FuncDef, FuncItem, @@ -575,6 +576,10 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: # but the only alternative would be to modify the SymbolTable, # and it's a little hairy to do that in a plugin. continue + if isinstance(node, Decorator): + # This might be a property / field name clash. + # We will issue an error later. + continue assert isinstance(node, Var) diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 35df84658259..d37ae569cc5e 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -2519,3 +2519,15 @@ a: MyDataclass b = [a, a] # trigger joining the types [builtins fixtures/dataclasses.pyi] + +[case testPropertyAndFieldRedefinitionNoCrash] +from dataclasses import dataclass + +@dataclass +class Foo: + @property + def c(self) -> int: + return 0 + + c: int # E: Name "c" already defined on line 5 +[builtins fixtures/dataclasses.pyi] From 7089a7fe635cfbed2916bb4f67243b317ccf37ea Mon Sep 17 00:00:00 2001 From: Anders Kaseorg Date: Tue, 19 Sep 2023 22:24:36 -0700 Subject: [PATCH 065/144] Do not consider `import a.b as b` an explicit reexport (#14086) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The point of the `import a as a` and `from a import b as b` syntax for explicit reexport is that it indicates an intention to do something different from the ordinary `import a` and `from a import b`. That is not the case with `import a.b as b`. 
Even mypy’s own code includes `import mypy.types as types`, which was not intended to be a reexport; if it were, it would be written `from mypy import types as types`. Pyright agrees that `import a.b as b` should not reexport. Signed-off-by: Anders Kaseorg --- mypy/semanal.py | 2 +- test-data/unit/check-modules.test | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index e19cd86d5e89..6e103e5d382c 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -2466,7 +2466,7 @@ def visit_import(self, i: Import) -> None: if as_id is not None: base_id = id imported_id = as_id - module_public = use_implicit_reexport or id.split(".")[-1] == as_id + module_public = use_implicit_reexport or id == as_id else: base_id = id.split(".")[0] imported_id = base_id diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index abbdf4987c46..44585fdd8d1a 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -1818,6 +1818,8 @@ m = n # E: Cannot assign multiple modules to name "m" without explicit "types.M from stub import Iterable # E: Module "stub" does not explicitly export attribute "Iterable" from stub import D # E: Module "stub" does not explicitly export attribute "D" from stub import C +from stub import foo +from stub import bar # E: Module "stub" does not explicitly export attribute "bar" c = C() reveal_type(c.x) # N: Revealed type is "builtins.int" @@ -1828,6 +1830,8 @@ reveal_type(it) # N: Revealed type is "typing.Iterable[builtins.int]" from typing import Iterable from substub import C as C from substub import C as D +from package import foo as foo +import package.bar as bar def fun(x: Iterable[str]) -> Iterable[int]: pass @@ -1835,6 +1839,10 @@ def fun(x: Iterable[str]) -> Iterable[int]: pass class C: x: int +[file package/foo.pyi] + +[file package/bar.pyi] + [builtins fixtures/module.pyi] [case testNoReExportFromStubsMemberType] From 
ff81a1c7abc91d9984fc73b9f2b9eab198001c8e Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Wed, 20 Sep 2023 13:50:48 +0300 Subject: [PATCH 066/144] Remove `is_classmethod_class` slot from `CallableType` (#16151) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This slot was not used anywhere: ``` » ag is_classmethod_class . ``` Moreover, since it was not initialized this code was failing with `AttributeError`: ```python x: CallableType for i in dir(x): print(i, getattr(x, i)) # failing on `is_classmethod_class` ``` --- mypy/types.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/mypy/types.py b/mypy/types.py index 22fcd601d6a0..2b5aec7789f7 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -1763,8 +1763,6 @@ class CallableType(FunctionLike): "definition", # For error messages. May be None. "variables", # Type variables for a generic function "is_ellipsis_args", # Is this Callable[..., t] (with literal '...')? - "is_classmethod_class", # Is this callable constructed for the benefit - # of a classmethod's 'cls' argument? "implicit", # Was this type implicitly generated instead of explicitly # specified by the user? "special_sig", # Non-None for signatures that require special handling From 9edda9a79790d8f7263234eca9509657ea0c37f0 Mon Sep 17 00:00:00 2001 From: Ikko Eltociear Ashimine Date: Sun, 24 Sep 2023 23:31:21 +0900 Subject: [PATCH 067/144] Fix typo in dataclasses.py (#16173) ``` heirarchy -> hierarchy ``` --- mypy/plugins/dataclasses.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index a51b393fcbc4..685d1b342055 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -730,7 +730,7 @@ def _freeze(self, attributes: list[DataclassAttribute]) -> None: for attr in attributes: # Classes that directly specify a dataclass_transform metaclass must be neither frozen # non non-frozen per PEP681. 
Though it is surprising, this means that attributes from - # such a class must be writable even if the rest of the class heirarchy is frozen. This + # such a class must be writable even if the rest of the class hierarchy is frozen. This # matches the behavior of Pyright (the reference implementation). if attr.is_neither_frozen_nor_nonfrozen: continue From 0c8b76195a773363721d5521653bcdf9989d8768 Mon Sep 17 00:00:00 2001 From: Ali Hamdan Date: Mon, 25 Sep 2023 02:28:08 +0200 Subject: [PATCH 068/144] stubgen: multiple fixes to the generated imports (#15624) * Fix handling of nested imports. Instead of assuming that a name is imported from a top level package, look in the imports for this name starting from the parent submodule up until the import is found * Fix "from imports" getting reexported unnecessarily * Fix import sorting when having import aliases Fixes #13661 Fixes #7006 --- mypy/stubgen.py | 24 ++++++++++----- test-data/unit/stubgen.test | 60 +++++++++++++++++++++++++++++++++++-- 2 files changed, 74 insertions(+), 10 deletions(-) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index ca7249465746..e8c12ee4d99b 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -496,7 +496,9 @@ def add_import(self, module: str, alias: str | None = None) -> None: name = name.rpartition(".")[0] def require_name(self, name: str) -> None: - self.required_names.add(name.split(".")[0]) + while name not in self.direct_imports and "." in name: + name = name.rsplit(".", 1)[0] + self.required_names.add(name) def reexport(self, name: str) -> None: """Mark a given non qualified name as needed in __all__. @@ -516,7 +518,10 @@ def import_lines(self) -> list[str]: # be imported from it. 
the names can also be alias in the form 'original as alias' module_map: Mapping[str, list[str]] = defaultdict(list) - for name in sorted(self.required_names): + for name in sorted( + self.required_names, + key=lambda n: (self.reverse_alias[n], n) if n in self.reverse_alias else (n, ""), + ): # If we haven't seen this name in an import statement, ignore it if name not in self.module_for: continue @@ -540,7 +545,7 @@ def import_lines(self) -> list[str]: assert "." not in name # Because reexports only has nonqualified names result.append(f"import {name} as {name}\n") else: - result.append(f"import {self.direct_imports[name]}\n") + result.append(f"import {name}\n") # Now generate all the from ... import ... lines collected in module_map for module, names in sorted(module_map.items()): @@ -595,7 +600,7 @@ def visit_name_expr(self, e: NameExpr) -> None: self.refs.add(e.name) def visit_instance(self, t: Instance) -> None: - self.add_ref(t.type.fullname) + self.add_ref(t.type.name) super().visit_instance(t) def visit_unbound_type(self, t: UnboundType) -> None: @@ -614,7 +619,10 @@ def visit_callable_type(self, t: CallableType) -> None: t.ret_type.accept(self) def add_ref(self, fullname: str) -> None: - self.refs.add(fullname.split(".")[-1]) + self.refs.add(fullname) + while "." in fullname: + fullname = fullname.rsplit(".", 1)[0] + self.refs.add(fullname) class StubGenerator(mypy.traverser.TraverserVisitor): @@ -1295,6 +1303,7 @@ def visit_import_from(self, o: ImportFrom) -> None: if ( as_name is None and name not in self.referenced_names + and not any(n.startswith(name + ".") for n in self.referenced_names) and (not self._all_ or name in IGNORED_DUNDERS) and not is_private and module not in ("abc", "asyncio") + TYPING_MODULE_NAMES @@ -1303,14 +1312,15 @@ def visit_import_from(self, o: ImportFrom) -> None: # exported, unless there is an explicit __all__. Note that we need to special # case 'abc' since some references are deleted during semantic analysis. 
exported = True - top_level = full_module.split(".")[0] + top_level = full_module.split(".", 1)[0] + self_top_level = self.module.split(".", 1)[0] if ( as_name is None and not self.export_less and (not self._all_ or name in IGNORED_DUNDERS) and self.module and not is_private - and top_level in (self.module.split(".")[0], "_" + self.module.split(".")[0]) + and top_level in (self_top_level, "_" + self_top_level) ): # Export imports from the same package, since we can't reliably tell whether they # are part of the public API. diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index 828680fadcf2..23dbf36a551b 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -2772,9 +2772,9 @@ y: b.Y z: p.a.X [out] +import p.a import p.a as a import p.b as b -import p.a x: a.X y: b.Y @@ -2787,7 +2787,7 @@ from p import a x: a.X [out] -from p import a as a +from p import a x: a.X @@ -2809,7 +2809,7 @@ from p import a x: a.X [out] -from p import a as a +from p import a x: a.X @@ -2859,6 +2859,60 @@ import p.a x: a.X y: p.a.Y +[case testNestedImports] +import p +import p.m1 +import p.m2 + +x: p.X +y: p.m1.Y +z: p.m2.Z + +[out] +import p +import p.m1 +import p.m2 + +x: p.X +y: p.m1.Y +z: p.m2.Z + +[case testNestedImportsAliased] +import p as t +import p.m1 as pm1 +import p.m2 as pm2 + +x: t.X +y: pm1.Y +z: pm2.Z + +[out] +import p as t +import p.m1 as pm1 +import p.m2 as pm2 + +x: t.X +y: pm1.Y +z: pm2.Z + +[case testNestedFromImports] +from p import m1 +from p.m1 import sm1 +from p.m2 import sm2 + +x: m1.X +y: sm1.Y +z: sm2.Z + +[out] +from p import m1 +from p.m1 import sm1 +from p.m2 import sm2 + +x: m1.X +y: sm1.Y +z: sm2.Z + [case testOverload_fromTypingImport] from typing import Tuple, Union, overload From 4b66fa9de07828621fee1d53abd533f3903e570a Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 27 Sep 2023 00:29:11 +0100 Subject: [PATCH 069/144] Special-case type inference of empty collections (#16122) Fixes 
https://github.com/python/mypy/issues/230 Fixes https://github.com/python/mypy/issues/6463 I bet it fixes some other duplicates, I closed couple yesterday, but likely there are more. This may look a bit ad-hoc, but after some thinking this now starts to make sense to me for two reasons: * Unless I am missing something, this should be completely safe. Special-casing only applies to inferred types (i.e. empty collection literals etc). * Empty collections _are_ actually special. Even if we solve some classes of issues with more principled solutions (e.g. I want to re-work type inference against unions in near future), there will always be some corner cases involving empty collections. Similar issues keep coming, so I think it is a good idea to add this special-casing (especially taking into account how simple it is, and that it closer some "popular" issues). --- mypy/solve.py | 14 ++++++++++++ mypy/subtypes.py | 7 ++++++ mypy/test/testpep561.py | 2 +- test-data/unit/check-inference-context.test | 11 ++-------- test-data/unit/check-inference.test | 24 +++++++++++++++++++++ test-data/unit/check-varargs.test | 24 ++++----------------- 6 files changed, 52 insertions(+), 30 deletions(-) diff --git a/mypy/solve.py b/mypy/solve.py index 7cdf1c10c9b5..52e6549e98a6 100644 --- a/mypy/solve.py +++ b/mypy/solve.py @@ -239,6 +239,20 @@ def solve_one(lowers: Iterable[Type], uppers: Iterable[Type]) -> Type | None: top: Type | None = None candidate: Type | None = None + # Filter out previous results of failed inference, they will only spoil the current pass... + new_uppers = [] + for u in uppers: + pu = get_proper_type(u) + if not isinstance(pu, UninhabitedType) or not pu.ambiguous: + new_uppers.append(u) + uppers = new_uppers + + # ...unless this is the only information we have, then we just pass it on. 
+ if not uppers and not lowers: + candidate = UninhabitedType() + candidate.ambiguous = True + return candidate + # Process each bound separately, and calculate the lower and upper # bounds based on constraints. Note that we assume that the constraint # targets do not have constraint references. diff --git a/mypy/subtypes.py b/mypy/subtypes.py index c5399db0a494..822c4b0ebf32 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -18,6 +18,7 @@ ARG_STAR2, CONTRAVARIANT, COVARIANT, + INVARIANT, Decorator, FuncBase, OverloadedFuncDef, @@ -342,6 +343,12 @@ def _is_subtype( def check_type_parameter( left: Type, right: Type, variance: int, proper_subtype: bool, subtype_context: SubtypeContext ) -> bool: + # It is safe to consider empty collection literals and similar as covariant, since + # such type can't be stored in a variable, see checker.is_valid_inferred_type(). + if variance == INVARIANT: + p_left = get_proper_type(left) + if isinstance(p_left, UninhabitedType) and p_left.ambiguous: + variance = COVARIANT if variance == COVARIANT: if proper_subtype: return is_proper_subtype(left, right, subtype_context=subtype_context) diff --git a/mypy/test/testpep561.py b/mypy/test/testpep561.py index 48d0658cd1e9..9d2628c1fa5f 100644 --- a/mypy/test/testpep561.py +++ b/mypy/test/testpep561.py @@ -131,7 +131,7 @@ def test_pep561(testcase: DataDrivenTestCase) -> None: steps = testcase.find_steps() if steps != [[]]: - steps = [[]] + steps # type: ignore[assignment] + steps = [[]] + steps for i, operations in enumerate(steps): perform_file_operations(operations) diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test index 169fee65f127..773a9ffd8274 100644 --- a/test-data/unit/check-inference-context.test +++ b/test-data/unit/check-inference-context.test @@ -1321,11 +1321,7 @@ from typing import List, TypeVar T = TypeVar('T', bound=int) def f(x: List[T]) -> List[T]: ... 
-# TODO: improve error message for such cases, see #3283 and #5706 -y: List[str] = f([]) \ - # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[str]") \ - # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \ - # N: Consider using "Sequence" instead, which is covariant +y: List[str] = f([]) [builtins fixtures/list.pyi] [case testWideOuterContextNoArgs] @@ -1342,10 +1338,7 @@ from typing import TypeVar, Optional, List T = TypeVar('T', bound=int) def f(x: Optional[T] = None) -> List[T]: ... -y: List[str] = f() \ - # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[str]") \ - # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \ - # N: Consider using "Sequence" instead, which is covariant +y: List[str] = f() [builtins fixtures/list.pyi] [case testUseCovariantGenericOuterContext] diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index f9a4d58c74af..caa44cb40ad4 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -3686,3 +3686,27 @@ def g(*args: str) -> None: pass reveal_type(f(g)) # N: Revealed type is "Tuple[Never, Never]" \ # E: Argument 1 to "f" has incompatible type "Callable[[VarArg(str)], None]"; expected "Call[Never]" [builtins fixtures/list.pyi] + +[case testInferenceWorksWithEmptyCollectionsNested] +from typing import List, TypeVar, NoReturn +T = TypeVar('T') +def f(a: List[T], b: List[T]) -> T: pass +x = ["yes"] +reveal_type(f(x, [])) # N: Revealed type is "builtins.str" +reveal_type(f(["yes"], [])) # N: Revealed type is "builtins.str" + +empty: List[NoReturn] +f(x, empty) # E: Cannot infer type argument 1 of "f" +f(["no"], empty) # E: Cannot infer type argument 1 of "f" +[builtins fixtures/list.pyi] + +[case testInferenceWorksWithEmptyCollectionsUnion] +from typing import Any, Dict, NoReturn, 
NoReturn, Union + +def foo() -> Union[Dict[str, Any], Dict[int, Any]]: + return {} + +empty: Dict[NoReturn, NoReturn] +def bar() -> Union[Dict[str, Any], Dict[int, Any]]: + return empty +[builtins fixtures/dict.pyi] diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test index 41668e991972..2495a883aa71 100644 --- a/test-data/unit/check-varargs.test +++ b/test-data/unit/check-varargs.test @@ -602,31 +602,15 @@ class A: pass class B: pass if int(): - a, aa = G().f(*[a]) \ - # E: Incompatible types in assignment (expression has type "List[A]", variable has type "A") \ - # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[A]") \ - # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \ - # N: Consider using "Sequence" instead, which is covariant - + a, aa = G().f(*[a]) # E: Incompatible types in assignment (expression has type "List[A]", variable has type "A") if int(): aa, a = G().f(*[a]) # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "A") if int(): - ab, aa = G().f(*[a]) \ - # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[A]") \ - # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \ - # N: Consider using "Sequence" instead, which is covariant \ - # E: Argument 1 to "f" of "G" has incompatible type "*List[A]"; expected "B" - + ab, aa = G().f(*[a]) # E: Argument 1 to "f" of "G" has incompatible type "*List[A]"; expected "B" if int(): - ao, ao = G().f(*[a]) \ - # E: Incompatible types in assignment (expression has type "List[Never]", variable has type "List[object]") \ - # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \ - # N: Consider using "Sequence" instead, which is covariant + ao, ao = G().f(*[a]) if int(): - aa, aa = G().f(*[a]) \ - # E: Incompatible types in 
assignment (expression has type "List[Never]", variable has type "List[A]") \ - # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \ - # N: Consider using "Sequence" instead, which is covariant + aa, aa = G().f(*[a]) [builtins fixtures/list.pyi] [case testCallerTupleVarArgsAndGenericCalleeVarArg] From 5f6961b38acd7381ff3f8071f1f31db192cba368 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 27 Sep 2023 23:34:50 +0100 Subject: [PATCH 070/144] Use upper bounds as fallback solutions for inference (#16184) Fixes https://github.com/python/mypy/issues/13220 This looks a bit ad-hoc, but it is probably the least disruptive solution possible. --- mypy/solve.py | 35 +++++++++++++++++++++++++++++ test-data/unit/check-inference.test | 8 +++++++ 2 files changed, 43 insertions(+) diff --git a/mypy/solve.py b/mypy/solve.py index 52e6549e98a6..4d0ca6b7af24 100644 --- a/mypy/solve.py +++ b/mypy/solve.py @@ -109,6 +109,13 @@ def solve_constraints( else: candidate = AnyType(TypeOfAny.special_form) res.append(candidate) + + if not free_vars: + # Most of the validation for solutions is done in applytype.py, but here we can + # quickly test solutions w.r.t. to upper bounds, and use the latter (if possible), + # if solutions are actually not valid (due to poor inference context). + res = pre_validate_solutions(res, original_vars, constraints) + return res, free_vars @@ -487,3 +494,31 @@ def check_linear(scc: set[TypeVarId], lowers: Bounds, uppers: Bounds) -> bool: def get_vars(target: Type, vars: list[TypeVarId]) -> set[TypeVarId]: """Find type variables for which we are solving in a target type.""" return {tv.id for tv in get_all_type_vars(target)} & set(vars) + + +def pre_validate_solutions( + solutions: list[Type | None], + original_vars: Sequence[TypeVarLikeType], + constraints: list[Constraint], +) -> list[Type | None]: + """Check is each solution satisfies the upper bound of the corresponding type variable. 
+ + If it doesn't satisfy the bound, check if bound itself satisfies all constraints, and + if yes, use it instead as a fallback solution. + """ + new_solutions: list[Type | None] = [] + for t, s in zip(original_vars, solutions): + if s is not None and not is_subtype(s, t.upper_bound): + bound_satisfies_all = True + for c in constraints: + if c.op == SUBTYPE_OF and not is_subtype(t.upper_bound, c.target): + bound_satisfies_all = False + break + if c.op == SUPERTYPE_OF and not is_subtype(c.target, t.upper_bound): + bound_satisfies_all = False + break + if bound_satisfies_all: + new_solutions.append(t.upper_bound) + continue + new_solutions.append(s) + return new_solutions diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index caa44cb40ad4..348eb8b60076 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -3542,6 +3542,14 @@ T = TypeVar("T") def type_or_callable(value: T, tp: Union[Type[T], Callable[[int], T]]) -> T: ... reveal_type(type_or_callable(A("test"), A)) # N: Revealed type is "__main__.A" +[case testUpperBoundAsInferenceFallback] +from typing import Callable, TypeVar, Any, Mapping, Optional +T = TypeVar("T", bound=Mapping[str, Any]) +def raises(opts: Optional[T]) -> T: pass +def assertRaises(cb: Callable[..., object]) -> None: pass +assertRaises(raises) # OK +[builtins fixtures/dict.pyi] + [case testJoinWithAnyFallback] from unknown import X # type: ignore[import] From d25d68065c18a30d975685bd7a13cb7d085a200c Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Thu, 28 Sep 2023 00:27:13 +0100 Subject: [PATCH 071/144] Use type variable bound when it appears as actual during inference (#16178) This should help with re-enabling the use of `ParamSpec` in `functools.wraps` (as it looks like some of the new errors in https://github.com/AlexWaygood/mypy/pull/11 are caused by not handling this). 
--------- Co-authored-by: hauntsaninja Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> --- mypy/constraints.py | 12 ++++++++++++ test-data/unit/check-inference.test | 30 +++++++++++++++++++++++++++++ 2 files changed, 42 insertions(+) diff --git a/mypy/constraints.py b/mypy/constraints.py index 0524e38f9643..b61d882da3c4 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -328,6 +328,18 @@ def _infer_constraints( if isinstance(template, TypeVarType): return [Constraint(template, direction, actual)] + if ( + isinstance(actual, TypeVarType) + and not actual.id.is_meta_var() + and direction == SUPERTYPE_OF + ): + # Unless template is also a type variable (or a union that contains one), using the upper + # bound for inference will usually give better result for actual that is a type variable. + if not isinstance(template, UnionType) or not any( + isinstance(t, TypeVarType) for t in template.items + ): + actual = get_proper_type(actual.upper_bound) + # Now handle the case of either template or actual being a Union. # For a Union to be a subtype of another type, every item of the Union # must be a subtype of that type, so concatenate the constraints. diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 348eb8b60076..0a95ffdd50cf 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -3695,6 +3695,36 @@ reveal_type(f(g)) # N: Revealed type is "Tuple[Never, Never]" \ # E: Argument 1 to "f" has incompatible type "Callable[[VarArg(str)], None]"; expected "Call[Never]" [builtins fixtures/list.pyi] +[case testInferenceAgainstTypeVarActualBound] +from typing import Callable, TypeVar + +T = TypeVar("T") +S = TypeVar("S") +def test(f: Callable[[T], S]) -> Callable[[T], S]: ... 
+ +F = TypeVar("F", bound=Callable[..., object]) +def dec(f: F) -> F: + reveal_type(test(f)) # N: Revealed type is "def (Any) -> builtins.object" + return f + +[case testInferenceAgainstTypeVarActualUnionBound] +from typing import Protocol, TypeVar, Union + +T_co = TypeVar("T_co", covariant=True) +class SupportsFoo(Protocol[T_co]): + def foo(self) -> T_co: ... + +class A: + def foo(self) -> A: ... +class B: + def foo(self) -> B: ... + +def foo(f: SupportsFoo[T_co]) -> T_co: ... + +ABT = TypeVar("ABT", bound=Union[A, B]) +def simpler(k: ABT): + foo(k) + [case testInferenceWorksWithEmptyCollectionsNested] from typing import List, TypeVar, NoReturn T = TypeVar('T') From 0291ec90d46655d47fcf220be7eab8b5f7c035e7 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Thu, 28 Sep 2023 22:32:36 +0100 Subject: [PATCH 072/144] Better support for variadic calls and indexing (#16131) This improves support for two features that were supported but only partially: variadic calls, and variadic indexing. Some notes: * I did not add dedicated support for slicing of tuples with homogeneous variadic items (except for cases covered by TypeVarTuple support, i.e. those not involving splitting of variadic item). This is tricky and it is not clear what cases people actually want. I left a TODO for this. * I prohibit multiple variadic items in a call expression. Technically, we can support some situations involving these, but this is tricky, and prohibiting this would be in the "spirit" of the PEP, IMO. * I may have still missed some cases for the calls, since there are so many options. If you have ideas for additional test cases, please let me know. * It was necessary to fix overload ambiguity logic to make some tests pass. This goes beyond TypeVarTuple support, but I think this is a correct change. 
--- mypy/checkexpr.py | 156 ++++++++++++++++++++---- mypy/constraints.py | 31 +++-- mypy/erasetype.py | 4 +- mypy/message_registry.py | 1 + mypy/types.py | 55 +++++++-- test-data/unit/check-overloading.test | 3 +- test-data/unit/check-tuples.test | 5 +- test-data/unit/check-typevar-tuple.test | 108 ++++++++++++++-- 8 files changed, 306 insertions(+), 57 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 7b9b84938930..95ab75e24585 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -1640,6 +1640,27 @@ def check_callable_call( callee.type_object().name, abstract_attributes, context ) + var_arg = callee.var_arg() + if var_arg and isinstance(var_arg.typ, UnpackType): + # It is hard to support multiple variadic unpacks (except for old-style *args: int), + # fail gracefully to avoid crashes later. + seen_unpack = False + for arg, arg_kind in zip(args, arg_kinds): + if arg_kind != ARG_STAR: + continue + arg_type = get_proper_type(self.accept(arg)) + if not isinstance(arg_type, TupleType) or any( + isinstance(t, UnpackType) for t in arg_type.items + ): + if seen_unpack: + self.msg.fail( + "Passing multiple variadic unpacks in a call is not supported", + context, + code=codes.CALL_ARG, + ) + return AnyType(TypeOfAny.from_error), callee + seen_unpack = True + formal_to_actual = map_actuals_to_formals( arg_kinds, arg_names, @@ -2405,7 +2426,7 @@ def check_argument_types( ] actual_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * (len(actuals) - 1) - # TODO: can we really assert this? What if formal is just plain Unpack[Ts]? + # If we got here, the callee was previously inferred to have a suffix. 
assert isinstance(orig_callee_arg_type, UnpackType) assert isinstance(orig_callee_arg_type.type, ProperType) and isinstance( orig_callee_arg_type.type, TupleType @@ -2431,22 +2452,29 @@ def check_argument_types( inner_unpack = unpacked_type.items[inner_unpack_index] assert isinstance(inner_unpack, UnpackType) inner_unpacked_type = get_proper_type(inner_unpack.type) - # We assume heterogenous tuples are desugared earlier - assert isinstance(inner_unpacked_type, Instance) - assert inner_unpacked_type.type.fullname == "builtins.tuple" - callee_arg_types = ( - unpacked_type.items[:inner_unpack_index] - + [inner_unpacked_type.args[0]] - * (len(actuals) - len(unpacked_type.items) + 1) - + unpacked_type.items[inner_unpack_index + 1 :] - ) - callee_arg_kinds = [ARG_POS] * len(actuals) + if isinstance(inner_unpacked_type, TypeVarTupleType): + # This branch mimics the expanded_tuple case above but for + # the case where caller passed a single * unpacked tuple argument. + callee_arg_types = unpacked_type.items + callee_arg_kinds = [ + ARG_POS if i != inner_unpack_index else ARG_STAR + for i in range(len(unpacked_type.items)) + ] + else: + # We assume heterogeneous tuples are desugared earlier. + assert isinstance(inner_unpacked_type, Instance) + assert inner_unpacked_type.type.fullname == "builtins.tuple" + callee_arg_types = ( + unpacked_type.items[:inner_unpack_index] + + [inner_unpacked_type.args[0]] + * (len(actuals) - len(unpacked_type.items) + 1) + + unpacked_type.items[inner_unpack_index + 1 :] + ) + callee_arg_kinds = [ARG_POS] * len(actuals) elif isinstance(unpacked_type, TypeVarTupleType): callee_arg_types = [orig_callee_arg_type] callee_arg_kinds = [ARG_STAR] else: - # TODO: Any and Never can appear in Unpack (as a result of user error), - # fail gracefully here and elsewhere (and/or normalize them away). 
assert isinstance(unpacked_type, Instance) assert unpacked_type.type.fullname == "builtins.tuple" callee_arg_types = [unpacked_type.args[0]] * len(actuals) @@ -2458,8 +2486,10 @@ def check_argument_types( assert len(actual_types) == len(actuals) == len(actual_kinds) if len(callee_arg_types) != len(actual_types): - # TODO: Improve error message - self.chk.fail("Invalid number of arguments", context) + if len(actual_types) > len(callee_arg_types): + self.chk.msg.too_many_arguments(callee, context) + else: + self.chk.msg.too_few_arguments(callee, context, None) continue assert len(callee_arg_types) == len(actual_types) @@ -2764,11 +2794,17 @@ def infer_overload_return_type( ) is_match = not w.has_new_errors() if is_match: - # Return early if possible; otherwise record info so we can + # Return early if possible; otherwise record info, so we can # check for ambiguity due to 'Any' below. if not args_contain_any: return ret_type, infer_type - matches.append(typ) + p_infer_type = get_proper_type(infer_type) + if isinstance(p_infer_type, CallableType): + # Prefer inferred types if possible, this will avoid false triggers for + # Any-ambiguity caused by arguments with Any passed to generic overloads. + matches.append(p_infer_type) + else: + matches.append(typ) return_types.append(ret_type) inferred_types.append(infer_type) type_maps.append(m) @@ -4109,6 +4145,12 @@ def visit_index_with_type( # Visit the index, just to make sure we have a type for it available self.accept(index) + if isinstance(left_type, TupleType) and any( + isinstance(it, UnpackType) for it in left_type.items + ): + # Normalize variadic tuples for consistency. + left_type = expand_type(left_type, {}) + if isinstance(left_type, UnionType): original_type = original_type or left_type # Don't combine literal types, since we may need them for type narrowing. 
@@ -4129,12 +4171,15 @@ def visit_index_with_type(
             if ns is not None:
                 out = []
                 for n in ns:
-                    if n < 0:
-                        n += len(left_type.items)
-                    if 0 <= n < len(left_type.items):
-                        out.append(left_type.items[n])
+                    item = self.visit_tuple_index_helper(left_type, n)
+                    if item is not None:
+                        out.append(item)
                     else:
                         self.chk.fail(message_registry.TUPLE_INDEX_OUT_OF_RANGE, e)
+                        if any(isinstance(t, UnpackType) for t in left_type.items):
+                            self.chk.note(
+                                f"Variadic tuple can have length {left_type.length() - 1}", e
+                            )
                         return AnyType(TypeOfAny.from_error)
                 return make_simplified_union(out)
             else:
@@ -4158,6 +4203,46 @@ def visit_index_with_type(
             e.method_type = method_type
             return result
 
+    def visit_tuple_index_helper(self, left: TupleType, n: int) -> Type | None:
+        unpack_index = find_unpack_in_list(left.items)
+        if unpack_index is None:
+            if n < 0:
+                n += len(left.items)
+            if 0 <= n < len(left.items):
+                return left.items[n]
+            return None
+        unpack = left.items[unpack_index]
+        assert isinstance(unpack, UnpackType)
+        unpacked = get_proper_type(unpack.type)
+        if isinstance(unpacked, TypeVarTupleType):
+            # Usually we say that TypeVarTuple can't be split, but in case of
+            # indexing it seems benign to just return the fallback item, similar
+            # to what we do when indexing a regular TypeVar.
+            middle = unpacked.tuple_fallback.args[0]
+        else:
+            assert isinstance(unpacked, Instance)
+            assert unpacked.type.fullname == "builtins.tuple"
+            middle = unpacked.args[0]
+        if n >= 0:
+            if n < unpack_index:
+                return left.items[n]
+            if n >= len(left.items) - 1:
+                # For tuple[int, *tuple[str, ...], int] we allow either index 0 or 1,
+                # since variadic item may have zero items.
+                return None
+            return UnionType.make_union(
+                [middle] + left.items[unpack_index + 1 : n + 2], left.line, left.column
+            )
+        n += len(left.items)
+        if n <= 0:
+            # Similar to above, we only allow -1, and -2 for tuple[int, *tuple[str, ...], int]
+            return None
+        if n > unpack_index:
+            return left.items[n]
+        return UnionType.make_union(
+            left.items[n - 1 : unpack_index] + [middle], left.line, left.column
+        )
+
     def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr) -> Type:
         begin: Sequence[int | None] = [None]
         end: Sequence[int | None] = [None]
@@ -4183,7 +4268,11 @@ def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr) -> Typ
         items: list[Type] = []
         for b, e, s in itertools.product(begin, end, stride):
-            items.append(left_type.slice(b, e, s))
+            item = left_type.slice(b, e, s)
+            if item is None:
+                self.chk.fail(message_registry.AMBIGUOUS_SLICE_OF_VARIADIC_TUPLE, slic)
+                return AnyType(TypeOfAny.from_error)
+            items.append(item)
         return make_simplified_union(items)
 
     def try_getting_int_literals(self, index: Expression) -> list[int] | None:
@@ -4192,7 +4281,7 @@ def try_getting_int_literals(self, index: Expression) -> list[int] | None:
         Otherwise, returns None.
 
         Specifically, this function is guaranteed to return a list with
-        one or more ints if one one the following is true:
+        one or more ints if one of the following is true:
 
         1. 'expr' is a IntExpr or a UnaryExpr backed by an IntExpr
         2.
'typ' is a LiteralType containing an int @@ -4223,11 +4312,30 @@ def try_getting_int_literals(self, index: Expression) -> list[int] | None: def nonliteral_tuple_index_helper(self, left_type: TupleType, index: Expression) -> Type: self.check_method_call_by_name("__getitem__", left_type, [index], [ARG_POS], context=index) # We could return the return type from above, but unions are often better than the join - union = make_simplified_union(left_type.items) + union = self.union_tuple_fallback_item(left_type) if isinstance(index, SliceExpr): return self.chk.named_generic_type("builtins.tuple", [union]) return union + def union_tuple_fallback_item(self, left_type: TupleType) -> Type: + # TODO: this duplicates logic in typeops.tuple_fallback(). + items = [] + for item in left_type.items: + if isinstance(item, UnpackType): + unpacked_type = get_proper_type(item.type) + if isinstance(unpacked_type, TypeVarTupleType): + unpacked_type = get_proper_type(unpacked_type.upper_bound) + if ( + isinstance(unpacked_type, Instance) + and unpacked_type.type.fullname == "builtins.tuple" + ): + items.append(unpacked_type.args[0]) + else: + raise NotImplementedError + else: + items.append(item) + return make_simplified_union(items) + def visit_typeddict_index_expr( self, td_type: TypedDictType, index: Expression, setitem: bool = False ) -> Type: diff --git a/mypy/constraints.py b/mypy/constraints.py index b61d882da3c4..ebd6765e8e82 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -137,25 +137,38 @@ def infer_constraints_for_callable( unpack_type = callee.arg_types[i] assert isinstance(unpack_type, UnpackType) - # In this case we are binding all of the actuals to *args + # In this case we are binding all the actuals to *args, # and we want a constraint that the typevar tuple being unpacked # is equal to a type list of all the actuals. 
actual_types = [] + + unpacked_type = get_proper_type(unpack_type.type) + if isinstance(unpacked_type, TypeVarTupleType): + tuple_instance = unpacked_type.tuple_fallback + elif isinstance(unpacked_type, TupleType): + tuple_instance = unpacked_type.partial_fallback + else: + assert False, "mypy bug: unhandled constraint inference case" + for actual in actuals: actual_arg_type = arg_types[actual] if actual_arg_type is None: continue - actual_types.append( - mapper.expand_actual_type( - actual_arg_type, - arg_kinds[actual], - callee.arg_names[i], - callee.arg_kinds[i], - ) + expanded_actual = mapper.expand_actual_type( + actual_arg_type, arg_kinds[actual], callee.arg_names[i], callee.arg_kinds[i] ) - unpacked_type = get_proper_type(unpack_type.type) + if arg_kinds[actual] != ARG_STAR or isinstance( + get_proper_type(actual_arg_type), TupleType + ): + actual_types.append(expanded_actual) + else: + # If we are expanding an iterable inside * actual, append a homogeneous item instead + actual_types.append( + UnpackType(tuple_instance.copy_modified(args=[expanded_actual])) + ) + if isinstance(unpacked_type, TypeVarTupleType): constraints.append( Constraint( diff --git a/mypy/erasetype.py b/mypy/erasetype.py index 24471f918319..7231ede66c65 100644 --- a/mypy/erasetype.py +++ b/mypy/erasetype.py @@ -82,7 +82,9 @@ def visit_instance(self, t: Instance) -> ProperType: # Valid erasure for *Ts is *tuple[Any, ...], not just Any. 
if isinstance(tv, TypeVarTupleType): args.append( - tv.tuple_fallback.copy_modified(args=[AnyType(TypeOfAny.special_form)]) + UnpackType( + tv.tuple_fallback.copy_modified(args=[AnyType(TypeOfAny.special_form)]) + ) ) else: args.append(AnyType(TypeOfAny.special_form)) diff --git a/mypy/message_registry.py b/mypy/message_registry.py index 713ec2e3c759..d75a1fab1b66 100644 --- a/mypy/message_registry.py +++ b/mypy/message_registry.py @@ -83,6 +83,7 @@ def with_additional_msg(self, info: str) -> ErrorMessage: INCOMPATIBLE_TYPES_IN_CAPTURE: Final = ErrorMessage("Incompatible types in capture pattern") MUST_HAVE_NONE_RETURN_TYPE: Final = ErrorMessage('The return type of "{}" must be None') TUPLE_INDEX_OUT_OF_RANGE: Final = ErrorMessage("Tuple index out of range") +AMBIGUOUS_SLICE_OF_VARIADIC_TUPLE: Final = ErrorMessage("Ambiguous slice of a variadic tuple") INVALID_SLICE_INDEX: Final = ErrorMessage("Slice index must be an integer, SupportsIndex or None") CANNOT_INFER_LAMBDA_TYPE: Final = ErrorMessage("Cannot infer type of lambda") CANNOT_ACCESS_INIT: Final = ( diff --git a/mypy/types.py b/mypy/types.py index 2b5aec7789f7..9817043db6c2 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -2416,14 +2416,53 @@ def copy_modified( items = self.items return TupleType(items, fallback, self.line, self.column) - def slice(self, begin: int | None, end: int | None, stride: int | None) -> TupleType: - return TupleType( - self.items[begin:end:stride], - self.partial_fallback, - self.line, - self.column, - self.implicit, - ) + def slice(self, begin: int | None, end: int | None, stride: int | None) -> TupleType | None: + if any(isinstance(t, UnpackType) for t in self.items): + total = len(self.items) + unpack_index = find_unpack_in_list(self.items) + assert unpack_index is not None + if begin is None and end is None: + # We special-case this to support reversing variadic tuples. + # General support for slicing is tricky, so we handle only simple cases. 
+                if stride == -1:
+                    slice_items = self.items[::-1]
+                elif stride is None or stride == 1:
+                    slice_items = self.items
+                else:
+                    return None
+            elif (begin is None or unpack_index >= begin >= 0) and (
+                end is not None and unpack_index >= end >= 0
+            ):
+                # Start and end are in the prefix, everything works in this case.
+                slice_items = self.items[begin:end:stride]
+            elif (begin is not None and unpack_index - total < begin < 0) and (
+                end is None or unpack_index - total < end < 0
+            ):
+                # Start and end are in the suffix, everything works in this case.
+                slice_items = self.items[begin:end:stride]
+            elif (begin is None or unpack_index >= begin >= 0) and (
+                end is None or unpack_index - total < end < 0
+            ):
+                # Start in the prefix, end in the suffix, we can support only trivial strides.
+                if stride is None or stride == 1:
+                    slice_items = self.items[begin:end:stride]
+                else:
+                    return None
+            elif (begin is not None and unpack_index - total < begin < 0) and (
+                end is not None and unpack_index >= end >= 0
+            ):
+                # Start in the suffix, end in the prefix, we can support only trivial strides.
+                if stride is None or stride == -1:
+                    slice_items = self.items[begin:end:stride]
+                else:
+                    return None
+            else:
+                # TODO: there are some additional cases we can support for homogeneous variadic
+ return None + else: + slice_items = self.items[begin:end:stride] + return TupleType(slice_items, self.partial_fallback, self.line, self.column, self.implicit) class TypedDictType(ProperType): diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index 443a6fb5cb10..b97eeb48115c 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -6501,8 +6501,7 @@ eggs = lambda: 'eggs' reveal_type(func(eggs)) # N: Revealed type is "def (builtins.str) -> builtins.str" spam: Callable[..., str] = lambda x, y: 'baz' -reveal_type(func(spam)) # N: Revealed type is "def (*Any, **Any) -> Any" - +reveal_type(func(spam)) # N: Revealed type is "def (*Any, **Any) -> builtins.str" [builtins fixtures/paramspec.pyi] [case testGenericOverloadOverlapWithType] diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index ed2c3550a04e..9dfee38bc0c6 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -1678,7 +1678,6 @@ def zip(*i: Iterable[Any]) -> Iterator[Tuple[Any, ...]]: ... def zip(i): ... def g(t: Tuple): - # Ideally, we'd infer that these are iterators of tuples - reveal_type(zip(*t)) # N: Revealed type is "typing.Iterator[Any]" - reveal_type(zip(t)) # N: Revealed type is "typing.Iterator[Any]" + reveal_type(zip(*t)) # N: Revealed type is "typing.Iterator[builtins.tuple[Any, ...]]" + reveal_type(zip(t)) # N: Revealed type is "typing.Iterator[Tuple[Any]]" [builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index d38d492fe9b2..e8d7966029e3 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -366,13 +366,25 @@ from typing_extensions import TypeVarTuple, Unpack Ts = TypeVarTuple("Ts") -# TODO: add less trivial tests with prefix/suffix etc. -# TODO: add tests that call with a type var tuple instead of just args. 
def args_to_tuple(*args: Unpack[Ts]) -> Tuple[Unpack[Ts]]: reveal_type(args) # N: Revealed type is "Tuple[Unpack[Ts`-1]]" - return args + reveal_type(args_to_tuple(1, *args)) # N: Revealed type is "Tuple[Literal[1]?, Unpack[Ts`-1]]" + reveal_type(args_to_tuple(*args, 'a')) # N: Revealed type is "Tuple[Unpack[Ts`-1], Literal['a']?]" + reveal_type(args_to_tuple(1, *args, 'a')) # N: Revealed type is "Tuple[Literal[1]?, Unpack[Ts`-1], Literal['a']?]" + args_to_tuple(*args, *args) # E: Passing multiple variadic unpacks in a call is not supported + ok = (1, 'a') + reveal_type(args_to_tuple(*ok, *ok)) # N: Revealed type is "Tuple[builtins.int, builtins.str, builtins.int, builtins.str]" + if int(): + return args + else: + return args_to_tuple(*args) reveal_type(args_to_tuple(1, 'a')) # N: Revealed type is "Tuple[Literal[1]?, Literal['a']?]" +vt: Tuple[int, ...] +reveal_type(args_to_tuple(1, *vt)) # N: Revealed type is "Tuple[Literal[1]?, Unpack[builtins.tuple[builtins.int, ...]]]" +reveal_type(args_to_tuple(*vt, 'a')) # N: Revealed type is "Tuple[Unpack[builtins.tuple[builtins.int, ...]], Literal['a']?]" +reveal_type(args_to_tuple(1, *vt, 'a')) # N: Revealed type is "Tuple[Literal[1]?, Unpack[builtins.tuple[builtins.int, ...]], Literal['a']?]" +args_to_tuple(*vt, *vt) # E: Passing multiple variadic unpacks in a call is not supported [builtins fixtures/tuple.pyi] [case testTypeVarTuplePep646TypeVarStarArgs] @@ -381,8 +393,17 @@ from typing_extensions import TypeVarTuple, Unpack Ts = TypeVarTuple("Ts") +def args_to_tuple(*args: Unpack[Ts]) -> Tuple[Unpack[Ts]]: + with_prefix_suffix(*args) # E: Too few arguments for "with_prefix_suffix" \ + # E: Argument 1 to "with_prefix_suffix" has incompatible type "*Tuple[Unpack[Ts]]"; expected "bool" + new_args = (True, "foo", *args, 5) + with_prefix_suffix(*new_args) + return args + def with_prefix_suffix(*args: Unpack[Tuple[bool, str, Unpack[Ts], int]]) -> Tuple[bool, str, Unpack[Ts], int]: reveal_type(args) # N: Revealed type is 
"Tuple[builtins.bool, builtins.str, Unpack[Ts`-1], builtins.int]" + reveal_type(args_to_tuple(*args)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Unpack[Ts`-1], builtins.int]" + reveal_type(args_to_tuple(1, *args, 'a')) # N: Revealed type is "Tuple[Literal[1]?, builtins.bool, builtins.str, Unpack[Ts`-1], builtins.int, Literal['a']?]" return args reveal_type(with_prefix_suffix(True, "bar", "foo", 5)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Literal['foo']?, builtins.int]" @@ -395,8 +416,7 @@ t = (True, "bar", "foo", 5) reveal_type(with_prefix_suffix(*t)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, builtins.str, builtins.int]" reveal_type(with_prefix_suffix(True, *("bar", "foo"), 5)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Literal['foo']?, builtins.int]" -# TODO: handle list case -#reveal_type(with_prefix_suffix(True, "bar", *["foo1", "foo2"], 5)) +reveal_type(with_prefix_suffix(True, "bar", *["foo1", "foo2"], 5)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Unpack[builtins.tuple[builtins.str, ...]], builtins.int]" bad_t = (True, "bar") with_prefix_suffix(*bad_t) # E: Too few arguments for "with_prefix_suffix" @@ -434,7 +454,7 @@ reveal_type(C().foo2) # N: Revealed type is "def (*args: Unpack[Tuple[builtins. 
[case testTypeVarTuplePep646TypeVarStarArgsVariableLengthTuple] from typing import Tuple -from typing_extensions import Unpack +from typing_extensions import Unpack, TypeVarTuple def foo(*args: Unpack[Tuple[int, ...]]) -> None: reveal_type(args) # N: Revealed type is "builtins.tuple[builtins.int, ...]" @@ -442,11 +462,28 @@ def foo(*args: Unpack[Tuple[int, ...]]) -> None: foo(0, 1, 2) foo(0, 1, "bar") # E: Argument 3 to "foo" has incompatible type "str"; expected "int" - def foo2(*args: Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]) -> None: reveal_type(args) # N: Revealed type is "Tuple[builtins.str, Unpack[builtins.tuple[builtins.int, ...]], builtins.bool, builtins.bool]" - # TODO: generate an error - # reveal_type(args[1]) + reveal_type(args[1]) # N: Revealed type is "builtins.int" + +def foo3(*args: Unpack[Tuple[str, Unpack[Tuple[int, ...]], str, float]]) -> None: + reveal_type(args[0]) # N: Revealed type is "builtins.str" + reveal_type(args[1]) # N: Revealed type is "Union[builtins.int, builtins.str]" + reveal_type(args[2]) # N: Revealed type is "Union[builtins.int, builtins.str, builtins.float]" + args[3] # E: Tuple index out of range \ + # N: Variadic tuple can have length 3 + reveal_type(args[-1]) # N: Revealed type is "builtins.float" + reveal_type(args[-2]) # N: Revealed type is "builtins.str" + reveal_type(args[-3]) # N: Revealed type is "Union[builtins.str, builtins.int]" + args[-4] # E: Tuple index out of range \ + # N: Variadic tuple can have length 3 + reveal_type(args[::-1]) # N: Revealed type is "Tuple[builtins.float, builtins.str, Unpack[builtins.tuple[builtins.int, ...]], builtins.str]" + args[::2] # E: Ambiguous slice of a variadic tuple + args[:2] # E: Ambiguous slice of a variadic tuple + +Ts = TypeVarTuple("Ts") +def foo4(*args: Unpack[Tuple[str, Unpack[Ts], bool, bool]]) -> None: + reveal_type(args[1]) # N: Revealed type is "builtins.object" foo2("bar", 1, 2, 3, False, True) foo2(0, 1, 2, 3, False, True) # E: Argument 1 to "foo2" 
has incompatible type "int"; expected "str" @@ -908,7 +945,7 @@ def cons( return wrapped def star(f: Callable[[X], Y]) -> Callable[[Unpack[Tuple[X, ...]]], Tuple[Y, ...]]: - def wrapped(*xs: X): + def wrapped(*xs: X) -> Tuple[Y, ...]: if not xs: return nil() return cons(f, star(f))(*xs) @@ -1516,3 +1553,54 @@ def test(x: int, t: Tuple[T, ...]) -> Tuple[int, Unpack[Tuple[T, ...]]]: ... a: Any = test(42, ()) [builtins fixtures/tuple.pyi] + +[case testTypeVarTupleIndexTypeVar] +from typing import Any, List, Sequence, Tuple, TypeVar +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +def f(data: Sequence[Tuple[Unpack[Ts]]]) -> List[Any]: + return [d[0] for d in data] # E: Tuple index out of range \ + # N: Variadic tuple can have length 0 + +T = TypeVar("T") +def g(data: Sequence[Tuple[T, Unpack[Ts]]]) -> List[T]: + return [d[0] for d in data] # OK +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleOverloadMatch] +from typing import Any, Generic, overload, Tuple, TypeVar +from typing_extensions import TypeVarTuple, Unpack + +_Ts = TypeVarTuple("_Ts") +_T = TypeVar("_T") +_T2 = TypeVar("_T2") + +class Container(Generic[_T]): ... +class Array(Generic[Unpack[_Ts]]): ... + +@overload +def build(entity: Container[_T], /) -> Array[_T]: ... +@overload +def build(entity: Container[_T], entity2: Container[_T2], /) -> Array[_T, _T2]: ... +@overload +def build(*entities: Container[Any]) -> Array[Unpack[Tuple[Any, ...]]]: ... +def build(*entities: Container[Any]) -> Array[Unpack[Tuple[Any, ...]]]: + ... 
+ +def test(a: Container[Any], b: Container[int], c: Container[str]): + reveal_type(build(a, b)) # N: Revealed type is "__main__.Array[Any, builtins.int]" + reveal_type(build(b, c)) # N: Revealed type is "__main__.Array[builtins.int, builtins.str]" +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleIndexOldStyleNonNormalizedAndNonLiteral] +from typing import Any, Tuple +from typing_extensions import Unpack + +t: Tuple[Unpack[Tuple[int, ...]]] +reveal_type(t[42]) # N: Revealed type is "builtins.int" +i: int +reveal_type(t[i]) # N: Revealed type is "builtins.int" +t1: Tuple[int, Unpack[Tuple[int, ...]]] +reveal_type(t1[i]) # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] From fddfc8dfb29ef9adec02f46eda8e92f74bdd7c9c Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Thu, 28 Sep 2023 23:02:38 +0100 Subject: [PATCH 073/144] Fix walrus interaction with empty collections (#16197) This fixes a regression caused by https://github.com/python/mypy/pull/16122 --- mypy/checkexpr.py | 5 ++++- test-data/unit/check-python38.test | 8 ++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 95ab75e24585..c132b35e5a2a 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -4093,7 +4093,10 @@ def visit_assignment_expr(self, e: AssignmentExpr) -> Type: value = self.accept(e.value) self.chk.check_assignment(e.target, e.value) self.chk.check_final(e) - self.chk.store_type(e.target, value) + if not has_uninhabited_component(value): + # TODO: can we get rid of this extra store_type()? + # Usually, check_assignment() already stores the lvalue type correctly. 
+ self.chk.store_type(e.target, value) self.find_partial_type_ref_fast_path(e.target) return value diff --git a/test-data/unit/check-python38.test b/test-data/unit/check-python38.test index d83f29f2186a..1e99c760b67a 100644 --- a/test-data/unit/check-python38.test +++ b/test-data/unit/check-python38.test @@ -826,3 +826,11 @@ main:5: error: Dict entry 0 has incompatible type "str": "str"; expected "str": main:5: error: Unpacked dict entry 1 has incompatible type "Dict[str, str]"; expected "SupportsKeysAndGetItem[str, int]" dct: Dict[str, int] = {"a": "b", **other} ^~~~~ + +[case testWalrusAssignmentEmptyCollection] +from typing import List + +y: List[int] +if (y := []): + reveal_type(y) # N: Revealed type is "builtins.list[builtins.int]" +[builtins fixtures/list.pyi] From 181cbe88f1396f2f52770f59b6bbb13c6521980a Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 29 Sep 2023 00:42:38 +0100 Subject: [PATCH 074/144] Add more tests for variadic Callables (#16198) Supersedes https://github.com/python/mypy/pull/15254 Note the error message for one of the test is slightly different. Although it _may_ suggest that `Unpack[...]` is a valid type on its own, this error is kind of more consistent with old style `*args: int` annotations. 
--- test-data/unit/check-typevar-tuple.test | 49 +++++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index e8d7966029e3..850b7ef8a524 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -1604,3 +1604,52 @@ reveal_type(t[i]) # N: Revealed type is "builtins.int" t1: Tuple[int, Unpack[Tuple[int, ...]]] reveal_type(t1[i]) # N: Revealed type is "builtins.int" [builtins fixtures/tuple.pyi] + +[case testTypeVarTupleNotConcreteCallable] +from typing_extensions import Unpack, TypeVarTuple +from typing import Callable, TypeVar, Tuple + +T = TypeVar("T") +Args = TypeVarTuple("Args") +Args2 = TypeVarTuple("Args2") + +def submit(fn: Callable[[Unpack[Args]], T], *args: Unpack[Args]) -> T: + ... + +def submit2(fn: Callable[[int, Unpack[Args]], T], *args: Unpack[Tuple[int, Unpack[Args]]]) -> T: + ... + +def foo(func: Callable[[Unpack[Args]], T], *args: Unpack[Args]) -> T: + return submit(func, *args) + +def foo2(func: Callable[[Unpack[Args2]], T], *args: Unpack[Args2]) -> T: + return submit(func, *args) + +def foo3(func: Callable[[int, Unpack[Args2]], T], *args: Unpack[Args2]) -> T: + return submit2(func, 1, *args) + +def foo_bad(func: Callable[[Unpack[Args2]], T], *args: Unpack[Args2]) -> T: + return submit2(func, 1, *args) # E: Argument 1 to "submit2" has incompatible type "Callable[[VarArg(Unpack[Args2])], T]"; expected "Callable[[int, VarArg(Unpack[Args2])], T]" +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleParamSpecInteraction] +from typing_extensions import Unpack, TypeVarTuple, ParamSpec +from typing import Callable, TypeVar + +T = TypeVar("T") +Args = TypeVarTuple("Args") +Args2 = TypeVarTuple("Args2") +P = ParamSpec("P") + +def submit(fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> T: + ... 
+ +def foo(func: Callable[[Unpack[Args]], T], *args: Unpack[Args]) -> T: + return submit(func, *args) + +def foo2(func: Callable[[Unpack[Args]], T], *args: Unpack[Args2]) -> T: + return submit(func, *args) # E: Argument 2 to "submit" has incompatible type "*Tuple[Unpack[Args2]]"; expected "Unpack[Args]" + +def foo3(func: Callable[[int, Unpack[Args2]], T], *args: Unpack[Args2]) -> T: + return submit(func, 1, *args) +[builtins fixtures/tuple.pyi] From acccdd8a25b019e6b08180e2f95417a29651435e Mon Sep 17 00:00:00 2001 From: Jelle Zijlstra Date: Sat, 30 Sep 2023 15:13:09 -0700 Subject: [PATCH 075/144] Fix error code on "Maybe you forgot to use await" note (#16203) Fixes #16202 --- mypy/checker.py | 6 +++--- mypy/checkexpr.py | 2 +- mypy/checkmember.py | 6 +++--- mypy/messages.py | 26 ++++++++++++++++++++------ test-data/unit/check-async-await.test | 27 +++++++++++++++++++++++++++ 5 files changed, 54 insertions(+), 13 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 95a65b0a8cd1..bdb636541db0 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -6237,7 +6237,7 @@ def check_subtype( assert call is not None if not is_subtype(subtype, call, options=self.options): self.msg.note_call(supertype, call, context, code=msg.code) - self.check_possible_missing_await(subtype, supertype, context) + self.check_possible_missing_await(subtype, supertype, context, code=msg.code) return False def get_precise_awaitable_type(self, typ: Type, local_errors: ErrorWatcher) -> Type | None: @@ -6271,7 +6271,7 @@ def checking_await_set(self) -> Iterator[None]: self.checking_missing_await = False def check_possible_missing_await( - self, subtype: Type, supertype: Type, context: Context + self, subtype: Type, supertype: Type, context: Context, code: ErrorCode | None ) -> None: """Check if the given type becomes a subtype when awaited.""" if self.checking_missing_await: @@ -6285,7 +6285,7 @@ def check_possible_missing_await( aw_type, supertype, context, 
msg=message_registry.INCOMPATIBLE_TYPES ): return - self.msg.possible_missing_await(context) + self.msg.possible_missing_await(context, code) def contains_none(self, t: Type) -> bool: t = get_proper_type(t) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index c132b35e5a2a..df4077100efb 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2563,7 +2563,7 @@ def check_arg( original_caller_type, callee_type, context, code=code ) if not self.msg.prefer_simple_messages(): - self.chk.check_possible_missing_await(caller_type, callee_type, context) + self.chk.check_possible_missing_await(caller_type, callee_type, context, code) def check_overload_call( self, diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 1557b62917dc..5a4f3875ad04 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -272,11 +272,11 @@ def report_missing_attribute( mx: MemberContext, override_info: TypeInfo | None = None, ) -> Type: - res_type = mx.msg.has_no_attr(original_type, typ, name, mx.context, mx.module_symbol_table) + error_code = mx.msg.has_no_attr(original_type, typ, name, mx.context, mx.module_symbol_table) if not mx.msg.prefer_simple_messages(): if may_be_awaitable_attribute(name, typ, mx, override_info): - mx.msg.possible_missing_await(mx.context) - return res_type + mx.msg.possible_missing_await(mx.context, error_code) + return AnyType(TypeOfAny.from_error) # The several functions that follow implement analyze_member_access for various diff --git a/mypy/messages.py b/mypy/messages.py index 8bc190b7d66d..47ebd94f3d21 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -355,7 +355,7 @@ def has_no_attr( member: str, context: Context, module_symbol_table: SymbolTable | None = None, - ) -> Type: + ) -> ErrorCode | None: """Report a missing or non-accessible member. original_type is the top-level type on which the error occurred. 
@@ -370,44 +370,49 @@ def has_no_attr( directly available on original_type If member corresponds to an operator, use the corresponding operator - name in the messages. Return type Any. + name in the messages. Return the error code that was produced, if any. """ original_type = get_proper_type(original_type) typ = get_proper_type(typ) if isinstance(original_type, Instance) and original_type.type.has_readable_member(member): self.fail(f'Member "{member}" is not assignable', context) + return None elif member == "__contains__": self.fail( f"Unsupported right operand type for in ({format_type(original_type, self.options)})", context, code=codes.OPERATOR, ) + return codes.OPERATOR elif member in op_methods.values(): # Access to a binary operator member (e.g. _add). This case does # not handle indexing operations. for op, method in op_methods.items(): if method == member: self.unsupported_left_operand(op, original_type, context) - break + return codes.OPERATOR elif member == "__neg__": self.fail( f"Unsupported operand type for unary - ({format_type(original_type, self.options)})", context, code=codes.OPERATOR, ) + return codes.OPERATOR elif member == "__pos__": self.fail( f"Unsupported operand type for unary + ({format_type(original_type, self.options)})", context, code=codes.OPERATOR, ) + return codes.OPERATOR elif member == "__invert__": self.fail( f"Unsupported operand type for ~ ({format_type(original_type, self.options)})", context, code=codes.OPERATOR, ) + return codes.OPERATOR elif member == "__getitem__": # Indexed get. # TODO: Fix this consistently in format_type @@ -418,12 +423,14 @@ def has_no_attr( ), context, ) + return None else: self.fail( f"Value of type {format_type(original_type, self.options)} is not indexable", context, code=codes.INDEX, ) + return codes.INDEX elif member == "__setitem__": # Indexed set. 
self.fail( @@ -433,6 +440,7 @@ def has_no_attr( context, code=codes.INDEX, ) + return codes.INDEX elif member == "__call__": if isinstance(original_type, Instance) and ( original_type.type.fullname == "builtins.function" @@ -440,12 +448,14 @@ def has_no_attr( # "'function' not callable" is a confusing error message. # Explain that the problem is that the type of the function is not known. self.fail("Cannot call function of unknown type", context, code=codes.OPERATOR) + return codes.OPERATOR else: self.fail( message_registry.NOT_CALLABLE.format(format_type(original_type, self.options)), context, code=codes.OPERATOR, ) + return codes.OPERATOR else: # The non-special case: a missing ordinary attribute. extra = "" @@ -501,6 +511,7 @@ def has_no_attr( context, code=codes.ATTR_DEFINED, ) + return codes.ATTR_DEFINED elif isinstance(original_type, UnionType): # The checker passes "object" in lieu of "None" for attribute # checks, so we manually convert it back. @@ -518,6 +529,7 @@ def has_no_attr( context, code=codes.UNION_ATTR, ) + return codes.UNION_ATTR elif isinstance(original_type, TypeVarType): bound = get_proper_type(original_type.upper_bound) if isinstance(bound, UnionType): @@ -531,6 +543,7 @@ def has_no_attr( context, code=codes.UNION_ATTR, ) + return codes.UNION_ATTR else: self.fail( '{} has no attribute "{}"{}'.format( @@ -539,7 +552,8 @@ def has_no_attr( context, code=codes.ATTR_DEFINED, ) - return AnyType(TypeOfAny.from_error) + return codes.ATTR_DEFINED + return None def unsupported_operand_types( self, @@ -1107,8 +1121,8 @@ def unpacking_strings_disallowed(self, context: Context) -> None: def type_not_iterable(self, type: Type, context: Context) -> None: self.fail(f"{format_type(type, self.options)} object is not iterable", context) - def possible_missing_await(self, context: Context) -> None: - self.note('Maybe you forgot to use "await"?', context) + def possible_missing_await(self, context: Context, code: ErrorCode | None) -> None: + self.note('Maybe you 
forgot to use "await"?', context, code=code) def incompatible_operator_assignment(self, op: str, context: Context) -> None: self.fail(f"Result type of {op} incompatible in assignment", context) diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test index 7afdbd687135..f0fa206645dd 100644 --- a/test-data/unit/check-async-await.test +++ b/test-data/unit/check-async-await.test @@ -165,6 +165,33 @@ async def f() -> None: [out] main:4: error: "List[int]" has no attribute "__aiter__" (not async iterable) +[case testAsyncForErrorNote] + +from typing import AsyncIterator, AsyncGenerator +async def g() -> AsyncGenerator[str, None]: + pass + +async def f() -> None: + async for x in g(): + pass +[builtins fixtures/async_await.pyi] +[typing fixtures/typing-async.pyi] +[out] +main:7: error: "Coroutine[Any, Any, AsyncGenerator[str, None]]" has no attribute "__aiter__" (not async iterable) +main:7: note: Maybe you forgot to use "await"? + +[case testAsyncForErrorCanBeIgnored] + +from typing import AsyncIterator, AsyncGenerator +async def g() -> AsyncGenerator[str, None]: + pass + +async def f() -> None: + async for x in g(): # type: ignore[attr-defined] + pass +[builtins fixtures/async_await.pyi] +[typing fixtures/typing-async.pyi] + [case testAsyncForTypeComments] from typing import AsyncIterator, Union From 7a62481c4ed4007a0323118d3e1b8727b2136434 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 1 Oct 2023 02:05:55 -0700 Subject: [PATCH 076/144] Sync typeshed (#16206) Source commit: https://github.com/python/typeshed/commit/559d31c4a33045310a30843dd7fac88a62cc5915 --- mypy/typeshed/stdlib/_ctypes.pyi | 6 ++ mypy/typeshed/stdlib/_curses.pyi | 23 ++--- mypy/typeshed/stdlib/_posixsubprocess.pyi | 16 +-- mypy/typeshed/stdlib/_typeshed/__init__.pyi | 8 +- mypy/typeshed/stdlib/abc.pyi | 3 +- mypy/typeshed/stdlib/ast.pyi | 8 ++ mypy/typeshed/stdlib/builtins.pyi | 74 
++++++++------ mypy/typeshed/stdlib/codecs.pyi | 8 +- mypy/typeshed/stdlib/collections/__init__.pyi | 9 ++ mypy/typeshed/stdlib/fcntl.pyi | 5 + mypy/typeshed/stdlib/http/server.pyi | 1 + mypy/typeshed/stdlib/logging/handlers.pyi | 2 + mypy/typeshed/stdlib/mmap.pyi | 2 + mypy/typeshed/stdlib/multiprocessing/util.pyi | 12 +-- mypy/typeshed/stdlib/os/__init__.pyi | 98 +++++++++++++++---- mypy/typeshed/stdlib/posix.pyi | 42 ++++++++ mypy/typeshed/stdlib/resource.pyi | 9 +- mypy/typeshed/stdlib/signal.pyi | 8 +- mypy/typeshed/stdlib/ssl.pyi | 2 - mypy/typeshed/stdlib/sys.pyi | 5 +- mypy/typeshed/stdlib/syslog.pyi | 6 +- mypy/typeshed/stdlib/termios.pyi | 10 +- mypy/typeshed/stdlib/tty.pyi | 15 ++- mypy/typeshed/stdlib/types.pyi | 40 ++++---- mypy/typeshed/stdlib/typing.pyi | 4 +- mypy/typeshed/stdlib/typing_extensions.pyi | 7 ++ mypy/typeshed/stdlib/unittest/case.pyi | 4 +- mypy/typeshed/stdlib/xml/sax/__init__.pyi | 19 ++-- mypy/typeshed/stdlib/xml/sax/handler.pyi | 27 ++--- mypy/typeshed/stdlib/xml/sax/saxutils.pyi | 64 ++++++------ mypy/typeshed/stdlib/xml/sax/xmlreader.pyi | 82 +++++++++------- mypy/typeshed/stdlib/xxlimited.pyi | 2 + 32 files changed, 404 insertions(+), 217 deletions(-) diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi index 538c07d54aad..b48b1f7d318c 100644 --- a/mypy/typeshed/stdlib/_ctypes.pyi +++ b/mypy/typeshed/stdlib/_ctypes.pyi @@ -56,6 +56,12 @@ class _CData(metaclass=_CDataMeta): _b_base_: int _b_needsfree_: bool _objects: Mapping[Any, int] | None + # At runtime the following classmethods are available only on classes, not + # on instances. This can't be reflected properly in the type system: + # + # Structure.from_buffer(...) # valid at runtime + # Structure(...).from_buffer(...) # invalid at runtime + # @classmethod def from_buffer(cls, source: WriteableBuffer, offset: int = ...) -> Self: ... 
@classmethod diff --git a/mypy/typeshed/stdlib/_curses.pyi b/mypy/typeshed/stdlib/_curses.pyi index 61881fc09199..e2319a5fcc1f 100644 --- a/mypy/typeshed/stdlib/_curses.pyi +++ b/mypy/typeshed/stdlib/_curses.pyi @@ -276,12 +276,7 @@ if sys.platform != "win32": def can_change_color() -> bool: ... def cbreak(__flag: bool = True) -> None: ... def color_content(__color_number: int) -> tuple[int, int, int]: ... - # Changed in Python 3.8.8 and 3.9.2 - if sys.version_info >= (3, 8): - def color_pair(pair_number: int) -> int: ... - else: - def color_pair(__color_number: int) -> int: ... - + def color_pair(__pair_number: int) -> int: ... def curs_set(__visibility: int) -> int: ... def def_prog_mode() -> None: ... def def_shell_mode() -> None: ... @@ -366,7 +361,10 @@ if sys.platform != "win32": ) -> bytes: ... def typeahead(__fd: int) -> None: ... def unctrl(__ch: _ChType) -> bytes: ... - def unget_wch(__ch: int | str) -> None: ... + if sys.version_info < (3, 12) or sys.platform != "darwin": + # The support for macos was dropped in 3.12 + def unget_wch(__ch: int | str) -> None: ... + def ungetch(__ch: _ChType) -> None: ... def ungetmouse(__id: int, __x: int, __y: int, __z: int, __bstate: int) -> None: ... def update_lines_cols() -> None: ... @@ -441,10 +439,13 @@ if sys.platform != "win32": def getch(self) -> int: ... @overload def getch(self, y: int, x: int) -> int: ... - @overload - def get_wch(self) -> int | str: ... - @overload - def get_wch(self, y: int, x: int) -> int | str: ... + if sys.version_info < (3, 12) or sys.platform != "darwin": + # The support for macos was dropped in 3.12 + @overload + def get_wch(self) -> int | str: ... + @overload + def get_wch(self, y: int, x: int) -> int | str: ... + @overload def getkey(self) -> str: ... 
@overload diff --git a/mypy/typeshed/stdlib/_posixsubprocess.pyi b/mypy/typeshed/stdlib/_posixsubprocess.pyi index ca95336bb503..1708063720ba 100644 --- a/mypy/typeshed/stdlib/_posixsubprocess.pyi +++ b/mypy/typeshed/stdlib/_posixsubprocess.pyi @@ -6,15 +6,15 @@ from typing_extensions import SupportsIndex if sys.platform != "win32": def cloexec_pipe() -> tuple[int, int]: ... def fork_exec( - __process_args: Sequence[StrOrBytesPath] | None, + __args: Sequence[StrOrBytesPath] | None, __executable_list: Sequence[bytes], __close_fds: bool, - __fds_to_keep: tuple[int, ...], - __cwd_obj: str, - __env_list: Sequence[bytes] | None, + __pass_fds: tuple[int, ...], + __cwd: str, + __env: Sequence[bytes] | None, __p2cread: int, __p2cwrite: int, - __c2pred: int, + __c2pread: int, __c2pwrite: int, __errread: int, __errwrite: int, @@ -23,9 +23,9 @@ if sys.platform != "win32": __restore_signals: int, __call_setsid: int, __pgid_to_set: int, - __gid_object: SupportsIndex | None, - __groups_list: list[int] | None, - __uid_object: SupportsIndex | None, + __gid: SupportsIndex | None, + __extra_groups: list[int] | None, + __uid: SupportsIndex | None, __child_umask: int, __preexec_fn: Callable[[], None], __allow_vfork: bool, diff --git a/mypy/typeshed/stdlib/_typeshed/__init__.pyi b/mypy/typeshed/stdlib/_typeshed/__init__.pyi index 7ae67292e8cd..8e92138c748a 100644 --- a/mypy/typeshed/stdlib/_typeshed/__init__.pyi +++ b/mypy/typeshed/stdlib/_typeshed/__init__.pyi @@ -7,8 +7,8 @@ from collections.abc import Awaitable, Callable, Iterable, Sequence, Set as Abst from dataclasses import Field from os import PathLike from types import FrameType, TracebackType -from typing import Any, AnyStr, ClassVar, Generic, Protocol, TypeVar, overload -from typing_extensions import Buffer, Final, Literal, LiteralString, TypeAlias, final +from typing import Any, AnyStr, ClassVar, Generic, Protocol, SupportsFloat, SupportsInt, TypeVar, overload +from typing_extensions import Buffer, Final, Literal, 
LiteralString, SupportsIndex, TypeAlias, final _KT = TypeVar("_KT") _KT_co = TypeVar("_KT_co", covariant=True) @@ -312,3 +312,7 @@ TraceFunction: TypeAlias = Callable[[FrameType, str, Any], TraceFunction | None] # https://github.com/microsoft/pyright/issues/4339 class DataclassInstance(Protocol): __dataclass_fields__: ClassVar[dict[str, Field[Any]]] + +# Anything that can be passed to the int/float constructors +ConvertibleToInt: TypeAlias = str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc +ConvertibleToFloat: TypeAlias = str | ReadableBuffer | SupportsFloat | SupportsIndex diff --git a/mypy/typeshed/stdlib/abc.pyi b/mypy/typeshed/stdlib/abc.pyi index 43893a298341..7fe1d09f7589 100644 --- a/mypy/typeshed/stdlib/abc.pyi +++ b/mypy/typeshed/stdlib/abc.pyi @@ -40,7 +40,8 @@ class abstractstaticmethod(staticmethod[_P, _R_co]): class abstractproperty(property): __isabstractmethod__: Literal[True] -class ABC(metaclass=ABCMeta): ... +class ABC(metaclass=ABCMeta): + __slots__ = () def get_cache_token() -> object: ... diff --git a/mypy/typeshed/stdlib/ast.pyi b/mypy/typeshed/stdlib/ast.pyi index 377138141340..a61b4e35fd56 100644 --- a/mypy/typeshed/stdlib/ast.pyi +++ b/mypy/typeshed/stdlib/ast.pyi @@ -138,8 +138,10 @@ class NodeVisitor: def visit_withitem(self, node: withitem) -> Any: ... if sys.version_info >= (3, 10): def visit_Match(self, node: Match) -> Any: ... + def visit_match_case(self, node: match_case) -> Any: ... def visit_MatchValue(self, node: MatchValue) -> Any: ... def visit_MatchSequence(self, node: MatchSequence) -> Any: ... + def visit_MatchSingleton(self, node: MatchSingleton) -> Any: ... def visit_MatchStar(self, node: MatchStar) -> Any: ... def visit_MatchMapping(self, node: MatchMapping) -> Any: ... def visit_MatchClass(self, node: MatchClass) -> Any: ... @@ -149,6 +151,12 @@ class NodeVisitor: if sys.version_info >= (3, 11): def visit_TryStar(self, node: TryStar) -> Any: ... 
+ if sys.version_info >= (3, 12): + def visit_TypeVar(self, node: TypeVar) -> Any: ... + def visit_ParamSpec(self, node: ParamSpec) -> Any: ... + def visit_TypeVarTuple(self, node: TypeVarTuple) -> Any: ... + def visit_TypeAlias(self, node: TypeAlias) -> Any: ... + # visit methods for deprecated nodes def visit_ExtSlice(self, node: ExtSlice) -> Any: ... def visit_Index(self, node: Index) -> Any: ... diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index 9e413579e0fb..dedd72933028 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -5,6 +5,8 @@ import types from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import ( AnyStr_co, + ConvertibleToFloat, + ConvertibleToInt, FileDescriptorOrPath, OpenBinaryMode, OpenBinaryModeReading, @@ -24,7 +26,6 @@ from _typeshed import ( SupportsRDivMod, SupportsRichComparison, SupportsRichComparisonT, - SupportsTrunc, SupportsWrite, ) from collections.abc import Awaitable, Callable, Iterable, Iterator, MutableSet, Reversible, Set as AbstractSet, Sized @@ -48,7 +49,6 @@ from typing import ( # noqa: Y022 SupportsBytes, SupportsComplex, SupportsFloat, - SupportsInt, TypeVar, overload, type_check_only, @@ -220,7 +220,7 @@ _LiteralInteger = _PositiveInteger | _NegativeInteger | Literal[0] # noqa: Y026 class int: @overload - def __new__(cls, __x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = ...) -> Self: ... + def __new__(cls, __x: ConvertibleToInt = ...) -> Self: ... @overload def __new__(cls, __x: str | bytes | bytearray, base: SupportsIndex) -> Self: ... if sys.version_info >= (3, 8): @@ -326,7 +326,7 @@ class int: def __index__(self) -> int: ... class float: - def __new__(cls, __x: SupportsFloat | SupportsIndex | str | ReadableBuffer = ...) -> Self: ... + def __new__(cls, __x: ConvertibleToFloat = ...) -> Self: ... def as_integer_ratio(self) -> tuple[int, int]: ... def hex(self) -> str: ... 
def is_integer(self) -> bool: ... @@ -774,7 +774,7 @@ class memoryview(Sequence[int]): def contiguous(self) -> bool: ... @property def nbytes(self) -> int: ... - def __init__(self, obj: ReadableBuffer) -> None: ... + def __new__(cls, obj: ReadableBuffer) -> Self: ... def __enter__(self) -> Self: ... def __exit__( self, __exc_type: type[BaseException] | None, __exc_val: BaseException | None, __exc_tb: TracebackType | None @@ -853,9 +853,9 @@ class slice: @property def stop(self) -> Any: ... @overload - def __init__(self, __stop: Any) -> None: ... + def __new__(cls, __stop: Any) -> Self: ... @overload - def __init__(self, __start: Any, __stop: Any, __step: Any = ...) -> None: ... + def __new__(cls, __start: Any, __stop: Any, __step: Any = ...) -> Self: ... def __eq__(self, __value: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] def indices(self, __len: SupportsIndex) -> tuple[int, int, int]: ... @@ -1110,7 +1110,7 @@ class frozenset(AbstractSet[_T_co], Generic[_T_co]): def __class_getitem__(cls, __item: Any) -> GenericAlias: ... class enumerate(Iterator[tuple[int, _T]], Generic[_T]): - def __init__(self, iterable: Iterable[_T], start: int = ...) -> None: ... + def __new__(cls, iterable: Iterable[_T], start: int = ...) -> Self: ... def __iter__(self) -> Self: ... def __next__(self) -> tuple[int, _T]: ... if sys.version_info >= (3, 9): @@ -1125,9 +1125,9 @@ class range(Sequence[int]): @property def step(self) -> int: ... @overload - def __init__(self, __stop: SupportsIndex) -> None: ... + def __new__(cls, __stop: SupportsIndex) -> Self: ... @overload - def __init__(self, __start: SupportsIndex, __stop: SupportsIndex, __step: SupportsIndex = ...) -> None: ... + def __new__(cls, __start: SupportsIndex, __stop: SupportsIndex, __step: SupportsIndex = ...) -> Self: ... def count(self, __value: int) -> int: ... def index(self, __value: int) -> int: ... # type: ignore[override] def __len__(self) -> int: ... 
@@ -1320,11 +1320,11 @@ def exit(code: sys._ExitCode = None) -> NoReturn: ... class filter(Iterator[_T], Generic[_T]): @overload - def __init__(self, __function: None, __iterable: Iterable[_T | None]) -> None: ... + def __new__(cls, __function: None, __iterable: Iterable[_T | None]) -> Self: ... @overload - def __init__(self, __function: Callable[[_S], TypeGuard[_T]], __iterable: Iterable[_S]) -> None: ... + def __new__(cls, __function: Callable[[_S], TypeGuard[_T]], __iterable: Iterable[_S]) -> Self: ... @overload - def __init__(self, __function: Callable[[_T], Any], __iterable: Iterable[_T]) -> None: ... + def __new__(cls, __function: Callable[[_T], Any], __iterable: Iterable[_T]) -> Self: ... def __iter__(self) -> Self: ... def __next__(self) -> _T: ... @@ -1379,35 +1379,35 @@ def locals() -> dict[str, Any]: ... class map(Iterator[_S], Generic[_S]): @overload - def __init__(self, __func: Callable[[_T1], _S], __iter1: Iterable[_T1]) -> None: ... + def __new__(cls, __func: Callable[[_T1], _S], __iter1: Iterable[_T1]) -> Self: ... @overload - def __init__(self, __func: Callable[[_T1, _T2], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> None: ... + def __new__(cls, __func: Callable[[_T1, _T2], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> Self: ... @overload - def __init__( - self, __func: Callable[[_T1, _T2, _T3], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3] - ) -> None: ... + def __new__( + cls, __func: Callable[[_T1, _T2, _T3], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3] + ) -> Self: ... @overload - def __init__( - self, + def __new__( + cls, __func: Callable[[_T1, _T2, _T3, _T4], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4], - ) -> None: ... + ) -> Self: ... 
@overload - def __init__( - self, + def __new__( + cls, __func: Callable[[_T1, _T2, _T3, _T4, _T5], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4], __iter5: Iterable[_T5], - ) -> None: ... + ) -> Self: ... @overload - def __init__( - self, + def __new__( + cls, __func: Callable[..., _S], __iter1: Iterable[Any], __iter2: Iterable[Any], @@ -1416,7 +1416,7 @@ class map(Iterator[_S], Generic[_S]): __iter5: Iterable[Any], __iter6: Iterable[Any], *iterables: Iterable[Any], - ) -> None: ... + ) -> Self: ... def __iter__(self) -> Self: ... def __next__(self) -> _S: ... @@ -1725,6 +1725,8 @@ def vars(__object: Any = ...) -> dict[str, Any]: ... class zip(Iterator[_T_co], Generic[_T_co]): if sys.version_info >= (3, 10): + @overload + def __new__(cls, *, strict: bool = ...) -> zip[Any]: ... @overload def __new__(cls, __iter1: Iterable[_T1], *, strict: bool = ...) -> zip[tuple[_T1]]: ... @overload @@ -1767,6 +1769,8 @@ class zip(Iterator[_T_co], Generic[_T_co]): strict: bool = ..., ) -> zip[tuple[Any, ...]]: ... else: + @overload + def __new__(cls) -> zip[Any]: ... @overload def __new__(cls, __iter1: Iterable[_T1]) -> zip[tuple[_T1]]: ... @overload @@ -1812,11 +1816,17 @@ def __import__( ) -> types.ModuleType: ... def __build_class__(__func: Callable[[], _Cell | Any], __name: str, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any: ... -# Actually the type of Ellipsis is , but since it's -# not exposed anywhere under that name, we make it private here. -@final -@type_check_only -class ellipsis: ... +if sys.version_info >= (3, 10): + # In Python 3.10, EllipsisType is exposed publicly in the types module. + @final + class ellipsis: ... + +else: + # Actually the type of Ellipsis is , but since it's + # not exposed anywhere under that name, we make it private here. + @final + @type_check_only + class ellipsis: ... 
Ellipsis: ellipsis diff --git a/mypy/typeshed/stdlib/codecs.pyi b/mypy/typeshed/stdlib/codecs.pyi index c9b6a4a82da6..f8c92392a599 100644 --- a/mypy/typeshed/stdlib/codecs.pyi +++ b/mypy/typeshed/stdlib/codecs.pyi @@ -78,16 +78,16 @@ class _Stream(_WritableStream, _ReadableStream, Protocol): ... # They were much more common in Python 2 than in Python 3. class _Encoder(Protocol): - def __call__(self, input: str, errors: str = ...) -> tuple[bytes, int]: ... # signature of Codec().encode + def __call__(self, __input: str, __errors: str = ...) -> tuple[bytes, int]: ... # signature of Codec().encode class _Decoder(Protocol): - def __call__(self, input: bytes, errors: str = ...) -> tuple[str, int]: ... # signature of Codec().decode + def __call__(self, __input: bytes, __errors: str = ...) -> tuple[str, int]: ... # signature of Codec().decode class _StreamReader(Protocol): - def __call__(self, stream: _ReadableStream, errors: str = ...) -> StreamReader: ... + def __call__(self, __stream: _ReadableStream, __errors: str = ...) -> StreamReader: ... class _StreamWriter(Protocol): - def __call__(self, stream: _WritableStream, errors: str = ...) -> StreamWriter: ... + def __call__(self, __stream: _WritableStream, __errors: str = ...) -> StreamWriter: ... class _IncrementalEncoder(Protocol): def __call__(self, errors: str = ...) -> IncrementalEncoder: ... diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi index 3b8d92f78612..1d560117a54f 100644 --- a/mypy/typeshed/stdlib/collections/__init__.pyi +++ b/mypy/typeshed/stdlib/collections/__init__.pyi @@ -373,6 +373,15 @@ class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): @overload def setdefault(self, key: _KT, default: _VT) -> _VT: ... def __eq__(self, __value: object) -> bool: ... + if sys.version_info >= (3, 9): + @overload + def __or__(self, __value: dict[_KT, _VT]) -> Self: ... 
+ @overload + def __or__(self, __value: dict[_T1, _T2]) -> OrderedDict[_KT | _T1, _VT | _T2]: ... + @overload + def __ror__(self, __value: dict[_KT, _VT]) -> Self: ... + @overload + def __ror__(self, __value: dict[_T1, _T2]) -> OrderedDict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] class defaultdict(dict[_KT, _VT], Generic[_KT, _VT]): default_factory: Callable[[], _VT] | None diff --git a/mypy/typeshed/stdlib/fcntl.pyi b/mypy/typeshed/stdlib/fcntl.pyi index 6aec7515f330..56fd5679a1c8 100644 --- a/mypy/typeshed/stdlib/fcntl.pyi +++ b/mypy/typeshed/stdlib/fcntl.pyi @@ -101,6 +101,11 @@ if sys.platform != "win32": I_STR: int I_SWROPT: int I_UNLINK: int + + if sys.version_info >= (3, 12) and sys.platform == "linux": + FICLONE: int + FICLONERANGE: int + @overload def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: int = 0) -> int: ... @overload diff --git a/mypy/typeshed/stdlib/http/server.pyi b/mypy/typeshed/stdlib/http/server.pyi index c9700f70e791..22c33bc3787a 100644 --- a/mypy/typeshed/stdlib/http/server.pyi +++ b/mypy/typeshed/stdlib/http/server.pyi @@ -54,6 +54,7 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): extensions_map: dict[str, str] if sys.version_info >= (3, 12): index_pages: ClassVar[tuple[str, ...]] + directory: str def __init__( self, request: socketserver._RequestType, diff --git a/mypy/typeshed/stdlib/logging/handlers.pyi b/mypy/typeshed/stdlib/logging/handlers.pyi index ad5bf392b50f..2280dbad4c5d 100644 --- a/mypy/typeshed/stdlib/logging/handlers.pyi +++ b/mypy/typeshed/stdlib/logging/handlers.pyi @@ -7,6 +7,7 @@ from collections.abc import Callable from logging import FileHandler, Handler, LogRecord from re import Pattern from socket import SocketKind, socket +from threading import Thread from typing import Any, ClassVar, Protocol, TypeVar _T = TypeVar("_T") @@ -264,6 +265,7 @@ class QueueListener: handlers: tuple[Handler, ...] 
# undocumented respect_handler_level: bool # undocumented queue: _QueueLike[Any] # undocumented + _thread: Thread | None # undocumented def __init__(self, queue: _QueueLike[Any], *handlers: Handler, respect_handler_level: bool = False) -> None: ... def dequeue(self, block: bool) -> LogRecord: ... def prepare(self, record: LogRecord) -> Any: ... diff --git a/mypy/typeshed/stdlib/mmap.pyi b/mypy/typeshed/stdlib/mmap.pyi index 38e1924392c4..09319980692f 100644 --- a/mypy/typeshed/stdlib/mmap.pyi +++ b/mypy/typeshed/stdlib/mmap.pyi @@ -16,6 +16,8 @@ if sys.platform == "linux": MAP_EXECUTABLE: int if sys.version_info >= (3, 10): MAP_POPULATE: int +if sys.version_info >= (3, 11) and sys.platform != "win32" and sys.platform != "darwin": + MAP_STACK: int if sys.platform != "win32": MAP_ANON: int diff --git a/mypy/typeshed/stdlib/multiprocessing/util.pyi b/mypy/typeshed/stdlib/multiprocessing/util.pyi index 7ca650511e51..aeb46f85a327 100644 --- a/mypy/typeshed/stdlib/multiprocessing/util.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/util.pyi @@ -1,9 +1,8 @@ import threading -from _typeshed import Incomplete, ReadableBuffer, SupportsTrunc, Unused +from _typeshed import ConvertibleToInt, Incomplete, Unused from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence from logging import Logger, _Level as _LoggingLevel -from typing import Any, SupportsInt -from typing_extensions import SupportsIndex +from typing import Any __all__ = [ "sub_debug", @@ -77,9 +76,4 @@ class ForkAwareLocal(threading.local): ... MAXFD: int def close_all_fds_except(fds: Iterable[int]) -> None: ... -def spawnv_passfds( - path: bytes, - # args is anything that can be passed to the int constructor - args: Sequence[str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc], - passfds: Sequence[int], -) -> int: ... +def spawnv_passfds(path: bytes, args: Sequence[ConvertibleToInt], passfds: Sequence[int]) -> int: ... 
diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi index 961858ce3c19..fa4c55011eba 100644 --- a/mypy/typeshed/stdlib/os/__init__.pyi +++ b/mypy/typeshed/stdlib/os/__init__.pyi @@ -2,6 +2,7 @@ import sys from _typeshed import ( AnyStr_co, BytesPath, + FileDescriptor, FileDescriptorLike, FileDescriptorOrPath, GenericPath, @@ -121,6 +122,12 @@ if sys.platform == "linux": GRND_NONBLOCK: int GRND_RANDOM: int +if sys.platform == "darwin" and sys.version_info >= (3, 12): + PRIO_DARWIN_BG: int + PRIO_DARWIN_NONUI: int + PRIO_DARWIN_PROCESS: int + PRIO_DARWIN_THREAD: int + SEEK_SET: int SEEK_CUR: int SEEK_END: int @@ -252,12 +259,14 @@ environ: _Environ[str] if sys.platform != "win32": environb: _Environ[bytes] +if sys.version_info >= (3, 11) or sys.platform != "win32": + EX_OK: int + if sys.platform != "win32": confstr_names: dict[str, int] pathconf_names: dict[str, int] sysconf_names: dict[str, int] - EX_OK: int EX_USAGE: int EX_DATAERR: int EX_NOINPUT: int @@ -339,6 +348,11 @@ class stat_result(structseq[float], tuple[int, int, int, int, int, int, int, flo if sys.version_info >= (3, 8): @property def st_reparse_tag(self) -> int: ... + if sys.version_info >= (3, 12): + @property + def st_birthtime(self) -> float: ... # time of file creation in seconds + @property + def st_birthtime_ns(self) -> int: ... # time of file creation in nanoseconds else: @property def st_blocks(self) -> int: ... # number of blocks allocated for file @@ -347,13 +361,13 @@ class stat_result(structseq[float], tuple[int, int, int, int, int, int, int, flo @property def st_rdev(self) -> int: ... # type of device if an inode device if sys.platform != "linux": - # These properties are available on MacOS, but not on Windows or Ubuntu. + # These properties are available on MacOS, but not Ubuntu. 
# On other Unix systems (such as FreeBSD), the following attributes may be # available (but may be only filled out if root tries to use them): @property def st_gen(self) -> int: ... # file generation number @property - def st_birthtime(self) -> int: ... # time of file creation + def st_birthtime(self) -> float: ... # time of file creation in seconds if sys.platform == "darwin": @property def st_flags(self) -> int: ... # user defined flags for file @@ -484,8 +498,8 @@ if sys.platform != "win32": def setpgid(__pid: int, __pgrp: int) -> None: ... def setregid(__rgid: int, __egid: int) -> None: ... if sys.platform != "darwin": - def setresgid(rgid: int, egid: int, sgid: int) -> None: ... - def setresuid(ruid: int, euid: int, suid: int) -> None: ... + def setresgid(__rgid: int, __egid: int, __sgid: int) -> None: ... + def setresuid(__ruid: int, __euid: int, __suid: int) -> None: ... def setreuid(__ruid: int, __euid: int) -> None: ... def getsid(__pid: int) -> int: ... @@ -614,13 +628,15 @@ def open(path: StrOrBytesPath, flags: int, mode: int = 0o777, *, dir_fd: int | N def pipe() -> tuple[int, int]: ... def read(__fd: int, __length: int) -> bytes: ... +if sys.version_info >= (3, 12) or sys.platform != "win32": + def get_blocking(__fd: int) -> bool: ... + def set_blocking(__fd: int, __blocking: bool) -> None: ... + if sys.platform != "win32": def fchmod(fd: int, mode: int) -> None: ... def fchown(fd: int, uid: int, gid: int) -> None: ... def fpathconf(__fd: int, __name: str | int) -> int: ... def fstatvfs(__fd: int) -> statvfs_result: ... - def get_blocking(__fd: int) -> bool: ... - def set_blocking(__fd: int, __blocking: bool) -> None: ... def lockf(__fd: int, __command: int, __length: int) -> None: ... def openpty() -> tuple[int, int]: ... 
# some flavors of Unix if sys.platform != "darwin": @@ -641,18 +657,20 @@ if sys.platform != "win32": RWF_SYNC: int RWF_HIPRI: int RWF_NOWAIT: int - @overload - def sendfile(out_fd: int, in_fd: int, offset: int | None, count: int) -> int: ... - @overload - def sendfile( - out_fd: int, - in_fd: int, - offset: int, - count: int, - headers: Sequence[ReadableBuffer] = ..., - trailers: Sequence[ReadableBuffer] = ..., - flags: int = 0, - ) -> int: ... # FreeBSD and Mac OS X only + + if sys.platform == "linux": + def sendfile(out_fd: FileDescriptor, in_fd: FileDescriptor, offset: int | None, count: int) -> int: ... + else: + def sendfile( + out_fd: FileDescriptor, + in_fd: FileDescriptor, + offset: int, + count: int, + headers: Sequence[ReadableBuffer] = ..., + trailers: Sequence[ReadableBuffer] = ..., + flags: int = 0, + ) -> int: ... # FreeBSD and Mac OS X only + def readv(__fd: int, __buffers: SupportsLenAndGetItem[WriteableBuffer]) -> int: ... def writev(__fd: int, __buffers: SupportsLenAndGetItem[ReadableBuffer]) -> int: ... @@ -1042,3 +1060,45 @@ if sys.version_info >= (3, 9): if sys.platform == "linux": def pidfd_open(pid: int, flags: int = ...) -> int: ... + +if sys.version_info >= (3, 12) and sys.platform == "win32": + def listdrives() -> list[str]: ... + def listmounts(volume: str) -> list[str]: ... + def listvolumes() -> list[str]: ... + +if sys.version_info >= (3, 10) and sys.platform == "linux": + EFD_CLOEXEC: int + EFD_NONBLOCK: int + EFD_SEMAPHORE: int + SPLICE_F_MORE: int + SPLICE_F_MOVE: int + SPLICE_F_NONBLOCK: int + def eventfd(initval: int, flags: int = 524288) -> FileDescriptor: ... + def eventfd_read(fd: FileDescriptor) -> int: ... + def eventfd_write(fd: FileDescriptor, value: int) -> None: ... + def splice( + src: FileDescriptor, + dst: FileDescriptor, + count: int, + offset_src: int | None = ..., + offset_dst: int | None = ..., + flags: int = 0, + ) -> int: ... 
+ +if sys.version_info >= (3, 12) and sys.platform == "linux": + CLONE_FILES: int + CLONE_FS: int + CLONE_NEWCGROUP: int + CLONE_NEWIPC: int + CLONE_NEWNET: int + CLONE_NEWNS: int + CLONE_NEWPID: int + CLONE_NEWTIME: int + CLONE_NEWUSER: int + CLONE_NEWUTS: int + CLONE_SIGHAND: int + CLONE_SYSVSEM: int + CLONE_THREAD: int + CLONE_VM: int + def unshare(flags: int) -> None: ... + def setns(fd: FileDescriptorLike, nstype: int = 0) -> None: ... diff --git a/mypy/typeshed/stdlib/posix.pyi b/mypy/typeshed/stdlib/posix.pyi index ffd96757586b..ab6bf2e63be5 100644 --- a/mypy/typeshed/stdlib/posix.pyi +++ b/mypy/typeshed/stdlib/posix.pyi @@ -236,6 +236,20 @@ if sys.platform != "win32": removexattr as removexattr, setxattr as setxattr, ) + + if sys.version_info >= (3, 10): + from os import ( + EFD_CLOEXEC as EFD_CLOEXEC, + EFD_NONBLOCK as EFD_NONBLOCK, + EFD_SEMAPHORE as EFD_SEMAPHORE, + SPLICE_F_MORE as SPLICE_F_MORE, + SPLICE_F_MOVE as SPLICE_F_MOVE, + SPLICE_F_NONBLOCK as SPLICE_F_NONBLOCK, + eventfd as eventfd, + eventfd_read as eventfd_read, + eventfd_write as eventfd_write, + splice as splice, + ) else: from os import chflags as chflags, lchflags as lchflags, lchmod as lchmod @@ -314,6 +328,34 @@ if sys.platform != "win32": if sys.platform != "darwin": from os import RWF_DSYNC as RWF_DSYNC, RWF_HIPRI as RWF_HIPRI, RWF_NOWAIT as RWF_NOWAIT, RWF_SYNC as RWF_SYNC + if sys.version_info >= (3, 12) and sys.platform == "linux": + from os import ( + CLONE_FILES as CLONE_FILES, + CLONE_FS as CLONE_FS, + CLONE_NEWCGROUP as CLONE_NEWCGROUP, + CLONE_NEWIPC as CLONE_NEWIPC, + CLONE_NEWNET as CLONE_NEWNET, + CLONE_NEWNS as CLONE_NEWNS, + CLONE_NEWPID as CLONE_NEWPID, + CLONE_NEWTIME as CLONE_NEWTIME, + CLONE_NEWUSER as CLONE_NEWUSER, + CLONE_NEWUTS as CLONE_NEWUTS, + CLONE_SIGHAND as CLONE_SIGHAND, + CLONE_SYSVSEM as CLONE_SYSVSEM, + CLONE_THREAD as CLONE_THREAD, + CLONE_VM as CLONE_VM, + setns as setns, + unshare as unshare, + ) + + if sys.version_info >= (3, 12) and sys.platform == 
"darwin": + from os import ( + PRIO_DARWIN_BG as PRIO_DARWIN_BG, + PRIO_DARWIN_NONUI as PRIO_DARWIN_NONUI, + PRIO_DARWIN_PROCESS as PRIO_DARWIN_PROCESS, + PRIO_DARWIN_THREAD as PRIO_DARWIN_THREAD, + ) + # Not same as os.environ or os.environb # Because of this variable, we can't do "from posix import *" in os/__init__.pyi environ: dict[bytes, bytes] diff --git a/mypy/typeshed/stdlib/resource.pyi b/mypy/typeshed/stdlib/resource.pyi index f2e979ff89af..57cefb4681ac 100644 --- a/mypy/typeshed/stdlib/resource.pyi +++ b/mypy/typeshed/stdlib/resource.pyi @@ -1,6 +1,5 @@ import sys from _typeshed import structseq -from typing import overload from typing_extensions import Final, final if sys.platform != "win32": @@ -86,8 +85,8 @@ if sys.platform != "win32": def getrusage(__who: int) -> struct_rusage: ... def setrlimit(__resource: int, __limits: tuple[int, int]) -> None: ... if sys.platform == "linux": - @overload - def prlimit(pid: int, resource: int, limits: tuple[int, int]) -> tuple[int, int]: ... - @overload - def prlimit(pid: int, resource: int) -> tuple[int, int]: ... + if sys.version_info >= (3, 12): + def prlimit(__pid: int, __resource: int, __limits: tuple[int, int] | None = None) -> tuple[int, int]: ... + else: + def prlimit(__pid: int, __resource: int, __limits: tuple[int, int] = ...) -> tuple[int, int]: ... error = OSError diff --git a/mypy/typeshed/stdlib/signal.pyi b/mypy/typeshed/stdlib/signal.pyi index 4c961a0c9aab..72c78f1b69f5 100644 --- a/mypy/typeshed/stdlib/signal.pyi +++ b/mypy/typeshed/stdlib/signal.pyi @@ -170,8 +170,12 @@ else: @property def si_band(self) -> int: ... - def sigtimedwait(sigset: Iterable[int], timeout: float) -> struct_siginfo | None: ... - def sigwaitinfo(sigset: Iterable[int]) -> struct_siginfo: ... + if sys.version_info >= (3, 10): + def sigtimedwait(__sigset: Iterable[int], __timeout: float) -> struct_siginfo | None: ... + def sigwaitinfo(__sigset: Iterable[int]) -> struct_siginfo: ... 
+ else: + def sigtimedwait(sigset: Iterable[int], timeout: float) -> struct_siginfo | None: ... + def sigwaitinfo(sigset: Iterable[int]) -> struct_siginfo: ... if sys.version_info >= (3, 8): def strsignal(__signalnum: _SIGNUM) -> str | None: ... diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi index 73762cd75e79..faf667afb475 100644 --- a/mypy/typeshed/stdlib/ssl.pyi +++ b/mypy/typeshed/stdlib/ssl.pyi @@ -203,7 +203,6 @@ class Options(enum.IntFlag): OP_ENABLE_MIDDLEBOX_COMPAT: int if sys.version_info >= (3, 12): OP_LEGACY_SERVER_CONNECT: int - if sys.version_info >= (3, 12) and sys.platform != "linux": OP_ENABLE_KTLS: int if sys.version_info >= (3, 11): OP_IGNORE_UNEXPECTED_EOF: int @@ -227,7 +226,6 @@ if sys.version_info >= (3, 8): OP_ENABLE_MIDDLEBOX_COMPAT: Options if sys.version_info >= (3, 12): OP_LEGACY_SERVER_CONNECT: Options -if sys.version_info >= (3, 12) and sys.platform != "linux": OP_ENABLE_KTLS: Options if sys.version_info >= (3, 11): OP_IGNORE_UNEXPECTED_EOF: Options diff --git a/mypy/typeshed/stdlib/sys.pyi b/mypy/typeshed/stdlib/sys.pyi index ca049124053a..a5e819d53326 100644 --- a/mypy/typeshed/stdlib/sys.pyi +++ b/mypy/typeshed/stdlib/sys.pyi @@ -225,9 +225,10 @@ class _thread_info(_UninstantiableStructseq, tuple[_ThreadInfoName, _ThreadInfoL def version(self) -> str | None: ... thread_info: _thread_info +_ReleaseLevel: TypeAlias = Literal["alpha", "beta", "candidate", "final"] @final -class _version_info(_UninstantiableStructseq, tuple[int, int, int, str, int]): +class _version_info(_UninstantiableStructseq, tuple[int, int, int, _ReleaseLevel, int]): @property def major(self) -> int: ... @property @@ -235,7 +236,7 @@ class _version_info(_UninstantiableStructseq, tuple[int, int, int, str, int]): @property def micro(self) -> int: ... @property - def releaselevel(self) -> str: ... + def releaselevel(self) -> _ReleaseLevel: ... @property def serial(self) -> int: ... 
diff --git a/mypy/typeshed/stdlib/syslog.pyi b/mypy/typeshed/stdlib/syslog.pyi index cfa8df887c1b..0b769301a482 100644 --- a/mypy/typeshed/stdlib/syslog.pyi +++ b/mypy/typeshed/stdlib/syslog.pyi @@ -36,11 +36,11 @@ if sys.platform != "win32": LOG_USER: Literal[8] LOG_UUCP: Literal[64] LOG_WARNING: Literal[4] - def LOG_MASK(a: int) -> int: ... - def LOG_UPTO(a: int) -> int: ... + def LOG_MASK(__pri: int) -> int: ... + def LOG_UPTO(__pri: int) -> int: ... def closelog() -> None: ... def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ... - def setlogmask(x: int) -> int: ... + def setlogmask(__maskpri: int) -> int: ... @overload def syslog(priority: int, message: str) -> None: ... @overload diff --git a/mypy/typeshed/stdlib/termios.pyi b/mypy/typeshed/stdlib/termios.pyi index bf8d7bee2473..776396cce407 100644 --- a/mypy/typeshed/stdlib/termios.pyi +++ b/mypy/typeshed/stdlib/termios.pyi @@ -3,10 +3,12 @@ from _typeshed import FileDescriptorLike from typing import Any from typing_extensions import TypeAlias -if sys.platform != "win32": - # Must be a list of length 7, containing 6 ints and a list of NCCS 1-character bytes or ints. - _Attr: TypeAlias = list[int | list[bytes | int]] +# Must be a list of length 7, containing 6 ints and a list of NCCS 1-character bytes or ints. +_Attr: TypeAlias = list[int | list[bytes | int]] | list[int | list[bytes]] | list[int | list[int]] +# Same as _Attr for return types; we use Any to avoid a union. +_AttrReturn: TypeAlias = list[Any] +if sys.platform != "win32": B0: int B1000000: int B110: int @@ -252,7 +254,7 @@ if sys.platform != "win32": XCASE: int XTABS: int - def tcgetattr(__fd: FileDescriptorLike) -> list[Any]: ... # Returns _Attr; we use Any to avoid a union in the return type + def tcgetattr(__fd: FileDescriptorLike) -> _AttrReturn: ... def tcsetattr(__fd: FileDescriptorLike, __when: int, __attributes: _Attr) -> None: ... def tcsendbreak(__fd: FileDescriptorLike, __duration: int) -> None: ... 
def tcdrain(__fd: FileDescriptorLike) -> None: ... diff --git a/mypy/typeshed/stdlib/tty.pyi b/mypy/typeshed/stdlib/tty.pyi index 43f2e1cf9087..add0d57a8d4b 100644 --- a/mypy/typeshed/stdlib/tty.pyi +++ b/mypy/typeshed/stdlib/tty.pyi @@ -1,9 +1,16 @@ import sys +import termios from typing import IO from typing_extensions import TypeAlias if sys.platform != "win32": __all__ = ["setraw", "setcbreak"] + if sys.version_info >= (3, 12): + __all__ += ["cfmakeraw", "cfmakecbreak"] + + _ModeSetterReturn: TypeAlias = termios._AttrReturn + else: + _ModeSetterReturn: TypeAlias = None _FD: TypeAlias = int | IO[str] @@ -15,5 +22,9 @@ if sys.platform != "win32": ISPEED: int OSPEED: int CC: int - def setraw(fd: _FD, when: int = 2) -> None: ... - def setcbreak(fd: _FD, when: int = 2) -> None: ... + def setraw(fd: _FD, when: int = 2) -> _ModeSetterReturn: ... + def setcbreak(fd: _FD, when: int = 2) -> _ModeSetterReturn: ... + + if sys.version_info >= (3, 12): + def cfmakeraw(mode: termios._Attr) -> None: ... + def cfmakecbreak(mode: termios._Attr) -> None: ... diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi index 2f4bd1a88047..8559063834c9 100644 --- a/mypy/typeshed/stdlib/types.pyi +++ b/mypy/typeshed/stdlib/types.pyi @@ -69,7 +69,7 @@ _VT_co = TypeVar("_VT_co", covariant=True) @final class _Cell: if sys.version_info >= (3, 8): - def __init__(self, __contents: object = ...) -> None: ... + def __new__(cls, __contents: object = ...) -> Self: ... def __eq__(self, __value: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -96,14 +96,14 @@ class FunctionType: __type_params__: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] __module__: str - def __init__( - self, + def __new__( + cls, code: CodeType, globals: dict[str, Any], name: str | None = ..., argdefs: tuple[object, ...] | None = ..., closure: tuple[_Cell, ...] | None = ..., - ) -> None: ... + ) -> Self: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... 
@overload def __get__(self, __instance: None, __owner: type) -> FunctionType: ... @@ -162,8 +162,8 @@ class CodeType: def co_positions(self) -> Iterable[tuple[int | None, int | None, int | None, int | None]]: ... if sys.version_info >= (3, 11): - def __init__( - self, + def __new__( + cls, __argcount: int, __posonlyargcount: int, __kwonlyargcount: int, @@ -182,10 +182,10 @@ class CodeType: __exceptiontable: bytes, __freevars: tuple[str, ...] = ..., __cellvars: tuple[str, ...] = ..., - ) -> None: ... + ) -> Self: ... elif sys.version_info >= (3, 10): - def __init__( - self, + def __new__( + cls, __argcount: int, __posonlyargcount: int, __kwonlyargcount: int, @@ -202,10 +202,10 @@ class CodeType: __linetable: bytes, __freevars: tuple[str, ...] = ..., __cellvars: tuple[str, ...] = ..., - ) -> None: ... + ) -> Self: ... elif sys.version_info >= (3, 8): - def __init__( - self, + def __new__( + cls, __argcount: int, __posonlyargcount: int, __kwonlyargcount: int, @@ -222,10 +222,10 @@ class CodeType: __lnotab: bytes, __freevars: tuple[str, ...] = ..., __cellvars: tuple[str, ...] = ..., - ) -> None: ... + ) -> Self: ... else: - def __init__( - self, + def __new__( + cls, __argcount: int, __kwonlyargcount: int, __nlocals: int, @@ -241,7 +241,7 @@ class CodeType: __lnotab: bytes, __freevars: tuple[str, ...] = ..., __cellvars: tuple[str, ...] = ..., - ) -> None: ... + ) -> Self: ... if sys.version_info >= (3, 11): def replace( self, @@ -311,7 +311,7 @@ class CodeType: @final class MappingProxyType(Mapping[_KT, _VT_co], Generic[_KT, _VT_co]): __hash__: ClassVar[None] # type: ignore[assignment] - def __init__(self, mapping: SupportsKeysAndGetItem[_KT, _VT_co]) -> None: ... + def __new__(cls, mapping: SupportsKeysAndGetItem[_KT, _VT_co]) -> Self: ... def __getitem__(self, __key: _KT) -> _VT_co: ... def __iter__(self) -> Iterator[_KT]: ... def __len__(self) -> int: ... @@ -444,7 +444,7 @@ class MethodType: def __name__(self) -> str: ... 
# inherited from the added function @property def __qualname__(self) -> str: ... # inherited from the added function - def __init__(self, __func: Callable[..., Any], __obj: object) -> None: ... + def __new__(cls, __func: Callable[..., Any], __obj: object) -> Self: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def __eq__(self, __value: object) -> bool: ... def __hash__(self) -> int: ... @@ -513,7 +513,7 @@ class ClassMethodDescriptorType: @final class TracebackType: - def __init__(self, tb_next: TracebackType | None, tb_frame: FrameType, tb_lasti: int, tb_lineno: int) -> None: ... + def __new__(cls, tb_next: TracebackType | None, tb_frame: FrameType, tb_lasti: int, tb_lineno: int) -> Self: ... tb_next: TracebackType | None # the rest are read-only even in 3.7 @property @@ -610,7 +610,7 @@ if sys.version_info >= (3, 9): def __args__(self) -> tuple[Any, ...]: ... @property def __parameters__(self) -> tuple[Any, ...]: ... - def __init__(self, origin: type, args: Any) -> None: ... + def __new__(cls, origin: type, args: Any) -> Self: ... def __getitem__(self, __typeargs: Any) -> GenericAlias: ... def __eq__(self, __value: object) -> bool: ... def __hash__(self) -> int: ... diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi index 2c1ebe6d7f95..6deb0ffd02b3 100644 --- a/mypy/typeshed/stdlib/typing.pyi +++ b/mypy/typeshed/stdlib/typing.pyi @@ -709,8 +709,10 @@ class IO(Iterator[AnyStr], Generic[AnyStr]): # See #8726 @property def mode(self) -> str: ... + # Usually str, but may be bytes if a bytes path was passed to open(). See #10737. + # If PEP 696 becomes available, we may want to use a defaulted TypeVar here. @property - def name(self) -> str: ... + def name(self) -> str | Any: ... @abstractmethod def close(self) -> None: ... 
@property diff --git a/mypy/typeshed/stdlib/typing_extensions.pyi b/mypy/typeshed/stdlib/typing_extensions.pyi index 9320dc50b6bb..b5e2341cd020 100644 --- a/mypy/typeshed/stdlib/typing_extensions.pyi +++ b/mypy/typeshed/stdlib/typing_extensions.pyi @@ -149,6 +149,7 @@ __all__ = [ "Collection", "Container", "Dict", + "Doc", "ForwardRef", "FrozenSet", "Generator", @@ -489,3 +490,9 @@ if sys.version_info >= (3, 13): else: def is_protocol(__tp: type) -> bool: ... def get_protocol_members(__tp: type) -> frozenset[str]: ... + +class Doc: + documentation: str + def __init__(self, __documentation: str) -> None: ... + def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... diff --git a/mypy/typeshed/stdlib/unittest/case.pyi b/mypy/typeshed/stdlib/unittest/case.pyi index 1f58f266ee89..aa04e16d62ec 100644 --- a/mypy/typeshed/stdlib/unittest/case.pyi +++ b/mypy/typeshed/stdlib/unittest/case.pyi @@ -126,9 +126,9 @@ class TestCase: @overload def assertLess(self, a: _T, b: SupportsDunderGT[_T], msg: Any = None) -> None: ... @overload - def assertLessEqual(self, a: SupportsDunderLT[_T], b: _T, msg: Any = None) -> None: ... + def assertLessEqual(self, a: SupportsDunderLE[_T], b: _T, msg: Any = None) -> None: ... @overload - def assertLessEqual(self, a: _T, b: SupportsDunderGT[_T], msg: Any = None) -> None: ... + def assertLessEqual(self, a: _T, b: SupportsDunderGE[_T], msg: Any = None) -> None: ... # `assertRaises`, `assertRaisesRegex`, and `assertRaisesRegexp` # are not using `ParamSpec` intentionally, # because they might be used with explicitly wrong arg types to raise some error in tests. 
diff --git a/mypy/typeshed/stdlib/xml/sax/__init__.pyi b/mypy/typeshed/stdlib/xml/sax/__init__.pyi index 8bcf902df8d8..f726eae0516f 100644 --- a/mypy/typeshed/stdlib/xml/sax/__init__.pyi +++ b/mypy/typeshed/stdlib/xml/sax/__init__.pyi @@ -2,12 +2,18 @@ import sys from _typeshed import ReadableBuffer, StrPath, SupportsRead, _T_co from collections.abc import Iterable from typing import Any, NoReturn, Protocol +from typing_extensions import TypeAlias from xml.sax.handler import ContentHandler as ContentHandler, ErrorHandler as ErrorHandler from xml.sax.xmlreader import Locator, XMLReader class _SupportsReadClose(SupportsRead[_T_co], Protocol[_T_co]): def close(self) -> None: ... +if sys.version_info >= (3, 8): + _Source: TypeAlias = StrPath | _SupportsReadClose[bytes] | _SupportsReadClose[str] +else: + _Source: TypeAlias = str | _SupportsReadClose[bytes] | _SupportsReadClose[str] + class SAXException(Exception): def __init__(self, msg: str, exception: Exception | None = None) -> None: ... def getMessage(self) -> str: ... @@ -28,20 +34,13 @@ class SAXReaderNotAvailable(SAXNotSupportedException): ... default_parser_list: list[str] if sys.version_info >= (3, 8): + def make_parser(parser_list: Iterable[str] = ()) -> XMLReader: ... - def parse( - source: StrPath | _SupportsReadClose[bytes] | _SupportsReadClose[str], - handler: ContentHandler, - errorHandler: ErrorHandler = ..., - ) -> None: ... else: + def make_parser(parser_list: list[str] = []) -> XMLReader: ... - def parse( - source: str | _SupportsReadClose[bytes] | _SupportsReadClose[str], - handler: ContentHandler, - errorHandler: ErrorHandler = ..., - ) -> None: ... +def parse(source: _Source, handler: ContentHandler, errorHandler: ErrorHandler = ...) -> None: ... def parseString(string: ReadableBuffer | str, handler: ContentHandler, errorHandler: ErrorHandler | None = ...) -> None: ... def _create_parser(parser_name: str) -> XMLReader: ... 
diff --git a/mypy/typeshed/stdlib/xml/sax/handler.pyi b/mypy/typeshed/stdlib/xml/sax/handler.pyi index 63b725bd6da6..30fe31d51374 100644 --- a/mypy/typeshed/stdlib/xml/sax/handler.pyi +++ b/mypy/typeshed/stdlib/xml/sax/handler.pyi @@ -1,5 +1,6 @@ import sys from typing import NoReturn +from xml.sax import xmlreader version: str @@ -9,19 +10,19 @@ class ErrorHandler: def warning(self, exception: BaseException) -> None: ... class ContentHandler: - def setDocumentLocator(self, locator): ... - def startDocument(self): ... - def endDocument(self): ... - def startPrefixMapping(self, prefix, uri): ... - def endPrefixMapping(self, prefix): ... - def startElement(self, name, attrs): ... - def endElement(self, name): ... - def startElementNS(self, name, qname, attrs): ... - def endElementNS(self, name, qname): ... - def characters(self, content): ... - def ignorableWhitespace(self, whitespace): ... - def processingInstruction(self, target, data): ... - def skippedEntity(self, name): ... + def setDocumentLocator(self, locator: xmlreader.Locator) -> None: ... + def startDocument(self) -> None: ... + def endDocument(self) -> None: ... + def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... + def endPrefixMapping(self, prefix) -> None: ... + def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ... + def endElement(self, name: str) -> None: ... + def startElementNS(self, name: tuple[str, str], qname: str, attrs: xmlreader.AttributesNSImpl) -> None: ... + def endElementNS(self, name: tuple[str, str], qname: str) -> None: ... + def characters(self, content: str) -> None: ... + def ignorableWhitespace(self, whitespace: str) -> None: ... + def processingInstruction(self, target: str, data: str) -> None: ... + def skippedEntity(self, name: str) -> None: ... class DTDHandler: def notationDecl(self, name, publicId, systemId): ... 
diff --git a/mypy/typeshed/stdlib/xml/sax/saxutils.pyi b/mypy/typeshed/stdlib/xml/sax/saxutils.pyi index 0d9223770c6a..06e03a1e4d06 100644 --- a/mypy/typeshed/stdlib/xml/sax/saxutils.pyi +++ b/mypy/typeshed/stdlib/xml/sax/saxutils.pyi @@ -2,7 +2,7 @@ from _typeshed import SupportsWrite from codecs import StreamReaderWriter, StreamWriter from collections.abc import Mapping from io import RawIOBase, TextIOBase -from xml.sax import handler, xmlreader +from xml.sax import _Source, handler, xmlreader def escape(data: str, entities: Mapping[str, str] = {}) -> str: ... def unescape(data: str, entities: Mapping[str, str] = {}) -> str: ... @@ -15,46 +15,46 @@ class XMLGenerator(handler.ContentHandler): encoding: str = "iso-8859-1", short_empty_elements: bool = False, ) -> None: ... - def startDocument(self): ... - def endDocument(self): ... - def startPrefixMapping(self, prefix, uri): ... - def endPrefixMapping(self, prefix): ... - def startElement(self, name, attrs): ... - def endElement(self, name): ... - def startElementNS(self, name, qname, attrs): ... - def endElementNS(self, name, qname): ... - def characters(self, content): ... - def ignorableWhitespace(self, content): ... - def processingInstruction(self, target, data): ... + def startDocument(self) -> None: ... + def endDocument(self) -> None: ... + def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... + def endPrefixMapping(self, prefix: str | None) -> None: ... + def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ... + def endElement(self, name: str) -> None: ... + def startElementNS(self, name: tuple[str, str], qname: str, attrs: xmlreader.AttributesNSImpl) -> None: ... + def endElementNS(self, name: tuple[str, str], qname: str) -> None: ... + def characters(self, content: str) -> None: ... + def ignorableWhitespace(self, content: str) -> None: ... + def processingInstruction(self, target: str, data: str) -> None: ... 
class XMLFilterBase(xmlreader.XMLReader): def __init__(self, parent: xmlreader.XMLReader | None = None) -> None: ... def error(self, exception): ... def fatalError(self, exception): ... def warning(self, exception): ... - def setDocumentLocator(self, locator): ... - def startDocument(self): ... - def endDocument(self): ... - def startPrefixMapping(self, prefix, uri): ... - def endPrefixMapping(self, prefix): ... - def startElement(self, name, attrs): ... - def endElement(self, name): ... - def startElementNS(self, name, qname, attrs): ... - def endElementNS(self, name, qname): ... - def characters(self, content): ... - def ignorableWhitespace(self, chars): ... - def processingInstruction(self, target, data): ... - def skippedEntity(self, name): ... + def setDocumentLocator(self, locator: xmlreader.Locator) -> None: ... + def startDocument(self) -> None: ... + def endDocument(self) -> None: ... + def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... + def endPrefixMapping(self, prefix: str | None) -> None: ... + def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ... + def endElement(self, name: str) -> None: ... + def startElementNS(self, name: tuple[str, str], qname: str, attrs: xmlreader.AttributesNSImpl) -> None: ... + def endElementNS(self, name: tuple[str, str], qname: str) -> None: ... + def characters(self, content: str) -> None: ... + def ignorableWhitespace(self, chars: str) -> None: ... + def processingInstruction(self, target: str, data: str) -> None: ... + def skippedEntity(self, name: str) -> None: ... def notationDecl(self, name, publicId, systemId): ... def unparsedEntityDecl(self, name, publicId, systemId, ndata): ... def resolveEntity(self, publicId, systemId): ... - def parse(self, source): ... + def parse(self, source: _Source) -> None: ... def setLocale(self, locale): ... - def getFeature(self, name): ... - def setFeature(self, name, state): ... - def getProperty(self, name): ... 
- def setProperty(self, name, value): ... - def getParent(self): ... - def setParent(self, parent): ... + def getFeature(self, name: str) -> object: ... + def setFeature(self, name: str, state: object) -> None: ... + def getProperty(self, name: str) -> object: ... + def setProperty(self, name: str, value: object) -> None: ... + def getParent(self) -> xmlreader.XMLReader: ... + def setParent(self, parent: xmlreader.XMLReader) -> None: ... def prepare_input_source(source, base=""): ... diff --git a/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi b/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi index 0bf167b04a37..74d2efb010cd 100644 --- a/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi +++ b/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi @@ -1,20 +1,23 @@ from collections.abc import Mapping +from typing import overload +from typing_extensions import Self, TypeAlias +from xml.sax.handler import ContentHandler, DTDHandler, EntityResolver, ErrorHandler class XMLReader: def parse(self, source): ... - def getContentHandler(self): ... - def setContentHandler(self, handler): ... - def getDTDHandler(self): ... - def setDTDHandler(self, handler): ... - def getEntityResolver(self): ... - def setEntityResolver(self, resolver): ... - def getErrorHandler(self): ... - def setErrorHandler(self, handler): ... + def getContentHandler(self) -> ContentHandler: ... + def setContentHandler(self, handler: ContentHandler) -> None: ... + def getDTDHandler(self) -> DTDHandler: ... + def setDTDHandler(self, handler: DTDHandler) -> None: ... + def getEntityResolver(self) -> EntityResolver: ... + def setEntityResolver(self, resolver: EntityResolver) -> None: ... + def getErrorHandler(self) -> ErrorHandler: ... + def setErrorHandler(self, handler: ErrorHandler) -> None: ... def setLocale(self, locale): ... - def getFeature(self, name): ... - def setFeature(self, name, state): ... - def getProperty(self, name): ... - def setProperty(self, name, value): ... + def getFeature(self, name: str) -> object: ... 
+ def setFeature(self, name: str, state: object) -> None: ... + def getProperty(self, name: str) -> object: ... + def setProperty(self, name: str, value: object) -> None: ... class IncrementalParser(XMLReader): def __init__(self, bufsize: int = 65536) -> None: ... @@ -45,27 +48,40 @@ class InputSource: class AttributesImpl: def __init__(self, attrs: Mapping[str, str]) -> None: ... - def getLength(self): ... - def getType(self, name): ... - def getValue(self, name): ... - def getValueByQName(self, name): ... - def getNameByQName(self, name): ... - def getQNameByName(self, name): ... - def getNames(self): ... - def getQNames(self): ... + def getLength(self) -> int: ... + def getType(self, name: str) -> str: ... + def getValue(self, name: str) -> str: ... + def getValueByQName(self, name: str) -> str: ... + def getNameByQName(self, name: str) -> str: ... + def getQNameByName(self, name: str) -> str: ... + def getNames(self) -> list[str]: ... + def getQNames(self) -> list[str]: ... def __len__(self) -> int: ... - def __getitem__(self, name): ... - def keys(self): ... - def __contains__(self, name): ... - def get(self, name, alternative=None): ... - def copy(self): ... - def items(self): ... - def values(self): ... + def __getitem__(self, name: str) -> str: ... + def keys(self) -> list[str]: ... + def __contains__(self, name: str) -> bool: ... + @overload + def get(self, name: str, alternative: None = None) -> str | None: ... + @overload + def get(self, name: str, alternative: str) -> str: ... + def copy(self) -> Self: ... + def items(self) -> list[tuple[str, str]]: ... + def values(self) -> list[str]: ... + +_NSName: TypeAlias = tuple[str | None, str] class AttributesNSImpl(AttributesImpl): - def __init__(self, attrs: Mapping[tuple[str, str], str], qnames: Mapping[tuple[str, str], str]) -> None: ... - def getValueByQName(self, name): ... - def getNameByQName(self, name): ... - def getQNameByName(self, name): ... - def getQNames(self): ... - def copy(self): ... 
+ def __init__(self, attrs: Mapping[_NSName, str], qnames: Mapping[_NSName, str]) -> None: ... + def getType(self, name: _NSName) -> str: ... # type: ignore[override] + def getValue(self, name: _NSName) -> str: ... # type: ignore[override] + def getNameByQName(self, name: str) -> _NSName: ... # type: ignore[override] + def getQNameByName(self, name: _NSName) -> str: ... # type: ignore[override] + def getNames(self) -> list[_NSName]: ... # type: ignore[override] + def __getitem__(self, name: _NSName) -> str: ... # type: ignore[override] + def keys(self) -> list[_NSName]: ... # type: ignore[override] + def __contains__(self, name: _NSName) -> bool: ... # type: ignore[override] + @overload # type: ignore[override] + def get(self, name: _NSName, alternative: None = None) -> str | None: ... + @overload # type: ignore[override] + def get(self, name: _NSName, alternative: str) -> str: ... + def items(self) -> list[tuple[_NSName, str]]: ... # type: ignore[override] diff --git a/mypy/typeshed/stdlib/xxlimited.pyi b/mypy/typeshed/stdlib/xxlimited.pyi index b2fb72ad2c0b..d4f41bbaf22a 100644 --- a/mypy/typeshed/stdlib/xxlimited.pyi +++ b/mypy/typeshed/stdlib/xxlimited.pyi @@ -7,6 +7,8 @@ class Str: ... @final class Xxo: def demo(self) -> None: ... + if sys.version_info >= (3, 11) and sys.platform != "win32": + x_exports: int def foo(__i: int, __j: int) -> Any: ... def new() -> Xxo: ... From 3d3e482e03c1efeaca9a1033acf06f56c1dfdf86 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sun, 1 Oct 2023 02:06:33 -0700 Subject: [PATCH 077/144] Fix cases of type object handling for overloads (#16168) Fixes most of #12320. I didn't add tests for every code path because it's niche. 
I also didn't fix everything, in particular the cases where we proceed to use `ret_type` --- mypy/checker.py | 4 ++-- mypy/checkexpr.py | 8 ++++---- mypy/messages.py | 2 +- mypy/plugins/proper_plugin.py | 3 +-- mypy/typeops.py | 2 +- test-data/unit/check-abstract.test | 11 ++++++++++- test-data/unit/pythoneval.test | 4 ++-- 7 files changed, 21 insertions(+), 13 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index bdb636541db0..1a7a7e25d525 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -2986,7 +2986,7 @@ def check_assignment( p_rvalue_type = get_proper_type(rvalue_type) p_lvalue_type = get_proper_type(lvalue_type) if ( - isinstance(p_rvalue_type, CallableType) + isinstance(p_rvalue_type, FunctionLike) and p_rvalue_type.is_type_obj() and ( p_rvalue_type.type_object().is_abstract @@ -3771,7 +3771,7 @@ def split_around_star( def type_is_iterable(self, type: Type) -> bool: type = get_proper_type(type) - if isinstance(type, CallableType) and type.is_type_obj(): + if isinstance(type, FunctionLike) and type.is_type_obj(): type = type.fallback return is_subtype( type, self.named_generic_type("typing.Iterable", [AnyType(TypeOfAny.special_form)]) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index df4077100efb..e81fba9bc9ef 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -694,7 +694,7 @@ def check_runtime_protocol_test(self, e: CallExpr) -> None: for expr in mypy.checker.flatten(e.args[1]): tp = get_proper_type(self.chk.lookup_type(expr)) if ( - isinstance(tp, CallableType) + isinstance(tp, FunctionLike) and tp.is_type_obj() and tp.type_object().is_protocol and not tp.type_object().runtime_protocol @@ -704,7 +704,7 @@ def check_runtime_protocol_test(self, e: CallExpr) -> None: def check_protocol_issubclass(self, e: CallExpr) -> None: for expr in mypy.checker.flatten(e.args[1]): tp = get_proper_type(self.chk.lookup_type(expr)) - if isinstance(tp, CallableType) and tp.is_type_obj() and tp.type_object().is_protocol: + if isinstance(tp, 
FunctionLike) and tp.is_type_obj() and tp.type_object().is_protocol: attr_members = non_method_protocol_members(tp.type_object()) if attr_members: self.chk.msg.report_non_method_protocol(tp.type_object(), attr_members, e) @@ -4190,7 +4190,7 @@ def visit_index_with_type( elif isinstance(left_type, TypedDictType): return self.visit_typeddict_index_expr(left_type, e.index) elif ( - isinstance(left_type, CallableType) + isinstance(left_type, FunctionLike) and left_type.is_type_obj() and left_type.type_object().is_enum ): @@ -5832,7 +5832,7 @@ def has_abstract_type_part(self, caller_type: ProperType, callee_type: ProperTyp def has_abstract_type(self, caller_type: ProperType, callee_type: ProperType) -> bool: return ( - isinstance(caller_type, CallableType) + isinstance(caller_type, FunctionLike) and isinstance(callee_type, TypeType) and caller_type.is_type_obj() and (caller_type.type_object().is_abstract or caller_type.type_object().is_protocol) diff --git a/mypy/messages.py b/mypy/messages.py index 47ebd94f3d21..5d03bf1babb9 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -416,7 +416,7 @@ def has_no_attr( elif member == "__getitem__": # Indexed get. 
# TODO: Fix this consistently in format_type - if isinstance(original_type, CallableType) and original_type.is_type_obj(): + if isinstance(original_type, FunctionLike) and original_type.is_type_obj(): self.fail( "The type {} is not generic and not indexable".format( format_type(original_type, self.options) diff --git a/mypy/plugins/proper_plugin.py b/mypy/plugins/proper_plugin.py index ab93f0d126db..a1fd05272b65 100644 --- a/mypy/plugins/proper_plugin.py +++ b/mypy/plugins/proper_plugin.py @@ -17,7 +17,6 @@ from mypy.subtypes import is_proper_subtype from mypy.types import ( AnyType, - CallableType, FunctionLike, Instance, NoneTyp, @@ -131,7 +130,7 @@ def is_dangerous_target(typ: ProperType) -> bool: """Is this a dangerous target (right argument) for an isinstance() check?""" if isinstance(typ, TupleType): return any(is_dangerous_target(get_proper_type(t)) for t in typ.items) - if isinstance(typ, CallableType) and typ.is_type_obj(): + if isinstance(typ, FunctionLike) and typ.is_type_obj(): return typ.type_object().has_base("mypy.types.Type") return False diff --git a/mypy/typeops.py b/mypy/typeops.py index 10efa32c4b91..37817933a397 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -989,7 +989,7 @@ def custom_special_method(typ: Type, name: str, check_all: bool = False) -> bool return any(custom_special_method(t, name) for t in typ.items) if isinstance(typ, TupleType): return custom_special_method(tuple_fallback(typ), name, check_all) - if isinstance(typ, CallableType) and typ.is_type_obj(): + if isinstance(typ, FunctionLike) and typ.is_type_obj(): # Look up __method__ on the metaclass for class objects. 
return custom_special_method(typ.fallback, name, check_all) if isinstance(typ, AnyType): diff --git a/test-data/unit/check-abstract.test b/test-data/unit/check-abstract.test index 299074050baa..7f91eb8e7145 100644 --- a/test-data/unit/check-abstract.test +++ b/test-data/unit/check-abstract.test @@ -241,7 +241,7 @@ f(GoodAlias) [case testInstantiationAbstractsInTypeForVariables] # flags: --no-strict-optional -from typing import Type +from typing import Type, overload from abc import abstractmethod class A: @@ -269,6 +269,15 @@ if int(): var_old = B # E: Can only assign concrete classes to a variable of type "Type[A]" if int(): var_old = C # OK + +class D(A): + @overload + def __new__(cls, a) -> "D": ... + @overload + def __new__(cls) -> "D": ... + def __new__(cls, a=None) -> "D": ... +if int(): + var = D # E: Can only assign concrete classes to a variable of type "Type[A]" [out] [case testInstantiationAbstractsInTypeForClassMethods] diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index c5be30eac1b7..3d8e8d09a5ad 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1801,9 +1801,9 @@ C = str | int D: TypeAlias = str | int [out] _testTypeAliasNotSupportedWithNewStyleUnion.py:3: error: Invalid type alias: expression is not a valid type -_testTypeAliasNotSupportedWithNewStyleUnion.py:3: error: Value of type "Type[type]" is not indexable +_testTypeAliasNotSupportedWithNewStyleUnion.py:3: error: The type "Type[type]" is not generic and not indexable _testTypeAliasNotSupportedWithNewStyleUnion.py:4: error: Invalid type alias: expression is not a valid type -_testTypeAliasNotSupportedWithNewStyleUnion.py:4: error: Value of type "Type[type]" is not indexable +_testTypeAliasNotSupportedWithNewStyleUnion.py:4: error: The type "Type[type]" is not generic and not indexable _testTypeAliasNotSupportedWithNewStyleUnion.py:5: error: Invalid type alias: expression is not a valid type 
_testTypeAliasNotSupportedWithNewStyleUnion.py:5: error: Unsupported left operand type for | ("Type[str]") _testTypeAliasNotSupportedWithNewStyleUnion.py:6: error: Invalid type alias: expression is not a valid type From 99ba048f4887eb0fbd55cde3f4243f6c177cbf7e Mon Sep 17 00:00:00 2001 From: Thomas Grainger Date: Sun, 1 Oct 2023 13:56:12 -0700 Subject: [PATCH 078/144] tuple slice should not propagate fallback (#16154) Fixes #8776 --- mypy/checkexpr.py | 2 +- mypy/types.py | 9 +++++++-- test-data/unit/check-literal.test | 9 +++++---- test-data/unit/check-tuples.test | 10 ++++++++++ 4 files changed, 23 insertions(+), 7 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index e81fba9bc9ef..a2141680b6cb 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -4271,7 +4271,7 @@ def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr) -> Typ items: list[Type] = [] for b, e, s in itertools.product(begin, end, stride): - item = left_type.slice(b, e, s) + item = left_type.slice(b, e, s, fallback=self.named_type("builtins.tuple")) if item is None: self.chk.fail(message_registry.AMBIGUOUS_SLICE_OF_VARIADIC_TUPLE, slic) return AnyType(TypeOfAny.from_error) diff --git a/mypy/types.py b/mypy/types.py index 9817043db6c2..34ea96be25ee 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -2416,7 +2416,12 @@ def copy_modified( items = self.items return TupleType(items, fallback, self.line, self.column) - def slice(self, begin: int | None, end: int | None, stride: int | None) -> TupleType | None: + def slice( + self, begin: int | None, end: int | None, stride: int | None, *, fallback: Instance | None + ) -> TupleType | None: + if fallback is None: + fallback = self.partial_fallback + if any(isinstance(t, UnpackType) for t in self.items): total = len(self.items) unpack_index = find_unpack_in_list(self.items) @@ -2462,7 +2467,7 @@ def slice(self, begin: int | None, end: int | None, stride: int | None) -> Tuple return None else: slice_items = 
self.items[begin:end:stride] - return TupleType(slice_items, self.partial_fallback, self.line, self.column, self.implicit) + return TupleType(slice_items, fallback, self.line, self.column, self.implicit) class TypedDictType(ProperType): diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test index 08c709c6b777..d9ad68385ad1 100644 --- a/test-data/unit/check-literal.test +++ b/test-data/unit/check-literal.test @@ -1872,8 +1872,9 @@ reveal_type(tup2[idx3]) # N: Revealed type is "__main__.D" reveal_type(tup2[idx4]) # N: Revealed type is "__main__.E" reveal_type(tup2[idx_neg1]) # N: Revealed type is "__main__.E" tup2[idx5] # E: Tuple index out of range -reveal_type(tup2[idx2:idx4]) # N: Revealed type is "Tuple[__main__.C, __main__.D, fallback=__main__.Tup2Class]" -reveal_type(tup2[::idx2]) # N: Revealed type is "Tuple[__main__.A, __main__.C, __main__.E, fallback=__main__.Tup2Class]" +reveal_type(tup2[idx2:idx4]) # N: Revealed type is "Tuple[__main__.C, __main__.D]" +reveal_type(tup2[::idx2]) # N: Revealed type is "Tuple[__main__.A, __main__.C, __main__.E]" +tup3: Tup2Class = tup2[:] # E: Incompatible types in assignment (expression has type "Tuple[A, B, C, D, E]", variable has type "Tup2Class") [builtins fixtures/slice.pyi] [case testLiteralIntelligentIndexingTypedDict] @@ -1977,8 +1978,8 @@ reveal_type(tup1[0::idx1]) # N: Revealed type is "Union[Tuple[__main__.A, _ tup1[idx_bad] # E: Tuple index out of range reveal_type(tup2[idx1]) # N: Revealed type is "Union[__main__.B, __main__.C]" -reveal_type(tup2[idx1:idx2]) # N: Revealed type is "Union[Tuple[__main__.B, __main__.C, fallback=__main__.Tup2Class], Tuple[__main__.B, __main__.C, __main__.D, fallback=__main__.Tup2Class], Tuple[__main__.C, fallback=__main__.Tup2Class], Tuple[__main__.C, __main__.D, fallback=__main__.Tup2Class]]" -reveal_type(tup2[0::idx1]) # N: Revealed type is "Union[Tuple[__main__.A, __main__.B, __main__.C, __main__.D, __main__.E, fallback=__main__.Tup2Class], 
Tuple[__main__.A, __main__.C, __main__.E, fallback=__main__.Tup2Class]]" +reveal_type(tup2[idx1:idx2]) # N: Revealed type is "Union[Tuple[__main__.B, __main__.C], Tuple[__main__.B, __main__.C, __main__.D], Tuple[__main__.C], Tuple[__main__.C, __main__.D]]" +reveal_type(tup2[0::idx1]) # N: Revealed type is "Union[Tuple[__main__.A, __main__.B, __main__.C, __main__.D, __main__.E], Tuple[__main__.A, __main__.C, __main__.E]]" tup2[idx_bad] # E: Tuple index out of range [builtins fixtures/slice.pyi] [out] diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index 9dfee38bc0c6..1447321c0c49 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -1681,3 +1681,13 @@ def g(t: Tuple): reveal_type(zip(*t)) # N: Revealed type is "typing.Iterator[builtins.tuple[Any, ...]]" reveal_type(zip(t)) # N: Revealed type is "typing.Iterator[Tuple[Any]]" [builtins fixtures/tuple.pyi] + +[case testTupleSubclassSlice] +from typing import Tuple + +class A: ... + +class tuple_aa_subclass(Tuple[A, A]): ... 
+ +inst_tuple_aa_subclass: tuple_aa_subclass = tuple_aa_subclass((A(), A()))[:] # E: Incompatible types in assignment (expression has type "Tuple[A, A]", variable has type "tuple_aa_subclass") +[builtins fixtures/tuple.pyi] From bcd4ff231554102a6698615882074e440ebfc3c9 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Sun, 1 Oct 2023 23:48:53 +0100 Subject: [PATCH 079/144] stubtest: hint when args in stub need to be keyword-only (#16210) --- mypy/stubtest.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index a5028581f7a1..e80ea4eac71f 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -890,7 +890,10 @@ def _verify_signature( # If the variable is in runtime.kwonly, it's just mislabelled as not a # keyword-only argument if stub_arg.variable.name not in runtime.kwonly: - yield f'runtime does not have argument "{stub_arg.variable.name}"' + msg = f'runtime does not have argument "{stub_arg.variable.name}"' + if runtime.varkw is not None: + msg += ". Maybe you forgot to make it keyword-only in the stub?" 
+ yield msg else: yield f'stub argument "{stub_arg.variable.name}" is not keyword-only' if stub.varpos is not None: From 96803e0817c751e82fe88695647e20a8f050dee9 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Tue, 3 Oct 2023 02:43:29 -0700 Subject: [PATCH 080/144] Add meta test for new diff logic (#16211) Follow up to #16112 --- mypy/test/helpers.py | 23 +++++++++------ mypy/test/meta/test_diff_helper.py | 47 ++++++++++++++++++++++++++++++ 2 files changed, 61 insertions(+), 9 deletions(-) create mode 100644 mypy/test/meta/test_diff_helper.py diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py index a53e16e27dfa..dc34931427ec 100644 --- a/mypy/test/helpers.py +++ b/mypy/test/helpers.py @@ -8,7 +8,7 @@ import shutil import sys import time -from typing import Any, Callable, Iterable, Iterator, Pattern +from typing import IO, Any, Callable, Iterable, Iterator, Pattern # Exporting Suite as alias to TestCase for backwards compatibility # TODO: avoid aliasing - import and subclass TestCase directly @@ -70,7 +70,12 @@ def diff_ranges( def render_diff_range( - ranges: list[tuple[int, int]], content: list[str], colour: str | None = None + ranges: list[tuple[int, int]], + content: list[str], + *, + colour: str | None = None, + output: IO[str] = sys.stderr, + indent: int = 2, ) -> None: for i, line_range in enumerate(ranges): is_matching = i % 2 == 1 @@ -83,20 +88,20 @@ def render_diff_range( and j < len(lines) - 3 ): if j == 3: - sys.stderr.write(" ...\n") + output.write(" " * indent + "...\n") continue if not is_matching and colour: - sys.stderr.write(colour) + output.write(colour) - sys.stderr.write(" " + line) + output.write(" " * indent + line) if not is_matching: if colour: - sys.stderr.write("\033[0m") - sys.stderr.write(" (diff)") + output.write("\033[0m") + output.write(" (diff)") - sys.stderr.write("\n") + output.write("\n") def assert_string_arrays_equal(expected: list[str], actual: list[str], msg: str) -> None: 
@@ -129,7 +134,7 @@ def assert_string_arrays_equal(expected: list[str], actual: list[str], msg: str) sys.stderr.write( "Update the test output using --update-data -n0 " - "(you can additionally use the -k selector to update only specific tests)" + "(you can additionally use the -k selector to update only specific tests)\n" ) pytest.fail(msg, pytrace=False) diff --git a/mypy/test/meta/test_diff_helper.py b/mypy/test/meta/test_diff_helper.py new file mode 100644 index 000000000000..047751fee1d2 --- /dev/null +++ b/mypy/test/meta/test_diff_helper.py @@ -0,0 +1,47 @@ +import io + +from mypy.test.helpers import Suite, diff_ranges, render_diff_range + + +class DiffHelperSuite(Suite): + def test_render_diff_range(self) -> None: + expected = ["hello", "world"] + actual = ["goodbye", "world"] + + expected_ranges, actual_ranges = diff_ranges(expected, actual) + + output = io.StringIO() + render_diff_range(expected_ranges, expected, output=output) + assert output.getvalue() == " hello (diff)\n world\n" + output = io.StringIO() + render_diff_range(actual_ranges, actual, output=output) + assert output.getvalue() == " goodbye (diff)\n world\n" + + expected = ["a", "b", "c", "d", "e", "f", "g", "h", "circle", "i", "j"] + actual = ["a", "b", "c", "d", "e", "f", "g", "h", "square", "i", "j"] + + expected_ranges, actual_ranges = diff_ranges(expected, actual) + + output = io.StringIO() + render_diff_range(expected_ranges, expected, output=output, indent=0) + assert output.getvalue() == "a\nb\nc\n...\nf\ng\nh\ncircle (diff)\ni\nj\n" + output = io.StringIO() + render_diff_range(actual_ranges, actual, output=output, indent=0) + assert output.getvalue() == "a\nb\nc\n...\nf\ng\nh\nsquare (diff)\ni\nj\n" + + def test_diff_ranges(self) -> None: + a = ["hello", "world"] + b = ["hello", "world"] + + assert diff_ranges(a, b) == ( + [(0, 0), (0, 2), (2, 2), (2, 2)], + [(0, 0), (0, 2), (2, 2), (2, 2)], + ) + + a = ["hello", "world"] + b = ["goodbye", "world"] + + assert diff_ranges(a, b) == ( + 
[(0, 1), (1, 2), (2, 2), (2, 2)], + [(0, 1), (1, 2), (2, 2), (2, 2)], + ) From d839a0b1013873e27eae334a21b56fa57cd5e178 Mon Sep 17 00:00:00 2001 From: Eli Schwartz Date: Wed, 4 Oct 2023 03:31:18 -0400 Subject: [PATCH 081/144] tests: avoid leaving artifacts in the source tree (#16201) When running the mypy unittests, most of the time any output files are produced into a temporary directory and cleaned up. In one case, it wasn't. Fix this for test_capi. --- mypyc/lib-rt/setup.py | 29 ++++++++++++++++++++- mypyc/test/test_external.py | 50 ++++++++++++++++++------------------- 2 files changed, 53 insertions(+), 26 deletions(-) diff --git a/mypyc/lib-rt/setup.py b/mypyc/lib-rt/setup.py index a31b705cd723..ef81b794c9bd 100644 --- a/mypyc/lib-rt/setup.py +++ b/mypyc/lib-rt/setup.py @@ -5,7 +5,10 @@ from __future__ import annotations +import os +import subprocess import sys +from distutils.command.build_ext import build_ext from distutils.core import Extension, setup from typing import Any @@ -17,6 +20,30 @@ kwargs = {} compile_args = ["--std=c++11"] + +class build_ext_custom(build_ext): + def get_library_names(self): + return ["gtest"] + + def run(self): + gtest_dir = os.path.abspath( + os.path.join(os.path.dirname(__file__), "..", "external", "googletest") + ) + + os.makedirs(self.build_temp, exist_ok=True) + + # Build Google Test, the C++ framework we use for testing C code. + # The source code for Google Test is copied to this repository. 
+ subprocess.check_call( + ["make", "-f", os.path.join(gtest_dir, "make", "Makefile"), f"GTEST_DIR={gtest_dir}"], + cwd=self.build_temp, + ) + + self.library_dirs = [self.build_temp] + + return build_ext.run(self) + + setup( name="test_capi", version="0.1", @@ -34,10 +61,10 @@ ], depends=["CPy.h", "mypyc_util.h", "pythonsupport.h"], extra_compile_args=["-Wno-unused-function", "-Wno-sign-compare"] + compile_args, - library_dirs=["../external/googletest/make"], libraries=["gtest"], include_dirs=["../external/googletest", "../external/googletest/include"], **kwargs, ) ], + cmdclass={"build_ext": build_ext_custom}, ) diff --git a/mypyc/test/test_external.py b/mypyc/test/test_external.py index 6deabd81255e..22eb8019133c 100644 --- a/mypyc/test/test_external.py +++ b/mypyc/test/test_external.py @@ -5,6 +5,7 @@ import os import subprocess import sys +import tempfile import unittest base_dir = os.path.join(os.path.dirname(__file__), "..", "..") @@ -16,34 +17,33 @@ class TestExternal(unittest.TestCase): @unittest.skipIf(sys.platform.startswith("win"), "rt tests don't work on windows") def test_c_unit_test(self) -> None: """Run C unit tests in a subprocess.""" - # Build Google Test, the C++ framework we use for testing C code. - # The source code for Google Test is copied to this repository. cppflags: list[str] = [] env = os.environ.copy() if sys.platform == "darwin": cppflags += ["-mmacosx-version-min=10.10", "-stdlib=libc++"] env["CPPFLAGS"] = " ".join(cppflags) - subprocess.check_call( - ["make", "libgtest.a"], - env=env, - cwd=os.path.join(base_dir, "mypyc", "external", "googletest", "make"), - ) # Build Python wrapper for C unit tests. - env = os.environ.copy() - env["CPPFLAGS"] = " ".join(cppflags) - status = subprocess.check_call( - [sys.executable, "setup.py", "build_ext", "--inplace"], - env=env, - cwd=os.path.join(base_dir, "mypyc", "lib-rt"), - ) - # Run C unit tests. 
- env = os.environ.copy() - if "GTEST_COLOR" not in os.environ: - env["GTEST_COLOR"] = "yes" # Use fancy colors - status = subprocess.call( - [sys.executable, "-c", "import sys, test_capi; sys.exit(test_capi.run_tests())"], - env=env, - cwd=os.path.join(base_dir, "mypyc", "lib-rt"), - ) - if status != 0: - raise AssertionError("make test: C unit test failure") + + with tempfile.TemporaryDirectory() as tmpdir: + status = subprocess.check_call( + [ + sys.executable, + "setup.py", + "build_ext", + f"--build-lib={tmpdir}", + f"--build-temp={tmpdir}", + ], + env=env, + cwd=os.path.join(base_dir, "mypyc", "lib-rt"), + ) + # Run C unit tests. + env = os.environ.copy() + if "GTEST_COLOR" not in os.environ: + env["GTEST_COLOR"] = "yes" # Use fancy colors + status = subprocess.call( + [sys.executable, "-c", "import sys, test_capi; sys.exit(test_capi.run_tests())"], + env=env, + cwd=tmpdir, + ) + if status != 0: + raise AssertionError("make test: C unit test failure") From b1ba661122dc39d9bbc53cf5df334c9f56b1a729 Mon Sep 17 00:00:00 2001 From: Anthony Sottile Date: Wed, 4 Oct 2023 03:43:37 -0400 Subject: [PATCH 082/144] __qualname__ and __module__ are available in class bodies (#16215) Resolves #10570 Resolves #6473 --- mypy/semanal.py | 5 ++++- test-data/unit/check-classes.test | 8 ++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index 6e103e5d382c..a476b62b31ec 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -5569,7 +5569,7 @@ def lookup( if not suppress_errors: self.name_not_defined(name, ctx) return None - # 2. Class attributes (if within class definition) + # 2a. Class attributes (if within class definition) if self.type and not self.is_func_scope() and name in self.type.names: node = self.type.names[name] if not node.implicit: @@ -5579,6 +5579,9 @@ def lookup( # Defined through self.x assignment implicit_name = True implicit_node = node + # 2b. 
Class attributes __qualname__ and __module__ + if self.type and not self.is_func_scope() and name in {"__qualname__", "__module__"}: + return SymbolTableNode(MDEF, Var(name, self.str_type())) # 3. Local (function) scopes for table in reversed(self.locals): if table is not None and name in table: diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 4bc1e50f7be9..cd60ec7c9a9c 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -8001,3 +8001,11 @@ f5(1) # E: Argument 1 to "f5" has incompatible type "int"; expected "Integral" # N: Types from "numbers" aren't supported for static type checking \ # N: See https://peps.python.org/pep-0484/#the-numeric-tower \ # N: Consider using a protocol instead, such as typing.SupportsFloat + +[case testImplicitClassScopedNames] +class C: + reveal_type(__module__) # N: Revealed type is "builtins.str" + reveal_type(__qualname__) # N: Revealed type is "builtins.str" + def f(self) -> None: + __module__ # E: Name "__module__" is not defined + __qualname__ # E: Name "__qualname__" is not defined From a1df3353a7bc0d7ff7b3459e95d0f9684b325e9b Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Wed, 4 Oct 2023 14:49:05 +0100 Subject: [PATCH 083/144] Bump ruff and black to their latest versions (#16221) Closes #16218 --- .pre-commit-config.yaml | 4 ++-- mypy/build.py | 2 +- mypy/main.py | 2 +- mypy/metastore.py | 2 +- mypy/plugins/common.py | 2 +- pyproject.toml | 1 + setup.py | 2 +- test-requirements.txt | 4 ++-- 8 files changed, 10 insertions(+), 9 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8650a2868cd6..e92d498fa3cc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,11 +6,11 @@ repos: - id: trailing-whitespace - id: end-of-file-fixer - repo: https://github.com/psf/black-pre-commit-mirror - rev: 23.7.0 # must match test-requirements.txt + rev: 23.9.1 # must match test-requirements.txt hooks: - id: black - repo: 
https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.281 # must match test-requirements.txt + rev: v0.0.292 # must match test-requirements.txt hooks: - id: ruff args: [--exit-non-zero-on-fix] diff --git a/mypy/build.py b/mypy/build.py index 39629c2dc455..b481cc6ad0dc 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -3024,7 +3024,7 @@ def dump_graph(graph: Graph, stdout: TextIO | None = None) -> None: if state.path: try: size = os.path.getsize(state.path) - except os.error: + except OSError: pass node.sizes[mod] = size for dep in state.dependencies: diff --git a/mypy/main.py b/mypy/main.py index 3eb8a76a6de3..dff1a0362ba2 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -33,7 +33,7 @@ def stat_proxy(path: str) -> os.stat_result: try: st = orig_stat(path) - except os.error as err: + except OSError as err: print(f"stat({path!r}) -> {err}") raise else: diff --git a/mypy/metastore.py b/mypy/metastore.py index 16cbd5adc9c8..0547f94cd671 100644 --- a/mypy/metastore.py +++ b/mypy/metastore.py @@ -112,7 +112,7 @@ def write(self, name: str, data: str, mtime: float | None = None) -> bool: if mtime is not None: os.utime(path, times=(mtime, mtime)) - except os.error: + except OSError: return False return True diff --git a/mypy/plugins/common.py b/mypy/plugins/common.py index 84d50b7086c6..03041bfcebcd 100644 --- a/mypy/plugins/common.py +++ b/mypy/plugins/common.py @@ -154,7 +154,7 @@ def find_shallow_matching_overload_item(overload: Overloaded, call: CallExpr) -> ): ok = False break - elif isinstance(arg_type, LiteralType) and type(arg_type.value) is bool: + elif isinstance(arg_type, LiteralType) and isinstance(arg_type.value, bool): if not any(parse_bool(arg) == arg_type.value for arg in args): ok = False break diff --git a/pyproject.toml b/pyproject.toml index 1d6562756e22..de32618f1a39 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,6 +58,7 @@ unfixable = [ "F601", # automatic fix might obscure issue "F602", # automatic fix might obscure issue "B018", # 
automatic fix might obscure issue + "UP036", # sometimes it's better to just noqa this ] extend-exclude = [ diff --git a/setup.py b/setup.py index bbb655ea4537..7e7793a406d0 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ import sys from typing import TYPE_CHECKING, Any -if sys.version_info < (3, 8, 0): +if sys.version_info < (3, 8, 0): # noqa: UP036 sys.stderr.write("ERROR: You need Python 3.8 or later to use mypy.\n") exit(1) diff --git a/test-requirements.txt b/test-requirements.txt index 6f7bec0375ad..bdaad16fa88e 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,7 +1,7 @@ -r mypy-requirements.txt -r build-requirements.txt attrs>=18.0 -black==23.7.0 # must match version in .pre-commit-config.yaml +black==23.9.1 # must match version in .pre-commit-config.yaml filelock>=3.3.0 # lxml 4.9.3 switched to manylinux_2_28, the wheel builder still uses manylinux2014 lxml>=4.9.1,<4.9.3; (python_version<'3.11' or sys_platform!='win32') and python_version<'3.12' @@ -11,6 +11,6 @@ psutil>=4.0 pytest>=7.4.0 pytest-xdist>=1.34.0 pytest-cov>=2.10.0 -ruff==0.0.280 # must match version in .pre-commit-config.yaml +ruff==0.0.292 # must match version in .pre-commit-config.yaml setuptools>=65.5.1 tomli>=1.1.0 # needed even on py311+ so the self check passes with --python-version 3.7 From 10dfafe089a75dc117586ebab35723da66309398 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Wed, 4 Oct 2023 18:58:28 +0100 Subject: [PATCH 084/144] Remove stubs packages from `stubinfo.py` where the runtime package has added a `py.typed` file (#16226) All of these stubs packages have been removed from typeshed, due to the runtime package having added a `py.typed` file. 
--- mypy/stubinfo.py | 9 --------- test-data/unit/pythoneval.test | 12 ++++++------ 2 files changed, 6 insertions(+), 15 deletions(-) diff --git a/mypy/stubinfo.py b/mypy/stubinfo.py index 0d76a6215238..9d8dfbe43f37 100644 --- a/mypy/stubinfo.py +++ b/mypy/stubinfo.py @@ -26,17 +26,14 @@ def stub_distribution_name(prefix: str) -> str: "croniter": "types-croniter", "dataclasses": "types-dataclasses", "dateparser": "types-dateparser", - "datetimerange": "types-DateTimeRange", "dateutil": "types-python-dateutil", "decorator": "types-decorator", "deprecated": "types-Deprecated", "docutils": "types-docutils", "first": "types-first", - "geoip2": "types-geoip2", "gflags": "types-python-gflags", "google.protobuf": "types-protobuf", "markdown": "types-Markdown", - "maxminddb": "types-maxminddb", "mock": "types-mock", "OpenSSL": "types-pyOpenSSL", "paramiko": "types-paramiko", @@ -80,8 +77,6 @@ def stub_distribution_name(prefix: str) -> str: "PIL": "types-Pillow", "PyInstaller": "types-pyinstaller", "Xlib": "types-python-xlib", - "annoy": "types-annoy", - "appdirs": "types-appdirs", "aws_xray_sdk": "types-aws-xray-sdk", "babel": "types-babel", "backports.ssl_match_hostname": "types-backports.ssl_match_hostname", @@ -96,7 +91,6 @@ def stub_distribution_name(prefix: str) -> str: "consolemenu": "types-console-menu", "crontab": "types-python-crontab", "d3dshot": "types-D3DShot", - "dj_database_url": "types-dj-database-url", "dockerfile_parse": "types-dockerfile-parse", "docopt": "types-docopt", "editdistance": "types-editdistance", @@ -111,7 +105,6 @@ def stub_distribution_name(prefix: str) -> str: "flake8_typing_imports": "types-flake8-typing-imports", "flask_cors": "types-Flask-Cors", "flask_migrate": "types-Flask-Migrate", - "flask_sqlalchemy": "types-Flask-SQLAlchemy", "fpdf": "types-fpdf2", "gdb": "types-gdb", "google.cloud.ndb": "types-google-cloud-ndb", @@ -162,7 +155,6 @@ def stub_distribution_name(prefix: str) -> str: "tree_sitter": "types-tree-sitter", 
"tree_sitter_languages": "types-tree-sitter-languages", "ttkthemes": "types-ttkthemes", - "urllib3": "types-urllib3", "vobject": "types-vobject", "whatthepatch": "types-whatthepatch", "win32": "types-pywin32", @@ -172,7 +164,6 @@ def stub_distribution_name(prefix: str) -> str: "win32comext": "types-pywin32", "win32gui": "types-pywin32", "xmltodict": "types-xmltodict", - "xxhash": "types-xxhash", "zxcvbn": "types-zxcvbn", # Stub packages that are not from typeshed # Since these can be installed automatically via --install-types, we have a high trust bar diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 3d8e8d09a5ad..7dd2b2f76f8c 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1568,24 +1568,24 @@ note: A user-defined top-level module with name "typing" is not supported # flags: --ignore-missing-imports import scribe # No Python 3 stubs available for scribe from scribe import x -import maxminddb # Python 3 stubs available for maxminddb +import docutils # Python 3 stubs available for docutils import foobar_asdf import jack # This has a stubs package but was never bundled with mypy, so ignoring works [out] -_testIgnoreImportIfNoPython3StubAvailable.py:4: error: Library stubs not installed for "maxminddb" -_testIgnoreImportIfNoPython3StubAvailable.py:4: note: Hint: "python3 -m pip install types-maxminddb" +_testIgnoreImportIfNoPython3StubAvailable.py:4: error: Library stubs not installed for "docutils" +_testIgnoreImportIfNoPython3StubAvailable.py:4: note: Hint: "python3 -m pip install types-docutils" _testIgnoreImportIfNoPython3StubAvailable.py:4: note: (or run "mypy --install-types" to install all missing stub packages) _testIgnoreImportIfNoPython3StubAvailable.py:4: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports [case testNoPython3StubAvailable] import scribe from scribe import x -import maxminddb +import docutils [out] _testNoPython3StubAvailable.py:1: error: 
Cannot find implementation or library stub for module named "scribe" _testNoPython3StubAvailable.py:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports -_testNoPython3StubAvailable.py:3: error: Library stubs not installed for "maxminddb" -_testNoPython3StubAvailable.py:3: note: Hint: "python3 -m pip install types-maxminddb" +_testNoPython3StubAvailable.py:3: error: Library stubs not installed for "docutils" +_testNoPython3StubAvailable.py:3: note: Hint: "python3 -m pip install types-docutils" _testNoPython3StubAvailable.py:3: note: (or run "mypy --install-types" to install all missing stub packages) From d54e8b30301620ce5cc59a0c304b8423f07a7b60 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sun, 8 Oct 2023 19:32:51 +0100 Subject: [PATCH 085/144] Support variadic tuple packing/unpacking (#16205) This is includes also related things such as tuple concatenation, special-cased tuple "re-packing", and star tuple unpacking in homogeneous collections. It looks like we are very close to the finish line (the only major missing feature is type narrowing using `len()`, apart from this I just need to do couple technical things, and make one final search for missed code paths). Some notes: * Unfortunately, star items on l.h.s create lists at runtime. This means there are various cases where `list[object]` is the best type we can have. * Note I now infer "precise" types for expressions like `(x, *y, z)`, where `y` is say `tuple[int, ...]`. This may cause errors for code that previously worked (when we will turn this feature on). For example `(1, *[], 2)[42]` will be an error. As usual, I propose to try to be strict, and relax if people will complain (FWIW, I expect very few false positives from this). * It may look like `Unpack` can now "leak" if it was never used explicitly. This is not the case, it is just that experimental features are enabled in tests. * There are couple minor changes that affect code without variadic types. 
Previously tuple type context was used inconsistently for situations with star unpacks, I clean it up a bit (for my tests). Also I infer `Any`-like l.h.s types after an error in tuple unpacking (when needed) to avoid extra "Cannot determine type" errors in my tests. --- mypy/argmap.py | 23 ++- mypy/checker.py | 107 ++++++++++++- mypy/checkexpr.py | 113 +++++++++++++- mypy/constraints.py | 6 +- mypy/message_registry.py | 3 + mypyc/irbuild/mapper.py | 8 +- test-data/unit/check-tuples.test | 8 +- test-data/unit/check-typevar-tuple.test | 192 ++++++++++++++++++++++++ 8 files changed, 437 insertions(+), 23 deletions(-) diff --git a/mypy/argmap.py b/mypy/argmap.py index ec8463fd0625..e6700c9f1092 100644 --- a/mypy/argmap.py +++ b/mypy/argmap.py @@ -14,6 +14,8 @@ Type, TypedDictType, TypeOfAny, + TypeVarTupleType, + UnpackType, get_proper_type, ) @@ -174,6 +176,7 @@ def expand_actual_type( actual_kind: nodes.ArgKind, formal_name: str | None, formal_kind: nodes.ArgKind, + allow_unpack: bool = False, ) -> Type: """Return the actual (caller) type(s) of a formal argument with the given kinds. @@ -189,6 +192,11 @@ def expand_actual_type( original_actual = actual_type actual_type = get_proper_type(actual_type) if actual_kind == nodes.ARG_STAR: + if isinstance(actual_type, TypeVarTupleType): + # This code path is hit when *Ts is passed to a callable and various + # special-handling didn't catch this. The best thing we can do is to use + # the upper bound. + actual_type = get_proper_type(actual_type.upper_bound) if isinstance(actual_type, Instance) and actual_type.args: from mypy.subtypes import is_subtype @@ -209,7 +217,20 @@ def expand_actual_type( self.tuple_index = 1 else: self.tuple_index += 1 - return actual_type.items[self.tuple_index - 1] + item = actual_type.items[self.tuple_index - 1] + if isinstance(item, UnpackType) and not allow_unpack: + # An upack item that doesn't have special handling, use upper bound as above. 
+ unpacked = get_proper_type(item.type) + if isinstance(unpacked, TypeVarTupleType): + fallback = get_proper_type(unpacked.upper_bound) + else: + fallback = unpacked + assert ( + isinstance(fallback, Instance) + and fallback.type.fullname == "builtins.tuple" + ) + item = fallback.args[0] + return item elif isinstance(actual_type, ParamSpecType): # ParamSpec is valid in *args but it can't be unpacked. return actual_type diff --git a/mypy/checker.py b/mypy/checker.py index 1a7a7e25d525..e1b65a95ae98 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -205,10 +205,13 @@ TypeType, TypeVarId, TypeVarLikeType, + TypeVarTupleType, TypeVarType, UnboundType, UninhabitedType, UnionType, + UnpackType, + find_unpack_in_list, flatten_nested_unions, get_proper_type, get_proper_types, @@ -3430,6 +3433,37 @@ def is_assignable_slot(self, lvalue: Lvalue, typ: Type | None) -> bool: return all(self.is_assignable_slot(lvalue, u) for u in typ.items) return False + def flatten_rvalues(self, rvalues: list[Expression]) -> list[Expression]: + """Flatten expression list by expanding those * items that have tuple type. + + For each regular type item in the tuple type use a TempNode(), for an Unpack + item use a corresponding StarExpr(TempNode()). 
+ """ + new_rvalues = [] + for rv in rvalues: + if not isinstance(rv, StarExpr): + new_rvalues.append(rv) + continue + typ = get_proper_type(self.expr_checker.accept(rv.expr)) + if not isinstance(typ, TupleType): + new_rvalues.append(rv) + continue + for t in typ.items: + if not isinstance(t, UnpackType): + new_rvalues.append(TempNode(t)) + else: + unpacked = get_proper_type(t.type) + if isinstance(unpacked, TypeVarTupleType): + fallback = unpacked.upper_bound + else: + assert ( + isinstance(unpacked, Instance) + and unpacked.type.fullname == "builtins.tuple" + ) + fallback = unpacked + new_rvalues.append(StarExpr(TempNode(fallback))) + return new_rvalues + def check_assignment_to_multiple_lvalues( self, lvalues: list[Lvalue], @@ -3439,18 +3473,16 @@ def check_assignment_to_multiple_lvalues( ) -> None: if isinstance(rvalue, (TupleExpr, ListExpr)): # Recursively go into Tuple or List expression rhs instead of - # using the type of rhs, because this allowed more fine grained + # using the type of rhs, because this allows more fine-grained # control in cases like: a, b = [int, str] where rhs would get # type List[object] rvalues: list[Expression] = [] iterable_type: Type | None = None last_idx: int | None = None - for idx_rval, rval in enumerate(rvalue.items): + for idx_rval, rval in enumerate(self.flatten_rvalues(rvalue.items)): if isinstance(rval, StarExpr): typs = get_proper_type(self.expr_checker.accept(rval.expr)) - if isinstance(typs, TupleType): - rvalues.extend([TempNode(typ) for typ in typs.items]) - elif self.type_is_iterable(typs) and isinstance(typs, Instance): + if self.type_is_iterable(typs) and isinstance(typs, Instance): if iterable_type is not None and iterable_type != self.iterable_item_type( typs, rvalue ): @@ -3517,8 +3549,32 @@ def check_assignment_to_multiple_lvalues( self.check_multi_assignment(lvalues, rvalue, context, infer_lvalue_type) def check_rvalue_count_in_assignment( - self, lvalues: list[Lvalue], rvalue_count: int, context: Context + 
self, + lvalues: list[Lvalue], + rvalue_count: int, + context: Context, + rvalue_unpack: int | None = None, ) -> bool: + if rvalue_unpack is not None: + if not any(isinstance(e, StarExpr) for e in lvalues): + self.fail("Variadic tuple unpacking requires a star target", context) + return False + if len(lvalues) > rvalue_count: + self.fail(message_registry.TOO_MANY_TARGETS_FOR_VARIADIC_UNPACK, context) + return False + left_star_index = next(i for i, lv in enumerate(lvalues) if isinstance(lv, StarExpr)) + left_prefix = left_star_index + left_suffix = len(lvalues) - left_star_index - 1 + right_prefix = rvalue_unpack + right_suffix = rvalue_count - rvalue_unpack - 1 + if left_suffix > right_suffix or left_prefix > right_prefix: + # Case of asymmetric unpack like: + # rv: tuple[int, *Ts, int, int] + # x, y, *xs, z = rv + # it is technically valid, but is tricky to reason about. + # TODO: support this (at least if the r.h.s. unpack is a homogeneous tuple). + self.fail(message_registry.TOO_MANY_TARGETS_FOR_VARIADIC_UNPACK, context) + return True if any(isinstance(lvalue, StarExpr) for lvalue in lvalues): if len(lvalues) - 1 > rvalue_count: self.msg.wrong_number_values_to_unpack(rvalue_count, len(lvalues) - 1, context) @@ -3552,6 +3608,13 @@ def check_multi_assignment( if len(relevant_items) == 1: rvalue_type = get_proper_type(relevant_items[0]) + if ( + isinstance(rvalue_type, TupleType) + and find_unpack_in_list(rvalue_type.items) is not None + ): + # Normalize for consistent handling with "old-style" homogeneous tuples. 
+ rvalue_type = expand_type(rvalue_type, {}) + if isinstance(rvalue_type, AnyType): for lv in lvalues: if isinstance(lv, StarExpr): @@ -3663,7 +3726,10 @@ def check_multi_assignment_from_tuple( undefined_rvalue: bool, infer_lvalue_type: bool = True, ) -> None: - if self.check_rvalue_count_in_assignment(lvalues, len(rvalue_type.items), context): + rvalue_unpack = find_unpack_in_list(rvalue_type.items) + if self.check_rvalue_count_in_assignment( + lvalues, len(rvalue_type.items), context, rvalue_unpack=rvalue_unpack + ): star_index = next( (i for i, lv in enumerate(lvalues) if isinstance(lv, StarExpr)), len(lvalues) ) @@ -3708,12 +3774,37 @@ def check_multi_assignment_from_tuple( self.check_assignment(lv, self.temp_node(rv_type, context), infer_lvalue_type) if star_lv: list_expr = ListExpr( - [self.temp_node(rv_type, context) for rv_type in star_rv_types] + [ + self.temp_node(rv_type, context) + if not isinstance(rv_type, UnpackType) + else StarExpr(self.temp_node(rv_type.type, context)) + for rv_type in star_rv_types + ] ) list_expr.set_line(context) self.check_assignment(star_lv.expr, list_expr, infer_lvalue_type) for lv, rv_type in zip(right_lvs, right_rv_types): self.check_assignment(lv, self.temp_node(rv_type, context), infer_lvalue_type) + else: + # Store meaningful Any types for lvalues, errors are already given + # by check_rvalue_count_in_assignment() + if infer_lvalue_type: + for lv in lvalues: + if ( + isinstance(lv, NameExpr) + and isinstance(lv.node, Var) + and lv.node.type is None + ): + lv.node.type = AnyType(TypeOfAny.from_error) + elif isinstance(lv, StarExpr): + if ( + isinstance(lv.expr, NameExpr) + and isinstance(lv.expr.node, Var) + and lv.expr.node.type is None + ): + lv.expr.node.type = self.named_generic_type( + "builtins.list", [AnyType(TypeOfAny.from_error)] + ) def lvalue_type_for_inference(self, lvalues: list[Lvalue], rvalue_type: TupleType) -> Type: star_index = next( diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 
a2141680b6cb..fd155ff87379 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -95,6 +95,7 @@ YieldExpr, YieldFromExpr, ) +from mypy.options import TYPE_VAR_TUPLE from mypy.plugin import ( FunctionContext, FunctionSigContext, @@ -2510,7 +2511,11 @@ def check_argument_types( ) self.msg.invalid_keyword_var_arg(actual_type, is_mapping, context) expanded_actual = mapper.expand_actual_type( - actual_type, actual_kind, callee.arg_names[i], callee_arg_kind + actual_type, + actual_kind, + callee.arg_names[i], + callee_arg_kind, + allow_unpack=isinstance(callee_arg_type, UnpackType), ) check_arg( expanded_actual, @@ -3338,7 +3343,45 @@ def visit_op_expr(self, e: OpExpr) -> Type: if isinstance(proper_right_type, TupleType): right_radd_method = proper_right_type.partial_fallback.type.get("__radd__") if right_radd_method is None: - return self.concat_tuples(proper_left_type, proper_right_type) + # One cannot have two variadic items in the same tuple. + if ( + find_unpack_in_list(proper_left_type.items) is None + or find_unpack_in_list(proper_right_type.items) is None + ): + return self.concat_tuples(proper_left_type, proper_right_type) + elif ( + TYPE_VAR_TUPLE in self.chk.options.enable_incomplete_feature + and isinstance(proper_right_type, Instance) + and self.chk.type_is_iterable(proper_right_type) + ): + # Handle tuple[X, Y] + tuple[Z, ...] = tuple[X, Y, *tuple[Z, ...]]. + right_radd_method = proper_right_type.type.get("__radd__") + if ( + right_radd_method is None + and proper_left_type.partial_fallback.type.fullname == "builtins.tuple" + and find_unpack_in_list(proper_left_type.items) is None + ): + item_type = self.chk.iterable_item_type(proper_right_type, e) + mapped = self.chk.named_generic_type("builtins.tuple", [item_type]) + return proper_left_type.copy_modified( + items=proper_left_type.items + [UnpackType(mapped)] + ) + if TYPE_VAR_TUPLE in self.chk.options.enable_incomplete_feature: + # Handle tuple[X, ...] + tuple[Y, Z] = tuple[*tuple[X, ...], Y, Z]. 
+ if ( + e.op == "+" + and isinstance(proper_left_type, Instance) + and proper_left_type.type.fullname == "builtins.tuple" + ): + proper_right_type = get_proper_type(self.accept(e.right)) + if ( + isinstance(proper_right_type, TupleType) + and proper_right_type.partial_fallback.type.fullname == "builtins.tuple" + and find_unpack_in_list(proper_right_type.items) is None + ): + return proper_right_type.copy_modified( + items=[UnpackType(proper_left_type)] + proper_right_type.items + ) if e.op in operators.op_methods: method = operators.op_methods[e.op] @@ -4721,6 +4764,19 @@ def check_lst_expr(self, e: ListExpr | SetExpr | TupleExpr, fullname: str, tag: )[0] return remove_instance_last_known_values(out) + def tuple_context_matches(self, expr: TupleExpr, ctx: TupleType) -> bool: + ctx_unpack_index = find_unpack_in_list(ctx.items) + if ctx_unpack_index is None: + # For fixed tuples accept everything that can possibly match, even if this + # requires all star items to be empty. + return len([e for e in expr.items if not isinstance(e, StarExpr)]) <= len(ctx.items) + # For variadic context, the only easy case is when structure matches exactly. + # TODO: try using tuple type context in more cases. + if len([e for e in expr.items if not isinstance(e, StarExpr)]) != 1: + return False + expr_star_index = next(i for i, lv in enumerate(expr.items) if isinstance(lv, StarExpr)) + return len(expr.items) == len(ctx.items) and ctx_unpack_index == expr_star_index + def visit_tuple_expr(self, e: TupleExpr) -> Type: """Type check a tuple expression.""" # Try to determine type context for type inference. 
@@ -4730,7 +4786,7 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type: tuples_in_context = [ t for t in get_proper_types(type_context.items) - if (isinstance(t, TupleType) and len(t.items) == len(e.items)) + if (isinstance(t, TupleType) and self.tuple_context_matches(e, t)) or is_named_instance(t, TUPLE_LIKE_INSTANCE_NAMES) ] if len(tuples_in_context) == 1: @@ -4740,7 +4796,7 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type: # more than one. Either way, we can't decide on a context. pass - if isinstance(type_context, TupleType): + if isinstance(type_context, TupleType) and self.tuple_context_matches(e, type_context): type_context_items = type_context.items elif type_context and is_named_instance(type_context, TUPLE_LIKE_INSTANCE_NAMES): assert isinstance(type_context, Instance) @@ -4751,6 +4807,11 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type: # items that match a position in e, and we'll worry about type # mismatches later. + unpack_in_context = False + if type_context_items is not None: + unpack_in_context = find_unpack_in_list(type_context_items) is not None + seen_unpack_in_items = False + # Infer item types. Give up if there's a star expression # that's not a Tuple. items: list[Type] = [] @@ -4763,12 +4824,44 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type: # TupleExpr, flatten it, so we can benefit from the # context? Counterargument: Why would anyone write # (1, *(2, 3)) instead of (1, 2, 3) except in a test? - tt = self.accept(item.expr) + if unpack_in_context: + # Note: this logic depends on full structure match in tuple_context_matches(). + assert type_context_items + ctx_item = type_context_items[j] + assert isinstance(ctx_item, UnpackType) + ctx = ctx_item.type + else: + ctx = None + tt = self.accept(item.expr, ctx) tt = get_proper_type(tt) if isinstance(tt, TupleType): + if find_unpack_in_list(tt.items) is not None: + if seen_unpack_in_items: + # Multiple unpack items are not allowed in tuples, + # fall back to instance type. 
+ return self.check_lst_expr(e, "builtins.tuple", "") + else: + seen_unpack_in_items = True items.extend(tt.items) - j += len(tt.items) + # Note: this logic depends on full structure match in tuple_context_matches(). + if unpack_in_context: + j += 1 + else: + # If there is an unpack in expressions, but not in context, this will + # result in an error later, just do something predictable here. + j += len(tt.items) else: + if ( + TYPE_VAR_TUPLE in self.chk.options.enable_incomplete_feature + and not seen_unpack_in_items + ): + # Handle (x, *y, z), where y is e.g. tuple[Y, ...]. + if isinstance(tt, Instance) and self.chk.type_is_iterable(tt): + item_type = self.chk.iterable_item_type(tt, e) + mapped = self.chk.named_generic_type("builtins.tuple", [item_type]) + items.append(UnpackType(mapped)) + seen_unpack_in_items = True + continue # A star expression that's not a Tuple. # Treat the whole thing as a variable-length tuple. return self.check_lst_expr(e, "builtins.tuple", "") @@ -4781,7 +4874,13 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type: items.append(tt) # This is a partial fallback item type. A precise type will be calculated on demand. fallback_item = AnyType(TypeOfAny.special_form) - return TupleType(items, self.chk.named_generic_type("builtins.tuple", [fallback_item])) + result: ProperType = TupleType( + items, self.chk.named_generic_type("builtins.tuple", [fallback_item]) + ) + if seen_unpack_in_items: + # Return already normalized tuple type just in case. 
+ result = expand_type(result, {}) + return result def fast_dict_type(self, e: DictExpr) -> Type | None: """ diff --git a/mypy/constraints.py b/mypy/constraints.py index ebd6765e8e82..58d0f4dbed29 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -156,7 +156,11 @@ def infer_constraints_for_callable( continue expanded_actual = mapper.expand_actual_type( - actual_arg_type, arg_kinds[actual], callee.arg_names[i], callee.arg_kinds[i] + actual_arg_type, + arg_kinds[actual], + callee.arg_names[i], + callee.arg_kinds[i], + allow_unpack=True, ) if arg_kinds[actual] != ARG_STAR or isinstance( diff --git a/mypy/message_registry.py b/mypy/message_registry.py index d75a1fab1b66..dc46eb503390 100644 --- a/mypy/message_registry.py +++ b/mypy/message_registry.py @@ -84,6 +84,9 @@ def with_additional_msg(self, info: str) -> ErrorMessage: MUST_HAVE_NONE_RETURN_TYPE: Final = ErrorMessage('The return type of "{}" must be None') TUPLE_INDEX_OUT_OF_RANGE: Final = ErrorMessage("Tuple index out of range") AMBIGUOUS_SLICE_OF_VARIADIC_TUPLE: Final = ErrorMessage("Ambiguous slice of a variadic tuple") +TOO_MANY_TARGETS_FOR_VARIADIC_UNPACK: Final = ErrorMessage( + "Too many assignment targets for variadic unpack" +) INVALID_SLICE_INDEX: Final = ErrorMessage("Slice index must be an integer, SupportsIndex or None") CANNOT_INFER_LAMBDA_TYPE: Final = ErrorMessage("Cannot infer type of lambda") CANNOT_ACCESS_INIT: Final = ( diff --git a/mypyc/irbuild/mapper.py b/mypyc/irbuild/mapper.py index 5b77b4b1537b..a3abbb1f84fb 100644 --- a/mypyc/irbuild/mapper.py +++ b/mypyc/irbuild/mapper.py @@ -19,6 +19,7 @@ UnboundType, UninhabitedType, UnionType, + find_unpack_in_list, get_proper_type, ) from mypyc.ir.class_ir import ClassIR @@ -112,8 +113,11 @@ def type_to_rtype(self, typ: Type | None) -> RType: return object_rprimitive elif isinstance(typ, TupleType): # Use our unboxed tuples for raw tuples but fall back to - # being boxed for NamedTuple. 
- if typ.partial_fallback.type.fullname == "builtins.tuple": + # being boxed for NamedTuple or for variadic tuples. + if ( + typ.partial_fallback.type.fullname == "builtins.tuple" + and find_unpack_in_list(typ.items) is None + ): return RTuple([self.type_to_rtype(t) for t in typ.items]) else: return tuple_rprimitive diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index 1447321c0c49..76225360a7c1 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -1100,15 +1100,15 @@ reveal_type(b) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtin [case testTupleWithStarExpr2] a = [1] b = (0, *a) -reveal_type(b) # N: Revealed type is "builtins.tuple[builtins.int, ...]" +reveal_type(b) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.int, ...]]]" [builtins fixtures/tuple.pyi] [case testTupleWithStarExpr3] a = [''] b = (0, *a) -reveal_type(b) # N: Revealed type is "builtins.tuple[builtins.object, ...]" +reveal_type(b) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.str, ...]]]" c = (*a, '') -reveal_type(c) # N: Revealed type is "builtins.tuple[builtins.str, ...]" +reveal_type(c) # N: Revealed type is "Tuple[Unpack[builtins.tuple[builtins.str, ...]], builtins.str]" [builtins fixtures/tuple.pyi] [case testTupleWithStarExpr4] @@ -1333,7 +1333,7 @@ reveal_type(subtup if int() else tup2) # N: Revealed type is "builtins.tuple[bu [case testTupleWithUndersizedContext] a = ([1], 'x') if int(): - a = ([], 'x', 1) # E: Incompatible types in assignment (expression has type "Tuple[List[int], str, int]", variable has type "Tuple[List[int], str]") + a = ([], 'x', 1) # E: Incompatible types in assignment (expression has type "Tuple[List[Never], str, int]", variable has type "Tuple[List[int], str]") [builtins fixtures/tuple.pyi] [case testTupleWithOversizedContext] diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index 
850b7ef8a524..0212518bdec0 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -1527,6 +1527,198 @@ x = c1 x = c2 [builtins fixtures/tuple.pyi] +[case testUnpackingVariadicTuplesTypeVar] +from typing import Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +def foo(arg: Tuple[int, Unpack[Ts], str]) -> None: + x1, y1, z1 = arg # E: Variadic tuple unpacking requires a star target + reveal_type(x1) # N: Revealed type is "Any" + reveal_type(y1) # N: Revealed type is "Any" + reveal_type(z1) # N: Revealed type is "Any" + x2, *y2, z2 = arg + reveal_type(x2) # N: Revealed type is "builtins.int" + reveal_type(y2) # N: Revealed type is "builtins.list[builtins.object]" + reveal_type(z2) # N: Revealed type is "builtins.str" + x3, *y3 = arg + reveal_type(x3) # N: Revealed type is "builtins.int" + reveal_type(y3) # N: Revealed type is "builtins.list[builtins.object]" + *y4, z4 = arg + reveal_type(y4) # N: Revealed type is "builtins.list[builtins.object]" + reveal_type(z4) # N: Revealed type is "builtins.str" + x5, xx5, *y5, z5, zz5 = arg # E: Too many assignment targets for variadic unpack + reveal_type(x5) # N: Revealed type is "Any" + reveal_type(xx5) # N: Revealed type is "Any" + reveal_type(y5) # N: Revealed type is "builtins.list[Any]" + reveal_type(z5) # N: Revealed type is "Any" + reveal_type(zz5) # N: Revealed type is "Any" +[builtins fixtures/tuple.pyi] + +[case testUnpackingVariadicTuplesHomogeneous] +from typing import Tuple +from typing_extensions import Unpack + +def bar(arg: Tuple[int, Unpack[Tuple[float, ...]], str]) -> None: + x1, y1, z1 = arg # E: Variadic tuple unpacking requires a star target + reveal_type(x1) # N: Revealed type is "Any" + reveal_type(y1) # N: Revealed type is "Any" + reveal_type(z1) # N: Revealed type is "Any" + x2, *y2, z2 = arg + reveal_type(x2) # N: Revealed type is "builtins.int" + reveal_type(y2) # N: Revealed type is "builtins.list[builtins.float]" + 
reveal_type(z2) # N: Revealed type is "builtins.str" + x3, *y3 = arg + reveal_type(x3) # N: Revealed type is "builtins.int" + reveal_type(y3) # N: Revealed type is "builtins.list[builtins.object]" + *y4, z4 = arg + reveal_type(y4) # N: Revealed type is "builtins.list[builtins.float]" + reveal_type(z4) # N: Revealed type is "builtins.str" + x5, xx5, *y5, z5, zz5 = arg # E: Too many assignment targets for variadic unpack + reveal_type(x5) # N: Revealed type is "Any" + reveal_type(xx5) # N: Revealed type is "Any" + reveal_type(y5) # N: Revealed type is "builtins.list[Any]" + reveal_type(z5) # N: Revealed type is "Any" + reveal_type(zz5) # N: Revealed type is "Any" +[builtins fixtures/tuple.pyi] + +[case testRepackingVariadicTuplesTypeVar] +from typing import Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +def foo(arg: Tuple[int, Unpack[Ts], str]) -> None: + x1, *y1, z1 = *arg, + reveal_type(x1) # N: Revealed type is "builtins.int" + reveal_type(y1) # N: Revealed type is "builtins.list[builtins.object]" + reveal_type(z1) # N: Revealed type is "builtins.str" + x2, *y2, z2 = 1, *arg, 2 + reveal_type(x2) # N: Revealed type is "builtins.int" + reveal_type(y2) # N: Revealed type is "builtins.list[builtins.object]" + reveal_type(z2) # N: Revealed type is "builtins.int" + x3, *y3 = *arg, 42 + reveal_type(x3) # N: Revealed type is "builtins.int" + reveal_type(y3) # N: Revealed type is "builtins.list[builtins.object]" + *y4, z4 = 42, *arg + reveal_type(y4) # N: Revealed type is "builtins.list[builtins.object]" + reveal_type(z4) # N: Revealed type is "builtins.str" + x5, xx5, *y5, z5, zz5 = 1, *arg, 2 + reveal_type(x5) # N: Revealed type is "builtins.int" + reveal_type(xx5) # N: Revealed type is "builtins.int" + reveal_type(y5) # N: Revealed type is "builtins.list[builtins.object]" + reveal_type(z5) # N: Revealed type is "builtins.str" + reveal_type(zz5) # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] + +[case 
testRepackingVariadicTuplesHomogeneous] +from typing import Tuple +from typing_extensions import Unpack + +def foo(arg: Tuple[int, Unpack[Tuple[float, ...]], str]) -> None: + x1, *y1, z1 = *arg, + reveal_type(x1) # N: Revealed type is "builtins.int" + reveal_type(y1) # N: Revealed type is "builtins.list[builtins.float]" + reveal_type(z1) # N: Revealed type is "builtins.str" + x2, *y2, z2 = 1, *arg, 2 + reveal_type(x2) # N: Revealed type is "builtins.int" + reveal_type(y2) # N: Revealed type is "builtins.list[builtins.object]" + reveal_type(z2) # N: Revealed type is "builtins.int" + x3, *y3 = *arg, 42 + reveal_type(x3) # N: Revealed type is "builtins.int" + reveal_type(y3) # N: Revealed type is "builtins.list[builtins.object]" + *y4, z4 = 42, *arg + reveal_type(y4) # N: Revealed type is "builtins.list[builtins.float]" + reveal_type(z4) # N: Revealed type is "builtins.str" + x5, xx5, *y5, z5, zz5 = 1, *arg, 2 + reveal_type(x5) # N: Revealed type is "builtins.int" + reveal_type(xx5) # N: Revealed type is "builtins.int" + reveal_type(y5) # N: Revealed type is "builtins.list[builtins.float]" + reveal_type(z5) # N: Revealed type is "builtins.str" + reveal_type(zz5) # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] + +[case testPackingVariadicTuplesTypeVar] +from typing import Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +def foo(arg: Tuple[int, Unpack[Ts], str]) -> None: + x = *arg, + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]" + y = 1, *arg, 2 + reveal_type(y) # N: Revealed type is "Tuple[builtins.int, builtins.int, Unpack[Ts`-1], builtins.str, builtins.int]" + z = (*arg, *arg) + reveal_type(z) # N: Revealed type is "builtins.tuple[builtins.object, ...]" +[builtins fixtures/tuple.pyi] + +[case testPackingVariadicTuplesHomogeneous] +from typing import Tuple +from typing_extensions import Unpack + +a: Tuple[float, ...] 
+b: Tuple[int, Unpack[Tuple[float, ...]], str] + +x = *a, +reveal_type(x) # N: Revealed type is "builtins.tuple[builtins.float, ...]" +y = 1, *a, 2 +reveal_type(y) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.int]" +z = (*a, *a) +reveal_type(z) # N: Revealed type is "builtins.tuple[builtins.float, ...]" + +x2 = *b, +reveal_type(x2) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]" +y2 = 1, *b, 2 +reveal_type(y2) # N: Revealed type is "Tuple[builtins.int, builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str, builtins.int]" +z2 = (*b, *b) +reveal_type(z2) # N: Revealed type is "builtins.tuple[builtins.object, ...]" +[builtins fixtures/tuple.pyi] + +[case testVariadicTupleInListSetExpr] +from typing import Tuple +from typing_extensions import TypeVarTuple, Unpack + +vt: Tuple[int, Unpack[Tuple[float, ...]], int] +reveal_type([1, *vt]) # N: Revealed type is "builtins.list[builtins.float]" +reveal_type({1, *vt}) # N: Revealed type is "builtins.set[builtins.float]" + +Ts = TypeVarTuple("Ts") +def foo(arg: Tuple[int, Unpack[Ts], str]) -> None: + reveal_type([1, *arg]) # N: Revealed type is "builtins.list[builtins.object]" + reveal_type({1, *arg}) # N: Revealed type is "builtins.set[builtins.object]" +[builtins fixtures/isinstancelist.pyi] + +[case testVariadicTupleInTupleContext] +from typing import Tuple, Optional +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +def test(x: Optional[Tuple[Unpack[Ts]]] = None) -> Tuple[Unpack[Ts]]: ... + +vt: Tuple[int, Unpack[Tuple[float, ...]], int] +vt = 1, *test(), 2 # OK, type context is used +vt2 = 1, *test(), 2 # E: Need type annotation for "vt2" +[builtins fixtures/tuple.pyi] + +[case testVariadicTupleConcatenation] +from typing import Tuple +from typing_extensions import TypeVarTuple, Unpack + +vtf: Tuple[float, ...] 
+vt: Tuple[int, Unpack[Tuple[float, ...]], int] + +reveal_type(vt + (1, 2)) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.int, Literal[1]?, Literal[2]?]" +reveal_type((1, 2) + vt) # N: Revealed type is "Tuple[Literal[1]?, Literal[2]?, builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.int]" +reveal_type(vt + vt) # N: Revealed type is "builtins.tuple[builtins.float, ...]" +reveal_type(vtf + (1, 2)) # N: Revealed type is "Tuple[Unpack[builtins.tuple[builtins.float, ...]], Literal[1]?, Literal[2]?]" +reveal_type((1, 2) + vtf) # N: Revealed type is "Tuple[Literal[1]?, Literal[2]?, Unpack[builtins.tuple[builtins.float, ...]]]" + +Ts = TypeVarTuple("Ts") +def foo(arg: Tuple[int, Unpack[Ts], str]) -> None: + reveal_type(arg + (1, 2)) # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str, Literal[1]?, Literal[2]?]" + reveal_type((1, 2) + arg) # N: Revealed type is "Tuple[Literal[1]?, Literal[2]?, builtins.int, Unpack[Ts`-1], builtins.str]" + reveal_type(arg + arg) # N: Revealed type is "builtins.tuple[builtins.object, ...]" +[builtins fixtures/tuple.pyi] + [case testTypeVarTupleAnyOverload] from typing import Any, Generic, overload, Tuple from typing_extensions import TypeVarTuple, Unpack From 3c7bdb22407dea87039e9fd3c551df157794c9f0 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Sun, 8 Oct 2023 21:36:58 +0300 Subject: [PATCH 086/144] Use SPDX license identifier (#16230) It does not change the license itself, only its identifier in `setup.py`, so external tools can read it better. 
Full list: https://spdx.org/licenses/ Closes https://github.com/python/mypy/issues/16228 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 7e7793a406d0..5dba26fb10e0 100644 --- a/setup.py +++ b/setup.py @@ -202,7 +202,7 @@ def run(self): author="Jukka Lehtosalo", author_email="jukka.lehtosalo@iki.fi", url="https://www.mypy-lang.org/", - license="MIT License", + license="MIT", py_modules=[], ext_modules=ext_modules, packages=find_packages(), From ff7ac75387d3b5c7d0eaa4573bf2a0723bf3a3fc Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sun, 8 Oct 2023 11:39:01 -0700 Subject: [PATCH 087/144] Add an extra for mypyc dependencies (#16229) Fixes #15579 --- mypyc/doc/getting_started.rst | 6 +++--- setup.py | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/mypyc/doc/getting_started.rst b/mypyc/doc/getting_started.rst index 2db8aae149ec..adc617419ffa 100644 --- a/mypyc/doc/getting_started.rst +++ b/mypyc/doc/getting_started.rst @@ -38,17 +38,17 @@ Installation ------------ Mypyc is shipped as part of the mypy distribution. Install mypy like -this (you need Python 3.5 or later): +this (you need Python 3.8 or later): .. code-block:: - $ python3 -m pip install -U mypy + $ python3 -m pip install -U 'mypy[mypyc]' On some systems you need to use this instead: .. code-block:: - $ python -m pip install -U mypy + $ python -m pip install -U 'mypy[mypyc]' Example program --------------- diff --git a/setup.py b/setup.py index 5dba26fb10e0..dcbdc96b3ccf 100644 --- a/setup.py +++ b/setup.py @@ -227,6 +227,7 @@ def run(self): # Same here. 
extras_require={ "dmypy": "psutil >= 4.0", + "mypyc": "setuptools >= 50", "python2": "", "reports": "lxml", "install-types": "pip", From e87b62fcda423a9cd6db9076f66459fe47491568 Mon Sep 17 00:00:00 2001 From: KotlinIsland <65446343+KotlinIsland@users.noreply.github.com> Date: Tue, 10 Oct 2023 00:00:29 +1000 Subject: [PATCH 088/144] =?UTF-8?q?(=F0=9F=8E=81)=20drop=20'dev'=20from=20?= =?UTF-8?q?3.12=20in=20the=20CI=20(#16239)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: KotlinIsland --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 3bcd9e059589..afa5d5823ea9 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -64,7 +64,7 @@ jobs: tox_extra_args: "-n 2" test_mypyc: true - name: Test suite with py312-ubuntu, mypyc-compiled - python: '3.12-dev' + python: '3.12' arch: x64 os: ubuntu-latest toxenv: py From 8b6d21373f44959d8aa194723e871e5468ad5c71 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Mon, 9 Oct 2023 23:33:18 -0700 Subject: [PATCH 089/144] Fix partially defined in the case of missing type maps (#15995) Thanks AlexWaygood for sending me on this adventure. This took me a while for me to debug! When we don't need to warn about unreachable code, we don't end up calling `self.is_noop_for_reachability(s)` (which is meant to tell us whether the code should be warned about or is `raise AssertionError` or `typing.assert_never(never)` or something). https://github.com/python/mypy/blob/6f650cff9ab21f81069e0ae30c92eae94219ea63/mypy/checker.py#L2748 This innocuous check has a side effect that turns out to be important for the partially undefined checks. These checks work by reaching into the type map populated by the checker. But if we never actually ended up analysing the code, we never populate the type map. 
This therefore changes things to assume that if we couldn't find the expression in the type map, it's probably because it was unreachable. --- mypy/partially_defined.py | 2 +- test-data/unit/check-possibly-undefined.test | 17 +++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index 47cbd671f168..b7f577110fa8 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -506,7 +506,7 @@ def visit_break_stmt(self, o: BreakStmt) -> None: self.tracker.skip_branch() def visit_expression_stmt(self, o: ExpressionStmt) -> None: - if isinstance(self.type_map.get(o.expr, None), UninhabitedType): + if isinstance(self.type_map.get(o.expr, None), (UninhabitedType, type(None))): self.tracker.skip_branch() super().visit_expression_stmt(o) diff --git a/test-data/unit/check-possibly-undefined.test b/test-data/unit/check-possibly-undefined.test index ebceef88b537..ae277949c049 100644 --- a/test-data/unit/check-possibly-undefined.test +++ b/test-data/unit/check-possibly-undefined.test @@ -1026,3 +1026,20 @@ class B: else: # Same as above but in a loop. b = a # E: Name "a" may be undefined + +[case testUnreachableCausingMissingTypeMap] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def --no-warn-unreachable +# Regression test for https://github.com/python/mypy/issues/15958 +from typing import Union, NoReturn + +def assert_never(__x: NoReturn) -> NoReturn: ... + +def foo(x: Union[int, str]) -> None: + if isinstance(x, str): + f = "foo" + elif isinstance(x, int): + f = "bar" + else: + assert_never(x) + f # OK +[builtins fixtures/tuple.pyi] From 2c1009ed7cde0247c859bbccb852490b8c91bd97 Mon Sep 17 00:00:00 2001 From: Valentin Stanciu <250871+svalentin@users.noreply.github.com> Date: Thu, 12 Oct 2023 11:58:18 +0100 Subject: [PATCH 090/144] show dmypy errors post serving (#16250) After dmypy starts serving, stdout and stderr gets captured. 
If we have an error, we assume we can send it to the client. However, if we have an error outside of client communication, that error is lost. The easiest way to see this is to run dmypy in daemonize mode, run a check once, then Control-C to send a KeyboardInterrupt. That exception is not printed though it should. After this change you can clearly see it. ``` term1$ python3 -m mypy.dmypy daemon term2$ python3 -m mypy.dmypy check -v test.py [... some output ...] term1$ [Control-C] ^CTraceback (most recent call last): File "/home/svalentin/src/mypy-svalentin/mypy/dmypy_server.py", line 220, in serve with server: File "/home/svalentin/src/mypy-svalentin/mypy/ipc.py", line 232, in __enter__ self.connection, _ = self.sock.accept() File "/usr/lib/python3.8/socket.py", line 292, in accept fd, addr = self._accept() KeyboardInterrupt Traceback (most recent call last): File "/usr/lib/python3.8/runpy.py", line 194, in _run_module_as_main return _run_code(code, main_globals, None, File "/usr/lib/python3.8/runpy.py", line 87, in _run_code exec(code, run_globals) File "/home/svalentin/src/mypy-svalentin/mypy/dmypy/__main__.py", line 6, in console_entry() File "/home/svalentin/src/mypy-svalentin/mypy/dmypy/client.py", line 748, in console_entry main(sys.argv[1:]) File "/home/svalentin/src/mypy-svalentin/mypy/dmypy/client.py", line 275, in main args.action(args) File "/home/svalentin/src/mypy-svalentin/mypy/dmypy/client.py", line 629, in do_daemon Server(options, args.status_file, timeout=args.timeout).serve() File "/home/svalentin/src/mypy-svalentin/mypy/dmypy_server.py", line 220, in serve with server: File "/home/svalentin/src/mypy-svalentin/mypy/ipc.py", line 232, in __enter__ self.connection, _ = self.sock.accept() File "/usr/lib/python3.8/socket.py", line 292, in accept fd, addr = self._accept() KeyboardInterrupt ``` --- mypy/dmypy_server.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index a50ebc5415ba..faa9a23fadfb 
100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -210,6 +210,8 @@ def serve(self) -> None: """Serve requests, synchronously (no thread or fork).""" command = None server = IPCServer(CONNECTION_NAME, self.timeout) + orig_stdout = sys.stdout + orig_stderr = sys.stderr try: with open(self.status_file, "w") as f: json.dump({"pid": os.getpid(), "connection_name": server.connection_name}, f) @@ -252,6 +254,10 @@ def serve(self) -> None: reset_global_state() sys.exit(0) finally: + # Revert stdout/stderr so we can see any errors. + sys.stdout = orig_stdout + sys.stderr = orig_stderr + # If the final command is something other than a clean # stop, remove the status file. (We can't just # simplify the logic and always remove the file, since From 72605dc12a89b9c12a502ebfad494b4b9d9b5160 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Thu, 12 Oct 2023 21:25:30 +0100 Subject: [PATCH 091/144] Fix crash on ParamSpec unification (#16251) Fixes https://github.com/python/mypy/issues/16245 Fixes https://github.com/python/mypy/issues/16248 Unfortunately I was a bit reckless with parentheses, but in my defense `unify_generic_callable()` is kind of broken for long time, as it can return "solutions" like ```{1: T`1}```. We need a more principled approach there (IIRC there is already an issue about this in the scope of `--new-type-inference`). (The fix is quite trivial so I am not going to wait for review too long to save time, unless there will be some issues in `mypy_primer` etc.) 
--- mypy/expandtype.py | 10 +++-- mypy/types.py | 10 ----- .../unit/check-parameter-specification.test | 37 +++++++++++++++++++ 3 files changed, 43 insertions(+), 14 deletions(-) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index b233561e19c2..4acb51e22268 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -241,7 +241,7 @@ def visit_param_spec(self, t: ParamSpecType) -> Type: return repl.copy_modified( flavor=t.flavor, prefix=t.prefix.copy_modified( - arg_types=self.expand_types(t.prefix.arg_types + repl.prefix.arg_types), + arg_types=self.expand_types(t.prefix.arg_types) + repl.prefix.arg_types, arg_kinds=t.prefix.arg_kinds + repl.prefix.arg_kinds, arg_names=t.prefix.arg_names + repl.prefix.arg_names, ), @@ -249,7 +249,7 @@ def visit_param_spec(self, t: ParamSpecType) -> Type: elif isinstance(repl, Parameters): assert t.flavor == ParamSpecFlavor.BARE return Parameters( - self.expand_types(t.prefix.arg_types + repl.arg_types), + self.expand_types(t.prefix.arg_types) + repl.arg_types, t.prefix.arg_kinds + repl.arg_kinds, t.prefix.arg_names + repl.arg_names, variables=[*t.prefix.variables, *repl.variables], @@ -333,12 +333,14 @@ def visit_callable_type(self, t: CallableType) -> CallableType: # the replacement is ignored. 
if isinstance(repl, Parameters): # We need to expand both the types in the prefix and the ParamSpec itself - t = t.expand_param_spec(repl) return t.copy_modified( - arg_types=self.expand_types(t.arg_types), + arg_types=self.expand_types(t.arg_types[:-2]) + repl.arg_types, + arg_kinds=t.arg_kinds[:-2] + repl.arg_kinds, + arg_names=t.arg_names[:-2] + repl.arg_names, ret_type=t.ret_type.accept(self), type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None), imprecise_arg_kinds=(t.imprecise_arg_kinds or repl.imprecise_arg_kinds), + variables=[*repl.variables, *t.variables], ) elif isinstance(repl, ParamSpecType): # We're substituting one ParamSpec for another; this can mean that the prefix diff --git a/mypy/types.py b/mypy/types.py index 34ea96be25ee..09ba68aae88a 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -2069,16 +2069,6 @@ def param_spec(self) -> ParamSpecType | None: prefix = Parameters(self.arg_types[:-2], self.arg_kinds[:-2], self.arg_names[:-2]) return arg_type.copy_modified(flavor=ParamSpecFlavor.BARE, prefix=prefix) - def expand_param_spec(self, c: Parameters) -> CallableType: - variables = c.variables - return self.copy_modified( - arg_types=self.arg_types[:-2] + c.arg_types, - arg_kinds=self.arg_kinds[:-2] + c.arg_kinds, - arg_names=self.arg_names[:-2] + c.arg_names, - is_ellipsis_args=c.is_ellipsis_args, - variables=[*variables, *self.variables], - ) - def with_unpacked_kwargs(self) -> NormalizedCallableType: if not self.unpack_kwargs: return cast(NormalizedCallableType, self) diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index da831d29dd43..bb7859070f00 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1976,3 +1976,40 @@ g(cb, y=0, x='a') # OK g(cb, y='a', x=0) # E: Argument "y" to "g" has incompatible type "str"; expected "int" \ # E: Argument "x" to "g" has incompatible type "int"; 
expected "str" [builtins fixtures/paramspec.pyi] + +[case testParamSpecNoCrashOnUnificationAlias] +import mod +[file mod.pyi] +from typing import Callable, Protocol, TypeVar, overload +from typing_extensions import ParamSpec + +P = ParamSpec("P") +R_co = TypeVar("R_co", covariant=True) +Handler = Callable[P, R_co] + +class HandlerDecorator(Protocol): + def __call__(self, handler: Handler[P, R_co]) -> Handler[P, R_co]: ... + +@overload +def event(event_handler: Handler[P, R_co]) -> Handler[P, R_co]: ... +@overload +def event(namespace: str, *args, **kwargs) -> HandlerDecorator: ... +[builtins fixtures/paramspec.pyi] + +[case testParamSpecNoCrashOnUnificationCallable] +import mod +[file mod.pyi] +from typing import Callable, Protocol, TypeVar, overload +from typing_extensions import ParamSpec + +P = ParamSpec("P") +R_co = TypeVar("R_co", covariant=True) + +class HandlerDecorator(Protocol): + def __call__(self, handler: Callable[P, R_co]) -> Callable[P, R_co]: ... + +@overload +def event(event_handler: Callable[P, R_co]) -> Callable[P, R_co]: ... +@overload +def event(namespace: str, *args, **kwargs) -> HandlerDecorator: ... 
+[builtins fixtures/paramspec.pyi] From fbc48afccdf47de43fba73f2bc0eaf43a3f7b310 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Fri, 13 Oct 2023 11:28:41 +0200 Subject: [PATCH 092/144] Fix `coverage` config (#16258) fixes #16255 --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index de32618f1a39..c43253fed982 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -109,13 +109,13 @@ xfail_strict = true [tool.coverage.run] branch = true -source = "mypy" +source = ["mypy"] parallel = true [tool.coverage.report] show_missing = true skip_covered = true -omit = 'mypy/test/*' +omit = ['mypy/test/*'] exclude_lines = [ '\#\s*pragma: no cover', '^\s*raise AssertionError\b', From 2e52e98fd2873775a58616c097e93c96f58fc991 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 13 Oct 2023 11:30:54 +0100 Subject: [PATCH 093/144] Fix crash on ParamSpec unification (for real) (#16259) Fixes https://github.com/python/mypy/issues/16257 Parenthesis strike back. I hope this is the last place where I had put them wrong. 
--- mypy/expandtype.py | 3 +- .../unit/check-parameter-specification.test | 33 +++++++++++++++++++ 2 files changed, 35 insertions(+), 1 deletion(-) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 4acb51e22268..44716e6da013 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -348,7 +348,8 @@ def visit_callable_type(self, t: CallableType) -> CallableType: prefix = repl.prefix clean_repl = repl.copy_modified(prefix=Parameters([], [], [])) return t.copy_modified( - arg_types=self.expand_types(t.arg_types[:-2] + prefix.arg_types) + arg_types=self.expand_types(t.arg_types[:-2]) + + prefix.arg_types + [ clean_repl.with_flavor(ParamSpecFlavor.ARGS), clean_repl.with_flavor(ParamSpecFlavor.KWARGS), diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index bb7859070f00..5b6024da687e 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -2013,3 +2013,36 @@ def event(event_handler: Callable[P, R_co]) -> Callable[P, R_co]: ... @overload def event(namespace: str, *args, **kwargs) -> HandlerDecorator: ... [builtins fixtures/paramspec.pyi] + +[case testParamSpecNoCrashOnUnificationPrefix] +from typing import Any, Callable, TypeVar, overload +from typing_extensions import ParamSpec, Concatenate + +T = TypeVar("T") +U = TypeVar("U") +V = TypeVar("V") +W = TypeVar("W") +P = ParamSpec("P") + +@overload +def call( + func: Callable[Concatenate[T, P], U], + x: T, + *args: Any, + **kwargs: Any, +) -> U: ... +@overload +def call( + func: Callable[Concatenate[T, U, P], V], + x: T, + y: U, + *args: Any, + **kwargs: Any, +) -> V: ... +def call(*args: Any, **kwargs: Any) -> Any: ... + +def test1(x: int) -> str: ... +def test2(x: int, y: int) -> str: ... 
+reveal_type(call(test1, 1)) # N: Revealed type is "builtins.str" +reveal_type(call(test2, 1, 2)) # N: Revealed type is "builtins.str" +[builtins fixtures/paramspec.pyi] From feb0fa75ca7f3abb1217d94f6ffb55994b9a31c8 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 15 Oct 2023 00:33:28 -0700 Subject: [PATCH 094/144] Sync typeshed (#16266) --- mypy/typeshed/stdlib/VERSIONS | 1 + mypy/typeshed/stdlib/_ast.pyi | 2 +- mypy/typeshed/stdlib/_ctypes.pyi | 2 + mypy/typeshed/stdlib/_curses.pyi | 3 +- mypy/typeshed/stdlib/_locale.pyi | 100 +++++++++ mypy/typeshed/stdlib/_msi.pyi | 1 + mypy/typeshed/stdlib/_winapi.pyi | 1 + mypy/typeshed/stdlib/argparse.pyi | 6 +- mypy/typeshed/stdlib/asyncio/tasks.pyi | 68 ++++++- .../stdlib/asyncio/windows_events.pyi | 12 +- mypy/typeshed/stdlib/locale.pyi | 191 +++++++++--------- mypy/typeshed/stdlib/mimetypes.pyi | 3 +- mypy/typeshed/stdlib/mmap.pyi | 2 +- mypy/typeshed/stdlib/msilib/text.pyi | 2 +- mypy/typeshed/stdlib/msvcrt.pyi | 6 +- mypy/typeshed/stdlib/os/__init__.pyi | 19 +- mypy/typeshed/stdlib/posix.pyi | 5 +- mypy/typeshed/stdlib/select.pyi | 4 +- mypy/typeshed/stdlib/selectors.pyi | 18 +- mypy/typeshed/stdlib/signal.pyi | 10 +- mypy/typeshed/stdlib/socket.pyi | 4 +- mypy/typeshed/stdlib/ssl.pyi | 6 +- mypy/typeshed/stdlib/subprocess.pyi | 1 + mypy/typeshed/stdlib/winreg.pyi | 2 + mypy/typeshed/stdlib/winsound.pyi | 1 + mypy/typeshed/stdlib/zipfile.pyi | 36 +++- 26 files changed, 363 insertions(+), 143 deletions(-) create mode 100644 mypy/typeshed/stdlib/_locale.pyi diff --git a/mypy/typeshed/stdlib/VERSIONS b/mypy/typeshed/stdlib/VERSIONS index 49433e346765..9d4636a29a1d 100644 --- a/mypy/typeshed/stdlib/VERSIONS +++ b/mypy/typeshed/stdlib/VERSIONS @@ -35,6 +35,7 @@ _dummy_threading: 2.7-3.8 _heapq: 2.7- _imp: 3.0- _json: 2.7- +_locale: 2.7- _markupbase: 2.7- _msi: 2.7- _operator: 3.4- diff --git a/mypy/typeshed/stdlib/_ast.pyi b/mypy/typeshed/stdlib/_ast.pyi 
index 05e2a08fdc88..402b770c0462 100644 --- a/mypy/typeshed/stdlib/_ast.pyi +++ b/mypy/typeshed/stdlib/_ast.pyi @@ -602,7 +602,7 @@ if sys.version_info >= (3, 12): name: _Identifier class TypeAlias(stmt): - __match_args__ = ("name", "typeparams", "value") + __match_args__ = ("name", "type_params", "value") name: Name type_params: list[type_param] value: expr diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi index b48b1f7d318c..8a891971e9f1 100644 --- a/mypy/typeshed/stdlib/_ctypes.pyi +++ b/mypy/typeshed/stdlib/_ctypes.pyi @@ -44,6 +44,8 @@ if sys.platform == "win32": def FormatError(code: int = ...) -> str: ... def get_last_error() -> int: ... def set_last_error(value: int) -> int: ... + def LoadLibrary(__name: str, __load_flags: int = 0) -> int: ... + def FreeLibrary(__handle: int) -> None: ... class _CDataMeta(type): # By default mypy complains about the following two methods, because strictly speaking cls diff --git a/mypy/typeshed/stdlib/_curses.pyi b/mypy/typeshed/stdlib/_curses.pyi index e2319a5fcc1f..3604f7abedb5 100644 --- a/mypy/typeshed/stdlib/_curses.pyi +++ b/mypy/typeshed/stdlib/_curses.pyi @@ -61,7 +61,8 @@ if sys.platform != "win32": A_DIM: int A_HORIZONTAL: int A_INVIS: int - A_ITALIC: int + if sys.platform != "darwin": + A_ITALIC: int A_LEFT: int A_LOW: int A_NORMAL: int diff --git a/mypy/typeshed/stdlib/_locale.pyi b/mypy/typeshed/stdlib/_locale.pyi new file mode 100644 index 000000000000..2b2fe03e4510 --- /dev/null +++ b/mypy/typeshed/stdlib/_locale.pyi @@ -0,0 +1,100 @@ +import sys +from _typeshed import StrPath +from collections.abc import Iterable, Mapping + +LC_CTYPE: int +LC_COLLATE: int +LC_TIME: int +LC_MONETARY: int +LC_NUMERIC: int +LC_ALL: int +CHAR_MAX: int + +def setlocale(category: int, locale: str | Iterable[str | None] | None = None) -> str: ... +def localeconv() -> Mapping[str, int | str | list[int]]: ... + +if sys.version_info >= (3, 11): + def getencoding() -> str: ... 
+ +def strcoll(__os1: str, __os2: str) -> int: ... +def strxfrm(__string: str) -> str: ... + +# native gettext functions +# https://docs.python.org/3/library/locale.html#access-to-message-catalogs +# https://github.com/python/cpython/blob/f4c03484da59049eb62a9bf7777b963e2267d187/Modules/_localemodule.c#L626 +if sys.platform != "win32": + LC_MESSAGES: int + + ABDAY_1: int + ABDAY_2: int + ABDAY_3: int + ABDAY_4: int + ABDAY_5: int + ABDAY_6: int + ABDAY_7: int + + ABMON_1: int + ABMON_2: int + ABMON_3: int + ABMON_4: int + ABMON_5: int + ABMON_6: int + ABMON_7: int + ABMON_8: int + ABMON_9: int + ABMON_10: int + ABMON_11: int + ABMON_12: int + + DAY_1: int + DAY_2: int + DAY_3: int + DAY_4: int + DAY_5: int + DAY_6: int + DAY_7: int + + ERA: int + ERA_D_T_FMT: int + ERA_D_FMT: int + ERA_T_FMT: int + + MON_1: int + MON_2: int + MON_3: int + MON_4: int + MON_5: int + MON_6: int + MON_7: int + MON_8: int + MON_9: int + MON_10: int + MON_11: int + MON_12: int + + CODESET: int + D_T_FMT: int + D_FMT: int + T_FMT: int + T_FMT_AMPM: int + AM_STR: int + PM_STR: int + + RADIXCHAR: int + THOUSEP: int + YESEXPR: int + NOEXPR: int + CRNCYSTR: int + ALT_DIGITS: int + + def nl_langinfo(__key: int) -> str: ... + + # This is dependent on `libintl.h` which is a part of `gettext` + # system dependency. These functions might be missing. + # But, we always say that they are present. + def gettext(__msg: str) -> str: ... + def dgettext(__domain: str | None, __msg: str) -> str: ... + def dcgettext(__domain: str | None, __msg: str, __category: int) -> str: ... + def textdomain(__domain: str | None) -> str: ... + def bindtextdomain(__domain: str, __dir: StrPath | None) -> str: ... + def bind_textdomain_codeset(__domain: str, __codeset: str | None) -> str | None: ... 
diff --git a/mypy/typeshed/stdlib/_msi.pyi b/mypy/typeshed/stdlib/_msi.pyi index 2fdbdfd0e9f4..160406a6d8d5 100644 --- a/mypy/typeshed/stdlib/_msi.pyi +++ b/mypy/typeshed/stdlib/_msi.pyi @@ -1,6 +1,7 @@ import sys if sys.platform == "win32": + class MSIError(Exception): ... # Actual typename View, not exposed by the implementation class _View: def Execute(self, params: _Record | None = ...) -> None: ... diff --git a/mypy/typeshed/stdlib/_winapi.pyi b/mypy/typeshed/stdlib/_winapi.pyi index b51d844701ac..e887fb38a7fa 100644 --- a/mypy/typeshed/stdlib/_winapi.pyi +++ b/mypy/typeshed/stdlib/_winapi.pyi @@ -255,3 +255,4 @@ if sys.platform == "win32": if sys.version_info >= (3, 12): def CopyFile2(existing_file_name: str, new_file_name: str, flags: int, progress_routine: int | None = None) -> int: ... + def NeedCurrentDirectoryForExePath(__exe_name: str) -> bool: ... diff --git a/mypy/typeshed/stdlib/argparse.pyi b/mypy/typeshed/stdlib/argparse.pyi index 0004250b17a9..924cc8986114 100644 --- a/mypy/typeshed/stdlib/argparse.pyi +++ b/mypy/typeshed/stdlib/argparse.pyi @@ -342,11 +342,11 @@ if sys.version_info >= (3, 12): option_strings: Sequence[str], dest: str, default: _T | str | None = None, - type: Callable[[str], _T] | FileType | None = sentinel, # noqa: Y011 - choices: Iterable[_T] | None = sentinel, # noqa: Y011 + type: Callable[[str], _T] | FileType | None = sentinel, + choices: Iterable[_T] | None = sentinel, required: bool = False, help: str | None = None, - metavar: str | tuple[str, ...] | None = sentinel, # noqa: Y011 + metavar: str | tuple[str, ...] | None = sentinel, ) -> None: ... 
elif sys.version_info >= (3, 9): diff --git a/mypy/typeshed/stdlib/asyncio/tasks.pyi b/mypy/typeshed/stdlib/asyncio/tasks.pyi index b6929deb0fae..366ac7fa35e3 100644 --- a/mypy/typeshed/stdlib/asyncio/tasks.pyi +++ b/mypy/typeshed/stdlib/asyncio/tasks.pyi @@ -68,6 +68,7 @@ _T2 = TypeVar("_T2") _T3 = TypeVar("_T3") _T4 = TypeVar("_T4") _T5 = TypeVar("_T5") +_T6 = TypeVar("_T6") _FT = TypeVar("_FT", bound=Future[Any]) _FutureLike: TypeAlias = Future[_T] | Generator[Any, None, _T] | Awaitable[_T] _TaskYieldType: TypeAlias = Future[object] | None @@ -131,6 +132,19 @@ if sys.version_info >= (3, 10): return_exceptions: Literal[False] = False, ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + __coro_or_future5: _FutureLike[_T5], + __coro_or_future6: _FutureLike[_T6], + *, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ... + @overload + def gather(*coros_or_futures: _FutureLike[_T], return_exceptions: Literal[False] = False) -> Future[list[_T]]: ... # type: ignore[misc] + @overload def gather(__coro_or_future1: _FutureLike[_T1], *, return_exceptions: bool) -> Future[tuple[_T1 | BaseException]]: ... # type: ignore[misc] @overload def gather( # type: ignore[misc] @@ -166,7 +180,27 @@ if sys.version_info >= (3, 10): tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException] ]: ... @overload - def gather(*coros_or_futures: _FutureLike[Any], return_exceptions: bool = False) -> Future[list[Any]]: ... 
+ def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + __coro_or_future5: _FutureLike[_T5], + __coro_or_future6: _FutureLike[_T6], + *, + return_exceptions: bool, + ) -> Future[ + tuple[ + _T1 | BaseException, + _T2 | BaseException, + _T3 | BaseException, + _T4 | BaseException, + _T5 | BaseException, + _T6 | BaseException, + ] + ]: ... + @overload + def gather(*coros_or_futures: _FutureLike[_T], return_exceptions: bool) -> Future[list[_T | BaseException]]: ... else: @overload @@ -212,6 +246,22 @@ else: return_exceptions: Literal[False] = False, ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + __coro_or_future5: _FutureLike[_T5], + __coro_or_future6: _FutureLike[_T6], + *, + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ... + @overload + def gather( # type: ignore[misc] + *coros_or_futures: _FutureLike[_T], loop: AbstractEventLoop | None = None, return_exceptions: Literal[False] = False + ) -> Future[list[_T]]: ... + @overload def gather( # type: ignore[misc] __coro_or_future1: _FutureLike[_T1], *, loop: AbstractEventLoop | None = None, return_exceptions: bool ) -> Future[tuple[_T1 | BaseException]]: ... 
@@ -249,16 +299,24 @@ else: __coro_or_future3: _FutureLike[_T3], __coro_or_future4: _FutureLike[_T4], __coro_or_future5: _FutureLike[_T5], + __coro_or_future6: _FutureLike[_T6], *, loop: AbstractEventLoop | None = None, return_exceptions: bool, ) -> Future[ - tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException] + tuple[ + _T1 | BaseException, + _T2 | BaseException, + _T3 | BaseException, + _T4 | BaseException, + _T5 | BaseException, + _T6 | BaseException, + ] ]: ... @overload - def gather( - *coros_or_futures: _FutureLike[Any], loop: AbstractEventLoop | None = None, return_exceptions: bool = False - ) -> Future[list[Any]]: ... + def gather( # type: ignore[misc] + *coros_or_futures: _FutureLike[_T], loop: AbstractEventLoop | None = None, return_exceptions: bool + ) -> Future[list[_T | BaseException]]: ... def run_coroutine_threadsafe(coro: _FutureLike[_T], loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: ... diff --git a/mypy/typeshed/stdlib/asyncio/windows_events.pyi b/mypy/typeshed/stdlib/asyncio/windows_events.pyi index 2942a25c0ac4..8e643dd4a3f2 100644 --- a/mypy/typeshed/stdlib/asyncio/windows_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/windows_events.pyi @@ -1,6 +1,6 @@ import socket import sys -from _typeshed import Incomplete, WriteableBuffer +from _typeshed import Incomplete, ReadableBuffer, WriteableBuffer from collections.abc import Callable from typing import IO, Any, ClassVar, NoReturn from typing_extensions import Literal @@ -48,6 +48,12 @@ if sys.platform == "win32": def select(self, timeout: int | None = None) -> list[futures.Future[Any]]: ... def recv(self, conn: socket.socket, nbytes: int, flags: int = 0) -> futures.Future[bytes]: ... def recv_into(self, conn: socket.socket, buf: WriteableBuffer, flags: int = 0) -> futures.Future[Any]: ... + def recvfrom( + self, conn: socket.socket, nbytes: int, flags: int = 0 + ) -> futures.Future[tuple[bytes, socket._RetAddress]]: ... 
+ def sendto( + self, conn: socket.socket, buf: ReadableBuffer, flags: int = 0, addr: socket._Address | None = None + ) -> futures.Future[int]: ... def send(self, conn: socket.socket, buf: WriteableBuffer, flags: int = 0) -> futures.Future[Any]: ... def accept(self, listener: socket.socket) -> futures.Future[Any]: ... def connect( @@ -60,6 +66,10 @@ if sys.platform == "win32": async def connect_pipe(self, address: str) -> windows_utils.PipeHandle: ... def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int | None = None) -> bool: ... def close(self) -> None: ... + if sys.version_info >= (3, 11): + def recvfrom_into( + self, conn: socket.socket, buf: WriteableBuffer, flags: int = 0 + ) -> futures.Future[tuple[int, socket._RetAddress]]: ... SelectorEventLoop = _WindowsSelectorEventLoop class WindowsSelectorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): diff --git a/mypy/typeshed/stdlib/locale.pyi b/mypy/typeshed/stdlib/locale.pyi index 3753700ea889..2e95c659dbcd 100644 --- a/mypy/typeshed/stdlib/locale.pyi +++ b/mypy/typeshed/stdlib/locale.pyi @@ -1,6 +1,95 @@ import sys -from _typeshed import StrPath -from collections.abc import Callable, Iterable, Mapping +from _locale import ( + CHAR_MAX as CHAR_MAX, + LC_ALL as LC_ALL, + LC_COLLATE as LC_COLLATE, + LC_CTYPE as LC_CTYPE, + LC_MONETARY as LC_MONETARY, + LC_NUMERIC as LC_NUMERIC, + LC_TIME as LC_TIME, + localeconv as localeconv, + setlocale as setlocale, + strcoll as strcoll, + strxfrm as strxfrm, +) + +# This module defines a function "str()", which is why "str" can't be used +# as a type annotation or type alias. 
+from builtins import str as _str +from collections.abc import Callable +from decimal import Decimal +from typing import Any + +if sys.version_info >= (3, 11): + from _locale import getencoding as getencoding + +# Some parts of the `_locale` module are platform-specific: +if sys.platform != "win32": + from _locale import ( + ABDAY_1 as ABDAY_1, + ABDAY_2 as ABDAY_2, + ABDAY_3 as ABDAY_3, + ABDAY_4 as ABDAY_4, + ABDAY_5 as ABDAY_5, + ABDAY_6 as ABDAY_6, + ABDAY_7 as ABDAY_7, + ABMON_1 as ABMON_1, + ABMON_2 as ABMON_2, + ABMON_3 as ABMON_3, + ABMON_4 as ABMON_4, + ABMON_5 as ABMON_5, + ABMON_6 as ABMON_6, + ABMON_7 as ABMON_7, + ABMON_8 as ABMON_8, + ABMON_9 as ABMON_9, + ABMON_10 as ABMON_10, + ABMON_11 as ABMON_11, + ABMON_12 as ABMON_12, + ALT_DIGITS as ALT_DIGITS, + AM_STR as AM_STR, + CODESET as CODESET, + CRNCYSTR as CRNCYSTR, + D_FMT as D_FMT, + D_T_FMT as D_T_FMT, + DAY_1 as DAY_1, + DAY_2 as DAY_2, + DAY_3 as DAY_3, + DAY_4 as DAY_4, + DAY_5 as DAY_5, + DAY_6 as DAY_6, + DAY_7 as DAY_7, + ERA as ERA, + ERA_D_FMT as ERA_D_FMT, + ERA_D_T_FMT as ERA_D_T_FMT, + ERA_T_FMT as ERA_T_FMT, + LC_MESSAGES as LC_MESSAGES, + MON_1 as MON_1, + MON_2 as MON_2, + MON_3 as MON_3, + MON_4 as MON_4, + MON_5 as MON_5, + MON_6 as MON_6, + MON_7 as MON_7, + MON_8 as MON_8, + MON_9 as MON_9, + MON_10 as MON_10, + MON_11 as MON_11, + MON_12 as MON_12, + NOEXPR as NOEXPR, + PM_STR as PM_STR, + RADIXCHAR as RADIXCHAR, + T_FMT as T_FMT, + T_FMT_AMPM as T_FMT_AMPM, + THOUSEP as THOUSEP, + YESEXPR as YESEXPR, + bind_textdomain_codeset as bind_textdomain_codeset, + bindtextdomain as bindtextdomain, + dcgettext as dcgettext, + dgettext as dgettext, + gettext as gettext, + nl_langinfo as nl_langinfo, + textdomain as textdomain, + ) __all__ = [ "getlocale", @@ -20,7 +109,6 @@ __all__ = [ "normalize", "LC_CTYPE", "LC_COLLATE", - "LC_MESSAGES", "LC_TIME", "LC_MONETARY", "LC_NUMERIC", @@ -34,88 +122,11 @@ if sys.version_info >= (3, 11): if sys.version_info < (3, 12): __all__ += ["format"] -# 
This module defines a function "str()", which is why "str" can't be used -# as a type annotation or type alias. -from builtins import str as _str -from decimal import Decimal -from typing import Any - -CODESET: int -D_T_FMT: int -D_FMT: int -T_FMT: int -T_FMT_AMPM: int -AM_STR: int -PM_STR: int - -DAY_1: int -DAY_2: int -DAY_3: int -DAY_4: int -DAY_5: int -DAY_6: int -DAY_7: int -ABDAY_1: int -ABDAY_2: int -ABDAY_3: int -ABDAY_4: int -ABDAY_5: int -ABDAY_6: int -ABDAY_7: int - -MON_1: int -MON_2: int -MON_3: int -MON_4: int -MON_5: int -MON_6: int -MON_7: int -MON_8: int -MON_9: int -MON_10: int -MON_11: int -MON_12: int -ABMON_1: int -ABMON_2: int -ABMON_3: int -ABMON_4: int -ABMON_5: int -ABMON_6: int -ABMON_7: int -ABMON_8: int -ABMON_9: int -ABMON_10: int -ABMON_11: int -ABMON_12: int - -RADIXCHAR: int -THOUSEP: int -YESEXPR: int -NOEXPR: int -CRNCYSTR: int - -ERA: int -ERA_D_T_FMT: int -ERA_D_FMT: int -ERA_T_FMT: int - -ALT_DIGITS: int - -LC_CTYPE: int -LC_COLLATE: int -LC_TIME: int -LC_MONETARY: int -LC_MESSAGES: int -LC_NUMERIC: int -LC_ALL: int - -CHAR_MAX: int +if sys.platform != "win32": + __all__ += ["LC_MESSAGES"] class Error(Exception): ... -def setlocale(category: int, locale: _str | Iterable[_str | None] | None = None) -> _str: ... -def localeconv() -> Mapping[_str, int | _str | list[int]]: ... -def nl_langinfo(__key: int) -> _str: ... def getdefaultlocale( envvars: tuple[_str, ...] = ("LC_ALL", "LC_CTYPE", "LANG", "LANGUAGE") ) -> tuple[_str | None, _str | None]: ... @@ -123,8 +134,6 @@ def getlocale(category: int = ...) -> tuple[_str | None, _str | None]: ... def getpreferredencoding(do_setlocale: bool = True) -> _str: ... def normalize(localename: _str) -> _str: ... def resetlocale(category: int = ...) -> None: ... -def strcoll(__os1: _str, __os2: _str) -> int: ... -def strxfrm(__string: _str) -> _str: ... if sys.version_info < (3, 12): def format( @@ -138,20 +147,6 @@ def atof(string: _str, func: Callable[[_str], float] = ...) -> float: ... 
def atoi(string: _str) -> int: ... def str(val: float) -> _str: ... -# native gettext functions -# https://docs.python.org/3/library/locale.html#access-to-message-catalogs -# https://github.com/python/cpython/blob/f4c03484da59049eb62a9bf7777b963e2267d187/Modules/_localemodule.c#L626 -if sys.platform == "linux" or sys.platform == "darwin": - def gettext(__msg: _str) -> _str: ... - def dgettext(__domain: _str | None, __msg: _str) -> _str: ... - def dcgettext(__domain: _str | None, __msg: _str, __category: int) -> _str: ... - def textdomain(__domain: _str | None) -> _str: ... - def bindtextdomain(__domain: _str, __dir: StrPath | None) -> _str: ... - def bind_textdomain_codeset(__domain: _str, __codeset: _str | None) -> _str | None: ... - -if sys.version_info >= (3, 11): - def getencoding() -> _str: ... - locale_alias: dict[_str, _str] # undocumented locale_encoding_alias: dict[_str, _str] # undocumented windows_locale: dict[int, _str] # undocumented diff --git a/mypy/typeshed/stdlib/mimetypes.pyi b/mypy/typeshed/stdlib/mimetypes.pyi index 128a05fa5752..532cc5e3ce39 100644 --- a/mypy/typeshed/stdlib/mimetypes.pyi +++ b/mypy/typeshed/stdlib/mimetypes.pyi @@ -53,5 +53,4 @@ class MimeTypes: def guess_all_extensions(self, type: str, strict: bool = True) -> list[str]: ... def read(self, filename: str, strict: bool = True) -> None: ... def readfp(self, fp: IO[str], strict: bool = True) -> None: ... - if sys.platform == "win32": - def read_windows_registry(self, strict: bool = True) -> None: ... + def read_windows_registry(self, strict: bool = True) -> None: ... 
diff --git a/mypy/typeshed/stdlib/mmap.pyi b/mypy/typeshed/stdlib/mmap.pyi index 09319980692f..9a213a8b8cf0 100644 --- a/mypy/typeshed/stdlib/mmap.pyi +++ b/mypy/typeshed/stdlib/mmap.pyi @@ -28,7 +28,7 @@ if sys.platform != "win32": PROT_READ: int PROT_WRITE: int - PAGESIZE: int +PAGESIZE: int class mmap(Iterable[int], Sized): if sys.platform == "win32": diff --git a/mypy/typeshed/stdlib/msilib/text.pyi b/mypy/typeshed/stdlib/msilib/text.pyi index 1353cf8a2392..441c843ca6cf 100644 --- a/mypy/typeshed/stdlib/msilib/text.pyi +++ b/mypy/typeshed/stdlib/msilib/text.pyi @@ -3,5 +3,5 @@ import sys if sys.platform == "win32": ActionText: list[tuple[str, str, str | None]] UIText: list[tuple[str, str | None]] - + dirname: str tables: list[str] diff --git a/mypy/typeshed/stdlib/msvcrt.pyi b/mypy/typeshed/stdlib/msvcrt.pyi index 5849b9b00ca0..768edbc18ab3 100644 --- a/mypy/typeshed/stdlib/msvcrt.pyi +++ b/mypy/typeshed/stdlib/msvcrt.pyi @@ -1,8 +1,9 @@ import sys -from typing_extensions import Literal +from typing_extensions import Final, Literal # This module is only available on Windows if sys.platform == "win32": + CRT_ASSEMBLY_VERSION: Final[str] LK_UNLCK: Literal[0] LK_LOCK: Literal[1] LK_NBLCK: Literal[2] @@ -26,3 +27,6 @@ if sys.platform == "win32": def ungetch(__char: bytes | bytearray) -> None: ... def ungetwch(__unicode_char: str) -> None: ... def heapmin() -> None: ... + def SetErrorMode(__mode: int) -> int: ... + if sys.version_info >= (3, 10): + def GetErrorMode() -> int: ... 
# undocumented diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi index fa4c55011eba..7fd04218fd7c 100644 --- a/mypy/typeshed/stdlib/os/__init__.pyi +++ b/mypy/typeshed/stdlib/os/__init__.pyi @@ -70,9 +70,20 @@ if sys.platform != "win32": POSIX_FADV_WILLNEED: int POSIX_FADV_DONTNEED: int - SF_NODISKIO: int - SF_MNOWAIT: int - SF_SYNC: int + if sys.platform != "linux" and sys.platform != "darwin": + # In the os-module docs, these are marked as being available + # on "Unix, not Emscripten, not WASI." + # However, in the source code, a comment indicates they're "FreeBSD constants". + # sys.platform could have one of many values on a FreeBSD Python build, + # so the sys-module docs recommend doing `if sys.platform.startswith('freebsd')` + # to detect FreeBSD builds. Unfortunately that would be too dynamic + # for type checkers, however. + SF_NODISKIO: int + SF_MNOWAIT: int + SF_SYNC: int + + if sys.version_info >= (3, 11): + SF_NOCACHE: int if sys.platform == "linux": XATTR_SIZE_MAX: int @@ -282,6 +293,8 @@ if sys.platform != "win32": EX_PROTOCOL: int EX_NOPERM: int EX_CONFIG: int + +if sys.platform != "win32" and sys.platform != "darwin": EX_NOTFOUND: int P_NOWAIT: int diff --git a/mypy/typeshed/stdlib/posix.pyi b/mypy/typeshed/stdlib/posix.pyi index ab6bf2e63be5..81cc93c5aa66 100644 --- a/mypy/typeshed/stdlib/posix.pyi +++ b/mypy/typeshed/stdlib/posix.pyi @@ -14,7 +14,6 @@ if sys.platform != "win32": EX_NOHOST as EX_NOHOST, EX_NOINPUT as EX_NOINPUT, EX_NOPERM as EX_NOPERM, - EX_NOTFOUND as EX_NOTFOUND, EX_NOUSER as EX_NOUSER, EX_OK as EX_OK, EX_OSERR as EX_OSERR, @@ -29,6 +28,7 @@ if sys.platform != "win32": F_TEST as F_TEST, F_TLOCK as F_TLOCK, F_ULOCK as F_ULOCK, + NGROUPS_MAX as NGROUPS_MAX, O_APPEND as O_APPEND, O_ASYNC as O_ASYNC, O_CREAT as O_CREAT, @@ -222,6 +222,9 @@ if sys.platform != "win32": writev as writev, ) + if sys.platform != "darwin": + from os import EX_NOTFOUND as EX_NOTFOUND + if sys.platform == "linux": from 
os import ( GRND_NONBLOCK as GRND_NONBLOCK, diff --git a/mypy/typeshed/stdlib/select.pyi b/mypy/typeshed/stdlib/select.pyi index c86d20c352e0..5e2828e42c30 100644 --- a/mypy/typeshed/stdlib/select.pyi +++ b/mypy/typeshed/stdlib/select.pyi @@ -15,7 +15,8 @@ if sys.platform != "win32": POLLOUT: int POLLPRI: int POLLRDBAND: int - POLLRDHUP: int + if sys.platform == "linux": + POLLRDHUP: int POLLRDNORM: int POLLWRBAND: int POLLWRNORM: int @@ -136,7 +137,6 @@ if sys.platform == "linux": EPOLLRDNORM: int EPOLLWRBAND: int EPOLLWRNORM: int - EPOLL_RDHUP: int EPOLL_CLOEXEC: int if sys.platform != "linux" and sys.platform != "darwin" and sys.platform != "win32": diff --git a/mypy/typeshed/stdlib/selectors.pyi b/mypy/typeshed/stdlib/selectors.pyi index 90a923f09355..043df9253316 100644 --- a/mypy/typeshed/stdlib/selectors.pyi +++ b/mypy/typeshed/stdlib/selectors.pyi @@ -59,15 +59,21 @@ class DevpollSelector(BaseSelector): def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... -class KqueueSelector(BaseSelector): - def fileno(self) -> int: ... - def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... - def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... - def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... +if sys.platform != "win32": + class KqueueSelector(BaseSelector): + def fileno(self) -> int: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... 
+# Not a real class at runtime, it is just a conditional alias to other real selectors. +# The runtime logic is more fine-grained than a `sys.platform` check; +# not really expressible in the stubs class DefaultSelector(BaseSelector): def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... + if sys.platform != "win32": + def fileno(self) -> int: ... diff --git a/mypy/typeshed/stdlib/signal.pyi b/mypy/typeshed/stdlib/signal.pyi index 72c78f1b69f5..906a6dabe192 100644 --- a/mypy/typeshed/stdlib/signal.pyi +++ b/mypy/typeshed/stdlib/signal.pyi @@ -10,10 +10,8 @@ NSIG: int class Signals(IntEnum): SIGABRT: int - SIGEMT: int SIGFPE: int SIGILL: int - SIGINFO: int SIGINT: int SIGSEGV: int SIGTERM: int @@ -47,6 +45,9 @@ class Signals(IntEnum): SIGWINCH: int SIGXCPU: int SIGXFSZ: int + if sys.platform != "linux": + SIGEMT: int + SIGINFO: int if sys.platform != "darwin": SIGCLD: int SIGPOLL: int @@ -77,10 +78,8 @@ else: def signal(__signalnum: _SIGNUM, __handler: _HANDLER) -> _HANDLER: ... 
SIGABRT: Signals -SIGEMT: Signals SIGFPE: Signals SIGILL: Signals -SIGINFO: Signals SIGINT: Signals SIGSEGV: Signals SIGTERM: Signals @@ -90,6 +89,9 @@ if sys.platform == "win32": CTRL_C_EVENT: Signals CTRL_BREAK_EVENT: Signals else: + if sys.platform != "linux": + SIGINFO: Signals + SIGEMT: Signals SIGALRM: Signals SIGBUS: Signals SIGCHLD: Signals diff --git a/mypy/typeshed/stdlib/socket.pyi b/mypy/typeshed/stdlib/socket.pyi index da06ce2c2b06..cc0cbe3709af 100644 --- a/mypy/typeshed/stdlib/socket.pyi +++ b/mypy/typeshed/stdlib/socket.pyi @@ -129,7 +129,9 @@ if sys.platform != "darwin" or sys.version_info >= (3, 9): IPV6_RTHDR as IPV6_RTHDR, ) -if sys.platform != "darwin": +if sys.platform == "darwin": + from _socket import PF_SYSTEM as PF_SYSTEM, SYSPROTO_CONTROL as SYSPROTO_CONTROL +else: from _socket import SO_EXCLUSIVEADDRUSE as SO_EXCLUSIVEADDRUSE if sys.version_info >= (3, 10): diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi index faf667afb475..d7f256d031ac 100644 --- a/mypy/typeshed/stdlib/ssl.pyi +++ b/mypy/typeshed/stdlib/ssl.pyi @@ -4,7 +4,7 @@ import sys from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer from collections.abc import Callable, Iterable from typing import Any, NamedTuple, overload -from typing_extensions import Literal, Self, TypeAlias, TypedDict, final +from typing_extensions import Literal, Never, Self, TypeAlias, TypedDict, final _PCTRTT: TypeAlias = tuple[tuple[str, str], ...] _PCTRTTT: TypeAlias = tuple[_PCTRTT, ...] @@ -367,6 +367,10 @@ class SSLSocket(socket.socket): def pending(self) -> int: ... if sys.version_info >= (3, 8): def verify_client_post_handshake(self) -> None: ... + # These methods always raise `NotImplementedError`: + def recvmsg(self, *args: Never, **kwargs: Never) -> Never: ... # type: ignore[override] + def recvmsg_into(self, *args: Never, **kwargs: Never) -> Never: ... # type: ignore[override] + def sendmsg(self, *args: Never, **kwargs: Never) -> Never: ... 
# type: ignore[override] class TLSVersion(enum.IntEnum): MINIMUM_SUPPORTED: int diff --git a/mypy/typeshed/stdlib/subprocess.pyi b/mypy/typeshed/stdlib/subprocess.pyi index 346e4d5513d8..1013db7ee984 100644 --- a/mypy/typeshed/stdlib/subprocess.pyi +++ b/mypy/typeshed/stdlib/subprocess.pyi @@ -2600,6 +2600,7 @@ if sys.platform == "win32": hStdError: Any | None wShowWindow: int lpAttributeList: Mapping[str, Any] + def copy(self) -> STARTUPINFO: ... from _winapi import ( ABOVE_NORMAL_PRIORITY_CLASS as ABOVE_NORMAL_PRIORITY_CLASS, BELOW_NORMAL_PRIORITY_CLASS as BELOW_NORMAL_PRIORITY_CLASS, diff --git a/mypy/typeshed/stdlib/winreg.pyi b/mypy/typeshed/stdlib/winreg.pyi index 337bd9706050..613b239ff663 100644 --- a/mypy/typeshed/stdlib/winreg.pyi +++ b/mypy/typeshed/stdlib/winreg.pyi @@ -99,3 +99,5 @@ if sys.platform == "win32": def Close(self) -> None: ... def Detach(self) -> int: ... def __hash__(self) -> int: ... + @property + def handle(self) -> int: ... diff --git a/mypy/typeshed/stdlib/winsound.pyi b/mypy/typeshed/stdlib/winsound.pyi index 9b2b57a38986..aa04fdc27a01 100644 --- a/mypy/typeshed/stdlib/winsound.pyi +++ b/mypy/typeshed/stdlib/winsound.pyi @@ -4,6 +4,7 @@ from typing import overload from typing_extensions import Literal if sys.platform == "win32": + SND_APPLICATION: Literal[128] SND_FILENAME: Literal[131072] SND_ALIAS: Literal[65536] SND_LOOP: Literal[8] diff --git a/mypy/typeshed/stdlib/zipfile.pyi b/mypy/typeshed/stdlib/zipfile.pyi index dc07eb3f2a38..b7144f3ab528 100644 --- a/mypy/typeshed/stdlib/zipfile.pyi +++ b/mypy/typeshed/stdlib/zipfile.pyi @@ -2,9 +2,10 @@ import io import sys from _typeshed import SizedBuffer, StrOrBytesPath, StrPath from collections.abc import Callable, Iterable, Iterator +from io import TextIOWrapper from os import PathLike from types import TracebackType -from typing import IO, Any, Protocol, overload +from typing import IO, Protocol, overload from typing_extensions import Literal, Self, TypeAlias __all__ = [ @@ -223,11 
+224,18 @@ class ZipInfo: def FileHeader(self, zip64: bool | None = None) -> bytes: ... if sys.version_info >= (3, 8): - if sys.version_info < (3, 9): - class _PathOpenProtocol(Protocol): - def __call__(self, mode: _ReadWriteMode = "r", pwd: bytes | None = ..., *, force_zip64: bool = ...) -> IO[bytes]: ... + class CompleteDirs(ZipFile): + def resolve_dir(self, name: str) -> str: ... + @overload + @classmethod + def make(cls, source: ZipFile) -> CompleteDirs: ... + @overload + @classmethod + def make(cls: type[Self], source: StrPath | IO[bytes]) -> Self: ... class Path: + root: CompleteDirs + def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: ... @property def name(self) -> str: ... @property @@ -243,19 +251,25 @@ if sys.version_info >= (3, 8): @property def stem(self) -> str: ... - def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: ... if sys.version_info >= (3, 9): + @overload def open( self, - mode: _ReadWriteBinaryMode = "r", + mode: Literal["r", "w"] = "r", encoding: str | None = None, - *args: Any, + errors: str | None = None, + newline: str | None = None, + line_buffering: bool = ..., + write_through: bool = ..., + *, pwd: bytes | None = None, - **kwargs: Any, - ) -> IO[bytes]: ... + ) -> TextIOWrapper: ... + @overload + def open(self, mode: Literal["rb", "wb"], *, pwd: bytes | None = None) -> IO[bytes]: ... else: - @property - def open(self) -> _PathOpenProtocol: ... + def open( + self, mode: _ReadWriteBinaryMode = "r", pwd: bytes | None = None, *, force_zip64: bool = False + ) -> IO[bytes]: ... if sys.version_info >= (3, 10): def iterdir(self) -> Iterator[Self]: ... 
From 940fceb59a55513913c3c9c1eaf89a7f5ee774d6 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sun, 15 Oct 2023 12:12:44 -0700 Subject: [PATCH 095/144] [mypyc] Fix direct __dict__ access on inner functions in new Python (#16084) Fixes #16077 --- mypyc/codegen/emitclass.py | 5 ++++- mypyc/test-data/run-functions.test | 30 ++++++++++++++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/mypyc/codegen/emitclass.py b/mypyc/codegen/emitclass.py index 62e1b4b2dea1..8dcf7212b694 100644 --- a/mypyc/codegen/emitclass.py +++ b/mypyc/codegen/emitclass.py @@ -217,7 +217,7 @@ def generate_class(cl: ClassIR, module: str, emitter: Emitter) -> None: fields["tp_name"] = f'"{name}"' generate_full = not cl.is_trait and not cl.builtin_base - needs_getseters = cl.needs_getseters or not cl.is_generated + needs_getseters = cl.needs_getseters or not cl.is_generated or cl.has_dict if not cl.builtin_base: fields["tp_new"] = new_name @@ -886,6 +886,9 @@ def generate_getseters_table(cl: ClassIR, name: str, emitter: Emitter) -> None: else: emitter.emit_line("NULL, NULL, NULL},") + if cl.has_dict: + emitter.emit_line('{"__dict__", PyObject_GenericGetDict, PyObject_GenericSetDict},') + emitter.emit_line("{NULL} /* Sentinel */") emitter.emit_line("};") diff --git a/mypyc/test-data/run-functions.test b/mypyc/test-data/run-functions.test index 21993891c4e3..bd8f1a9197dd 100644 --- a/mypyc/test-data/run-functions.test +++ b/mypyc/test-data/run-functions.test @@ -1256,3 +1256,33 @@ def foo(**kwargs: Unpack[Person]) -> None: foo(name='Jennifer', age=38) [out] Jennifer + +[case testNestedFunctionDunderDict312] +import sys + +def foo() -> None: + def inner() -> str: return "bar" + print(inner.__dict__) # type: ignore[attr-defined] + inner.__dict__.update({"x": 1}) # type: ignore[attr-defined] + print(inner.__dict__) # type: ignore[attr-defined] + print(inner.x) # type: ignore[attr-defined] + +if sys.version_info >= (3, 12): # 
type: ignore + foo() +[out] +[out version>=3.12] +{} +{'x': 1} +1 + +[case testFunctoolsUpdateWrapper] +import functools + +def bar() -> None: + def inner() -> str: return "bar" + functools.update_wrapper(inner, bar) # type: ignore + print(inner.__dict__) # type: ignore + +bar() +[out] +{'__module__': 'native', '__name__': 'bar', '__qualname__': 'bar', '__doc__': None, '__wrapped__': } From ff9deb3001d9c7cc84a1e2fed9125bf456b1d68b Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sun, 15 Oct 2023 21:44:02 +0100 Subject: [PATCH 096/144] Correctly handle runtime type applications of variadic types (#16240) This adds some missing pieces to runtime type application handling for both `TypeVarTuple` and `ParamSpec`. Everything is straightforward (maybe a bit hacky, but we already import `typeanal` in `checkexpr` for similar purposes, e.g. type aliases in runtime context). Fixes https://github.com/python/mypy/issues/14799 --- mypy/checkexpr.py | 34 ++++++++++++++++--- .../unit/check-parameter-specification.test | 13 +++++++ test-data/unit/check-typevar-tuple.test | 20 +++++++++++ 3 files changed, 62 insertions(+), 5 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index fd155ff87379..a1dd6d830758 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -28,7 +28,7 @@ from mypy.maptype import map_instance_to_supertype from mypy.meet import is_overlapping_types, narrow_declared_type from mypy.message_registry import ErrorMessage -from mypy.messages import MessageBuilder +from mypy.messages import MessageBuilder, format_type from mypy.nodes import ( ARG_NAMED, ARG_POS, @@ -116,10 +116,12 @@ from mypy.type_visitor import TypeTranslator from mypy.typeanal import ( check_for_explicit_any, + fix_instance, has_any_from_unimported_type, instantiate_type_alias, make_optional_type, set_any_tvars, + validate_instance, ) from mypy.typeops import ( callable_type, @@ -166,10 +168,12 @@ TypeVarLikeType, TypeVarTupleType, TypeVarType, + UnboundType, UninhabitedType, 
UnionType, UnpackType, find_unpack_in_list, + flatten_nested_tuples, flatten_nested_unions, get_proper_type, get_proper_types, @@ -4637,15 +4641,35 @@ class C(Generic[T, Unpack[Ts]]): ... similar to how it is done in other places using split_with_prefix_and_suffix(). """ vars = t.variables + args = flatten_nested_tuples(args) + + # TODO: this logic is duplicated with semanal_typeargs. + for tv, arg in zip(t.variables, args): + if isinstance(tv, ParamSpecType): + if not isinstance( + get_proper_type(arg), (Parameters, ParamSpecType, AnyType, UnboundType) + ): + self.chk.fail( + "Can only replace ParamSpec with a parameter types list or" + f" another ParamSpec, got {format_type(arg, self.chk.options)}", + ctx, + ) + return [AnyType(TypeOfAny.from_error)] * len(vars) + if not vars or not any(isinstance(v, TypeVarTupleType) for v in vars): return list(args) + assert t.is_type_obj() + info = t.type_object() + # We reuse the logic from semanal phase to reduce code duplication. + fake = Instance(info, args, line=ctx.line, column=ctx.column) + if not validate_instance(fake, self.chk.fail): + fix_instance( + fake, self.chk.fail, self.chk.note, disallow_any=False, options=self.chk.options + ) + args = list(fake.args) prefix = next(i for (i, v) in enumerate(vars) if isinstance(v, TypeVarTupleType)) suffix = len(vars) - prefix - 1 - if len(args) < len(vars) - 1: - self.msg.incompatible_type_application(len(vars), len(args), ctx) - return [AnyType(TypeOfAny.from_error)] * len(vars) - tvt = vars[prefix] assert isinstance(tvt, TypeVarTupleType) start, middle, end = split_with_prefix_and_suffix(tuple(args), prefix, suffix) diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 5b6024da687e..48fadbc96c90 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1977,6 +1977,19 @@ g(cb, y='a', x=0) # E: Argument "y" to "g" has incompatible type "str"; 
expecte # E: Argument "x" to "g" has incompatible type "int"; expected "str" [builtins fixtures/paramspec.pyi] +[case testParamSpecBadRuntimeTypeApplication] +from typing import ParamSpec, TypeVar, Generic, Callable + +R = TypeVar("R") +P = ParamSpec("P") +class C(Generic[P, R]): + x: Callable[P, R] + +bad = C[int, str]() # E: Can only replace ParamSpec with a parameter types list or another ParamSpec, got "int" +reveal_type(bad) # N: Revealed type is "__main__.C[Any, Any]" +reveal_type(bad.x) # N: Revealed type is "def (*Any, **Any) -> Any" +[builtins fixtures/paramspec.pyi] + [case testParamSpecNoCrashOnUnificationAlias] import mod [file mod.pyi] diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index 0212518bdec0..22a30432d098 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -1845,3 +1845,23 @@ def foo2(func: Callable[[Unpack[Args]], T], *args: Unpack[Args2]) -> T: def foo3(func: Callable[[int, Unpack[Args2]], T], *args: Unpack[Args2]) -> T: return submit(func, 1, *args) [builtins fixtures/tuple.pyi] + +[case testTypeVarTupleRuntimeTypeApplication] +from typing import Generic, TypeVar, Tuple +from typing_extensions import Unpack, TypeVarTuple + +T = TypeVar("T") +S = TypeVar("S") +Ts = TypeVarTuple("Ts") +class C(Generic[T, Unpack[Ts], S]): ... 
+ +Ints = Tuple[int, int] +x = C[Unpack[Ints]]() +reveal_type(x) # N: Revealed type is "__main__.C[builtins.int, builtins.int]" + +y = C[Unpack[Tuple[int, ...]]]() +reveal_type(y) # N: Revealed type is "__main__.C[builtins.int, Unpack[builtins.tuple[builtins.int, ...]], builtins.int]" + +z = C[int]() # E: Bad number of arguments, expected: at least 2, given: 1 +reveal_type(z) # N: Revealed type is "__main__.C[Any, Unpack[builtins.tuple[Any, ...]], Any]" +[builtins fixtures/tuple.pyi] From e4355948d797600c7b76da0a916fc5f29d10448e Mon Sep 17 00:00:00 2001 From: Chad Dombrova Date: Sun, 15 Oct 2023 15:35:20 -0700 Subject: [PATCH 097/144] stubgen: unify C extension and pure python stub generators with object oriented design (#15770) This MR is a major overhaul to `stubgen`. It has been tested extensively in the process of creating stubs for multiple large and varied libraries (detailed below). ## User story The impetus of this change is as follows: as a maintainer of third-party stubs I do _not_ want to use `stubgen` as a starting point for hand-editing stub files, I want a framework to regenerate stubs against upstream changes to a library. ## Summary of Changes - Introduces an object-oriented design for C extension stub generation, including a common base class that is shared between inspection-based and parsing-based stub generation. - Generally unifies and harmonizes the behavior between inspection and parsing approaches. For example, function formatting, import tracking, signature generators, and attribute filtering are now handled with the same code. - Adds support for `--include-private` and `--export-less` to c-extensions (inspection-based generation). - Adds support for force enabling inspection-based stub generation (the approach used for C extensions) on pure python code using a new `--inspect-mode` flag. Useful for packages that employ dynamic function or class factories. 
Also makes it possible to generate stubs for pyc-only modules (yes, this is a real use case)
- Adds an alias `--no-analysis` for `--parse-only` to clarify the purpose of this option.
- Removes filtering of `__version__` attribute from modules: I've encountered a number of cases in real-world code that utilize this attribute.
- Adds a number of tests for inspection mode. Even though these run on pure python code they increase coverage of the C extension code since it shares much of the same code base.

Below I've compiled some basic information about each stub library that I've created using my changes, and a link to the specialized code for procedurally generating the stubs.

| Library | code type | other notes |
| --- | --- | --- |
| [USD](https://github.com/LumaPictures/cg-stubs/blob/master/usd/stubgen_usd.py) | boost-python | integrates types from doxygen |
| [katana](https://github.com/LumaPictures/cg-stubs/blob/master/katana/stubgen_katana.py) | pyc and C extensions | uses epydoc docstrings. has pyi-only packages |
| [mari](https://github.com/LumaPictures/cg-stubs/blob/master/mari/stubgen_mari.py) | pure python and C extensions | uses epydoc docstrings |
| [opencolorio](https://github.com/LumaPictures/cg-stubs/blob/master/ocio/stubgen_ocio.py) | pybind11 | |
| [pyside2](https://github.com/LumaPictures/cg-stubs/blob/master/pyside/stubgen_pyside.py) | shiboken | |
| substance_painter | pure python | basic / non-custom. reads types from annotations |
| pymel | pure python | integrates types parsed from custom docs |

I know that this is a pretty big PR, and I know it's a lot to go through, but I've spent a huge amount of time on it and I believe this makes mypy's stubgen tool the absolute best available. If it helps, I also have 13 merged mypy PRs under my belt and I'll be around to fix any issues if they come up.
--------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Jelle Zijlstra --- docs/source/stubgen.rst | 14 +- mypy/moduleinspect.py | 4 + mypy/stubdoc.py | 100 +- mypy/stubgen.py | 814 ++++------- mypy/stubgenc.py | 1267 ++++++++++------- mypy/stubutil.py | 604 +++++++- mypy/test/teststubgen.py | 455 ++---- mypy/traverser.py | 3 +- setup.py | 1 - .../stubgen/pybind11_mypy_demo/__init__.pyi | 1 + .../stubgen/pybind11_mypy_demo/basics.pyi | 8 +- test-data/unit/stubgen.test | 296 +++- 12 files changed, 2125 insertions(+), 1442 deletions(-) diff --git a/docs/source/stubgen.rst b/docs/source/stubgen.rst index 2de0743572e7..c9e52956379a 100644 --- a/docs/source/stubgen.rst +++ b/docs/source/stubgen.rst @@ -127,12 +127,22 @@ alter the default behavior: unwanted side effects, such as the running of tests. Stubgen tries to skip test modules even without this option, but this does not always work. -.. option:: --parse-only +.. option:: --no-analysis Don't perform semantic analysis of source files. This may generate worse stubs -- in particular, some module, class, and function aliases may be represented as variables with the ``Any`` type. This is generally only - useful if semantic analysis causes a critical mypy error. + useful if semantic analysis causes a critical mypy error. Does not apply to + C extension modules. Incompatible with :option:`--inspect-mode`. + +.. option:: --inspect-mode + + Import and inspect modules instead of parsing source code. This is the default + behavior for C modules and pyc-only packages. The flag is useful to force + inspection for pure Python modules that make use of dynamically generated + members that would otherwise be omitted when using the default behavior of + code parsing. Implies :option:`--no-analysis` as analysis requires source + code. .. 
option:: --doc-dir PATH diff --git a/mypy/moduleinspect.py b/mypy/moduleinspect.py index b383fc9dc145..580b31fb4107 100644 --- a/mypy/moduleinspect.py +++ b/mypy/moduleinspect.py @@ -39,6 +39,10 @@ def is_c_module(module: ModuleType) -> bool: return os.path.splitext(module.__dict__["__file__"])[-1] in [".so", ".pyd", ".dll"] +def is_pyc_only(file: str | None) -> bool: + return bool(file and file.endswith(".pyc") and not os.path.exists(file[:-1])) + + class InspectError(Exception): pass diff --git a/mypy/stubdoc.py b/mypy/stubdoc.py index 145f57fd7751..c277573f0b59 100644 --- a/mypy/stubdoc.py +++ b/mypy/stubdoc.py @@ -8,11 +8,14 @@ import contextlib import io +import keyword import re import tokenize from typing import Any, Final, MutableMapping, MutableSequence, NamedTuple, Sequence, Tuple from typing_extensions import TypeAlias as _TypeAlias +import mypy.util + # Type alias for signatures strings in format ('func_name', '(arg, opt_arg=False)'). Sig: _TypeAlias = Tuple[str, str] @@ -35,12 +38,16 @@ class ArgSig: def __init__(self, name: str, type: str | None = None, default: bool = False): self.name = name - if type and not is_valid_type(type): - raise ValueError("Invalid type: " + type) self.type = type # Does this argument have a default value? 
self.default = default + def is_star_arg(self) -> bool: + return self.name.startswith("*") and not self.name.startswith("**") + + def is_star_kwarg(self) -> bool: + return self.name.startswith("**") + def __repr__(self) -> str: return "ArgSig(name={}, type={}, default={})".format( repr(self.name), repr(self.type), repr(self.default) @@ -59,7 +66,80 @@ def __eq__(self, other: Any) -> bool: class FunctionSig(NamedTuple): name: str args: list[ArgSig] - ret_type: str + ret_type: str | None + + def is_special_method(self) -> bool: + return bool( + self.name.startswith("__") + and self.name.endswith("__") + and self.args + and self.args[0].name in ("self", "cls") + ) + + def has_catchall_args(self) -> bool: + """Return if this signature has catchall args: (*args, **kwargs)""" + if self.args and self.args[0].name in ("self", "cls"): + args = self.args[1:] + else: + args = self.args + return ( + len(args) == 2 + and all(a.type in (None, "object", "Any", "typing.Any") for a in args) + and args[0].is_star_arg() + and args[1].is_star_kwarg() + ) + + def is_catchall_signature(self) -> bool: + """Return if this signature is the catchall identity: (*args, **kwargs) -> Any""" + return self.has_catchall_args() and self.ret_type in (None, "Any", "typing.Any") + + def format_sig( + self, + indent: str = "", + is_async: bool = False, + any_val: str | None = None, + docstring: str | None = None, + ) -> str: + args: list[str] = [] + for arg in self.args: + arg_def = arg.name + + if arg_def in keyword.kwlist: + arg_def = "_" + arg_def + + if ( + arg.type is None + and any_val is not None + and arg.name not in ("self", "cls") + and not arg.name.startswith("*") + ): + arg_type: str | None = any_val + else: + arg_type = arg.type + if arg_type: + arg_def += ": " + arg_type + if arg.default: + arg_def += " = ..." + + elif arg.default: + arg_def += "=..." 
+ + args.append(arg_def) + + retfield = "" + ret_type = self.ret_type if self.ret_type else any_val + if ret_type is not None: + retfield = " -> " + ret_type + + prefix = "async " if is_async else "" + sig = "{indent}{prefix}def {name}({args}){ret}:".format( + indent=indent, prefix=prefix, name=self.name, args=", ".join(args), ret=retfield + ) + if docstring: + suffix = f"\n{indent} {mypy.util.quote_docstring(docstring)}" + else: + suffix = " ..." + return f"{sig}{suffix}" # States of the docstring parser. @@ -176,17 +256,17 @@ def add_token(self, token: tokenize.TokenInfo) -> None: # arg_name is empty when there are no args. e.g. func() if self.arg_name: - try: + if self.arg_type and not is_valid_type(self.arg_type): + # wrong type, use Any + self.args.append( + ArgSig(name=self.arg_name, type=None, default=bool(self.arg_default)) + ) + else: self.args.append( ArgSig( name=self.arg_name, type=self.arg_type, default=bool(self.arg_default) ) ) - except ValueError: - # wrong type, use Any - self.args.append( - ArgSig(name=self.arg_name, type=None, default=bool(self.arg_default)) - ) self.arg_name = "" self.arg_type = None self.arg_default = None @@ -240,7 +320,7 @@ def args_kwargs(signature: FunctionSig) -> bool: def infer_sig_from_docstring(docstr: str | None, name: str) -> list[FunctionSig] | None: - """Convert function signature to list of TypedFunctionSig + """Convert function signature to list of FunctionSig Look for function signatures of function in docstring. 
Signature is a string of the format () -> or perhaps without diff --git a/mypy/stubgen.py b/mypy/stubgen.py index e8c12ee4d99b..395a49fa4e08 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -7,7 +7,7 @@ - or use mypy's mechanisms, if importing is prohibited * (optionally) semantically analysing the sources using mypy (as a single set) * emitting the stubs text: - - for Python modules: from ASTs using StubGenerator + - for Python modules: from ASTs using ASTStubGenerator - for C modules using runtime introspection and (optionally) Sphinx docs During first and third steps some problematic files can be skipped, but any @@ -42,14 +42,12 @@ from __future__ import annotations import argparse -import glob import keyword import os import os.path import sys import traceback -from collections import defaultdict -from typing import Final, Iterable, Mapping +from typing import Final, Iterable import mypy.build import mypy.mixedtraverser @@ -66,7 +64,7 @@ SearchPaths, default_lib_path, ) -from mypy.moduleinspect import ModuleInspect +from mypy.moduleinspect import ModuleInspect, is_pyc_only from mypy.nodes import ( ARG_NAMED, ARG_POS, @@ -85,6 +83,7 @@ DictExpr, EllipsisExpr, Expression, + ExpressionStmt, FloatExpr, FuncBase, FuncDef, @@ -109,20 +108,19 @@ Var, ) from mypy.options import Options as MypyOptions -from mypy.stubdoc import Sig, find_unique_signatures, parse_all_signatures -from mypy.stubgenc import ( - DocstringSignatureGenerator, - ExternalSignatureGenerator, - FallbackSignatureGenerator, - SignatureGenerator, - generate_stub_for_c_module, -) +from mypy.stubdoc import ArgSig, FunctionSig +from mypy.stubgenc import InspectionStubGenerator, generate_stub_for_c_module from mypy.stubutil import ( + BaseStubGenerator, CantImport, + ClassInfo, + FunctionContext, common_dir_prefix, fail_missing, find_module_path_and_all_py3, generate_guarded, + infer_method_arg_types, + infer_method_ret_type, remove_misplaced_type_comments, report_missing, walk_packages, @@ -140,19 
+138,13 @@ AnyType, CallableType, Instance, - NoneType, TupleType, Type, - TypeList, - TypeStrVisitor, UnboundType, - UnionType, get_proper_type, ) from mypy.visitor import NodeVisitor -TYPING_MODULE_NAMES: Final = ("typing", "typing_extensions") - # Common ways of naming package containing vendored modules. VENDOR_PACKAGES: Final = ["packages", "vendor", "vendored", "_vendor", "_vendored_packages"] @@ -165,32 +157,6 @@ "/_vendored_packages/", ] -# Special-cased names that are implicitly exported from the stub (from m import y as y). -EXTRA_EXPORTED: Final = { - "pyasn1_modules.rfc2437.univ", - "pyasn1_modules.rfc2459.char", - "pyasn1_modules.rfc2459.univ", -} - -# These names should be omitted from generated stubs. -IGNORED_DUNDERS: Final = { - "__all__", - "__author__", - "__version__", - "__about__", - "__copyright__", - "__email__", - "__license__", - "__summary__", - "__title__", - "__uri__", - "__str__", - "__repr__", - "__getstate__", - "__setstate__", - "__slots__", -} - # These methods are expected to always return a non-trivial value. METHODS_WITH_RETURN_VALUE: Final = { "__ne__", @@ -203,22 +169,6 @@ "__iter__", } -# These magic methods always return the same type. -KNOWN_MAGIC_METHODS_RETURN_TYPES: Final = { - "__len__": "int", - "__length_hint__": "int", - "__init__": "None", - "__del__": "None", - "__bool__": "bool", - "__bytes__": "bytes", - "__format__": "str", - "__contains__": "bool", - "__complex__": "complex", - "__int__": "int", - "__float__": "float", - "__index__": "int", -} - class Options: """Represents stubgen options. @@ -230,6 +180,7 @@ def __init__( self, pyversion: tuple[int, int], no_import: bool, + inspect: bool, doc_dir: str, search_path: list[str], interpreter: str, @@ -248,6 +199,7 @@ def __init__( # See parse_options for descriptions of the flags. 
self.pyversion = pyversion self.no_import = no_import + self.inspect = inspect self.doc_dir = doc_dir self.search_path = search_path self.interpreter = interpreter @@ -279,6 +231,9 @@ def __init__( self.runtime_all = runtime_all self.ast: MypyFile | None = None + def __repr__(self) -> str: + return f"StubSource({self.source})" + @property def module(self) -> str: return self.source.module @@ -303,71 +258,13 @@ def path(self) -> str | None: ERROR_MARKER: Final = "" -class AnnotationPrinter(TypeStrVisitor): - """Visitor used to print existing annotations in a file. - - The main difference from TypeStrVisitor is a better treatment of - unbound types. - - Notes: - * This visitor doesn't add imports necessary for annotations, this is done separately - by ImportTracker. - * It can print all kinds of types, but the generated strings may not be valid (notably - callable types) since it prints the same string that reveal_type() does. - * For Instance types it prints the fully qualified names. - """ - - # TODO: Generate valid string representation for callable types. - # TODO: Use short names for Instances. - def __init__(self, stubgen: StubGenerator) -> None: - super().__init__(options=mypy.options.Options()) - self.stubgen = stubgen - - def visit_any(self, t: AnyType) -> str: - s = super().visit_any(t) - self.stubgen.import_tracker.require_name(s) - return s - - def visit_unbound_type(self, t: UnboundType) -> str: - s = t.name - self.stubgen.import_tracker.require_name(s) - if t.args: - s += f"[{self.args_str(t.args)}]" - return s - - def visit_none_type(self, t: NoneType) -> str: - return "None" - - def visit_type_list(self, t: TypeList) -> str: - return f"[{self.list_str(t.items)}]" - - def visit_union_type(self, t: UnionType) -> str: - return " | ".join([item.accept(self) for item in t.items]) - - def args_str(self, args: Iterable[Type]) -> str: - """Convert an array of arguments to strings and join the results with commas. 
- - The main difference from list_str is the preservation of quotes for string - arguments - """ - types = ["builtins.bytes", "builtins.str"] - res = [] - for arg in args: - arg_str = arg.accept(self) - if isinstance(arg, UnboundType) and arg.original_str_fallback in types: - res.append(f"'{arg_str}'") - else: - res.append(arg_str) - return ", ".join(res) - - class AliasPrinter(NodeVisitor[str]): """Visitor used to collect type aliases _and_ type variable definitions. Visit r.h.s of the definition to get the string representation of type alias. """ - def __init__(self, stubgen: StubGenerator) -> None: + def __init__(self, stubgen: ASTStubGenerator) -> None: self.stubgen = stubgen super().__init__() @@ -435,124 +332,6 @@ def visit_op_expr(self, o: OpExpr) -> str: return f"{o.left.accept(self)} {o.op} {o.right.accept(self)}" -class ImportTracker: - """Record necessary imports during stub generation.""" - - def __init__(self) -> None: - # module_for['foo'] has the module name where 'foo' was imported from, or None if - # 'foo' is a module imported directly; examples - # 'from pkg.m import f as foo' ==> module_for['foo'] == 'pkg.m' - # 'from m import f' ==> module_for['f'] == 'm' - # 'import m' ==> module_for['m'] == None - # 'import pkg.m' ==> module_for['pkg.m'] == None - # ==> module_for['pkg'] == None - self.module_for: dict[str, str | None] = {} - - # direct_imports['foo'] is the module path used when the name 'foo' was added to the - # namespace. 
- # import foo.bar.baz ==> direct_imports['foo'] == 'foo.bar.baz' - # ==> direct_imports['foo.bar'] == 'foo.bar.baz' - # ==> direct_imports['foo.bar.baz'] == 'foo.bar.baz' - self.direct_imports: dict[str, str] = {} - - # reverse_alias['foo'] is the name that 'foo' had originally when imported with an - # alias; examples - # 'import numpy as np' ==> reverse_alias['np'] == 'numpy' - # 'import foo.bar as bar' ==> reverse_alias['bar'] == 'foo.bar' - # 'from decimal import Decimal as D' ==> reverse_alias['D'] == 'Decimal' - self.reverse_alias: dict[str, str] = {} - - # required_names is the set of names that are actually used in a type annotation - self.required_names: set[str] = set() - - # Names that should be reexported if they come from another module - self.reexports: set[str] = set() - - def add_import_from(self, module: str, names: list[tuple[str, str | None]]) -> None: - for name, alias in names: - if alias: - # 'from {module} import {name} as {alias}' - self.module_for[alias] = module - self.reverse_alias[alias] = name - else: - # 'from {module} import {name}' - self.module_for[name] = module - self.reverse_alias.pop(name, None) - self.direct_imports.pop(alias or name, None) - - def add_import(self, module: str, alias: str | None = None) -> None: - if alias: - # 'import {module} as {alias}' - self.module_for[alias] = None - self.reverse_alias[alias] = module - else: - # 'import {module}' - name = module - # add module and its parent packages - while name: - self.module_for[name] = None - self.direct_imports[name] = module - self.reverse_alias.pop(name, None) - name = name.rpartition(".")[0] - - def require_name(self, name: str) -> None: - while name not in self.direct_imports and "." in name: - name = name.rsplit(".", 1)[0] - self.required_names.add(name) - - def reexport(self, name: str) -> None: - """Mark a given non qualified name as needed in __all__. 
- - This means that in case it comes from a module, it should be - imported with an alias even is the alias is the same as the name. - """ - self.require_name(name) - self.reexports.add(name) - - def import_lines(self) -> list[str]: - """The list of required import lines (as strings with python code).""" - result = [] - - # To summarize multiple names imported from a same module, we collect those - # in the `module_map` dictionary, mapping a module path to the list of names that should - # be imported from it. the names can also be alias in the form 'original as alias' - module_map: Mapping[str, list[str]] = defaultdict(list) - - for name in sorted( - self.required_names, - key=lambda n: (self.reverse_alias[n], n) if n in self.reverse_alias else (n, ""), - ): - # If we haven't seen this name in an import statement, ignore it - if name not in self.module_for: - continue - - m = self.module_for[name] - if m is not None: - # This name was found in a from ... import ... - # Collect the name in the module_map - if name in self.reverse_alias: - name = f"{self.reverse_alias[name]} as {name}" - elif name in self.reexports: - name = f"{name} as {name}" - module_map[m].append(name) - else: - # This name was found in an import ... - # We can already generate the import line - if name in self.reverse_alias: - source = self.reverse_alias[name] - result.append(f"import {source} as {name}\n") - elif name in self.reexports: - assert "." not in name # Because reexports only has nonqualified names - result.append(f"import {name} as {name}\n") - else: - result.append(f"import {name}\n") - - # Now generate all the from ... import ... 
lines collected in module_map - for module, names in sorted(module_map.items()): - result.append(f"from {module} import {', '.join(sorted(names))}\n") - return result - - def find_defined_names(file: MypyFile) -> set[str]: finder = DefinitionFinder() file.accept(finder) @@ -583,6 +362,10 @@ def find_referenced_names(file: MypyFile) -> set[str]: return finder.refs +def is_none_expr(expr: Expression) -> bool: + return isinstance(expr, NameExpr) and expr.name == "None" + + class ReferenceFinder(mypy.mixedtraverser.MixedTraverserVisitor): """Find all name references (both local and global).""" @@ -625,74 +408,37 @@ def add_ref(self, fullname: str) -> None: self.refs.add(fullname) -class StubGenerator(mypy.traverser.TraverserVisitor): +class ASTStubGenerator(BaseStubGenerator, mypy.traverser.TraverserVisitor): """Generate stub text from a mypy AST.""" def __init__( self, - _all_: list[str] | None, + _all_: list[str] | None = None, include_private: bool = False, analyzed: bool = False, export_less: bool = False, include_docstrings: bool = False, ) -> None: - # Best known value of __all__. - self._all_ = _all_ - self._output: list[str] = [] + super().__init__(_all_, include_private, export_less, include_docstrings) self._decorators: list[str] = [] - self._import_lines: list[str] = [] - # Current indent level (indent is hardcoded to 4 spaces). - self._indent = "" # Stack of defined variables (per scope). self._vars: list[list[str]] = [[]] # What was generated previously in the stub file. self._state = EMPTY - self._toplevel_names: list[str] = [] - self._include_private = include_private - self._include_docstrings = include_docstrings self._current_class: ClassDef | None = None - self.import_tracker = ImportTracker() # Was the tree semantically analysed before? self.analyzed = analyzed - # Disable implicit exports of package-internal imports? 
- self.export_less = export_less - # Add imports that could be implicitly generated - self.import_tracker.add_import_from("typing", [("NamedTuple", None)]) - # Names in __all__ are required - for name in _all_ or (): - if name not in IGNORED_DUNDERS: - self.import_tracker.reexport(name) - self.defined_names: set[str] = set() # Short names of methods defined in the body of the current class self.method_names: set[str] = set() self.processing_dataclass = False def visit_mypy_file(self, o: MypyFile) -> None: - self.module = o.fullname # Current module being processed + self.module_name = o.fullname # Current module being processed self.path = o.path - self.defined_names = find_defined_names(o) + self.set_defined_names(find_defined_names(o)) self.referenced_names = find_referenced_names(o) - known_imports = { - "_typeshed": ["Incomplete"], - "typing": ["Any", "TypeVar", "NamedTuple"], - "collections.abc": ["Generator"], - "typing_extensions": ["TypedDict", "ParamSpec", "TypeVarTuple"], - } - for pkg, imports in known_imports.items(): - for t in imports: - if t not in self.defined_names: - alias = None - else: - alias = "_" + t - self.import_tracker.add_import_from(pkg, [(t, alias)]) super().visit_mypy_file(o) - undefined_names = [name for name in self._all_ or [] if name not in self._toplevel_names] - if undefined_names: - if self._state != EMPTY: - self.add("\n") - self.add("# Names in __all__ with no definition:\n") - for name in sorted(undefined_names): - self.add(f"# {name}\n") + self.check_undefined_names() def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: """@property with setters and getters, @overload chain and some others.""" @@ -714,38 +460,14 @@ def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: # skip the overload implementation and clear the decorator we just processed self.clear_decorators() - def visit_func_def(self, o: FuncDef) -> None: - is_dataclass_generated = ( - self.analyzed and self.processing_dataclass and 
o.info.names[o.name].plugin_generated - ) - if is_dataclass_generated and o.name != "__init__": - # Skip methods generated by the @dataclass decorator (except for __init__) - return - if ( - self.is_private_name(o.name, o.fullname) - or self.is_not_in_all(o.name) - or (self.is_recorded_name(o.name) and not o.is_overload) - ): - self.clear_decorators() - return - if not self._indent and self._state not in (EMPTY, FUNC) and not o.is_awaitable_coroutine: - self.add("\n") - if not self.is_top_level(): - self_inits = find_self_initializers(o) - for init, value in self_inits: - if init in self.method_names: - # Can't have both an attribute and a method/property with the same name. - continue - init_code = self.get_init(init, value) - if init_code: - self.add(init_code) - # dump decorators, just before "def ..." - for s in self._decorators: - self.add(s) - self.clear_decorators() - self.add(f"{self._indent}{'async ' if o.is_coroutine else ''}def {o.name}(") - self.record_name(o.name) - args: list[str] = [] + def get_default_function_sig(self, func_def: FuncDef, ctx: FunctionContext) -> FunctionSig: + args = self._get_func_args(func_def, ctx) + retname = self._get_func_return(func_def, ctx) + return FunctionSig(func_def.name, args, retname) + + def _get_func_args(self, o: FuncDef, ctx: FunctionContext) -> list[ArgSig]: + args: list[ArgSig] = [] + for i, arg_ in enumerate(o.arguments): var = arg_.variable kind = arg_.kind @@ -759,87 +481,146 @@ def visit_func_def(self, o: FuncDef) -> None: # name their 0th argument other than self/cls is_self_arg = i == 0 and name == "self" is_cls_arg = i == 0 and name == "cls" - annotation = "" + typename: str | None = None if annotated_type and not is_self_arg and not is_cls_arg: # Luckily, an argument explicitly annotated with "Any" has # type "UnboundType" and will not match. 
if not isinstance(get_proper_type(annotated_type), AnyType): - annotation = f": {self.print_annotation(annotated_type)}" + typename = self.print_annotation(annotated_type) - if kind.is_named() and not any(arg.startswith("*") for arg in args): - args.append("*") + if kind.is_named() and not any(arg.name.startswith("*") for arg in args): + args.append(ArgSig("*")) if arg_.initializer: - if not annotation: + if not typename: typename = self.get_str_type_of_node(arg_.initializer, True, False) - if typename == "": - annotation = "=..." - else: - annotation = f": {typename} = ..." - else: - annotation += " = ..." - arg = name + annotation elif kind == ARG_STAR: - arg = f"*{name}{annotation}" + name = f"*{name}" elif kind == ARG_STAR2: - arg = f"**{name}{annotation}" - else: - arg = name + annotation - args.append(arg) - if o.name == "__init__" and is_dataclass_generated and "**" in args: - # The dataclass plugin generates invalid nameless "*" and "**" arguments - new_name = "".join(a.split(":", 1)[0] for a in args).replace("*", "") - args[args.index("*")] = f"*{new_name}_" # this name is guaranteed to be unique - args[args.index("**")] = f"**{new_name}__" # same here + name = f"**{name}" + + args.append(ArgSig(name, typename, default=bool(arg_.initializer))) + + if ctx.class_info is not None and all( + arg.type is None and arg.default is False for arg in args + ): + new_args = infer_method_arg_types( + ctx.name, ctx.class_info.self_var, [arg.name for arg in args] + ) + if new_args is not None: + args = new_args - retname = None + is_dataclass_generated = ( + self.analyzed and self.processing_dataclass and o.info.names[o.name].plugin_generated + ) + if o.name == "__init__" and is_dataclass_generated and "**" in [a.name for a in args]: + # The dataclass plugin generates invalid nameless "*" and "**" arguments + new_name = "".join(a.name.strip("*") for a in args) + for arg in args: + if arg.name == "*": + arg.name = f"*{new_name}_" # this name is guaranteed to be unique + 
elif arg.name == "**": + arg.name = f"**{new_name}__" # same here + return args + + def _get_func_return(self, o: FuncDef, ctx: FunctionContext) -> str | None: if o.name != "__init__" and isinstance(o.unanalyzed_type, CallableType): if isinstance(get_proper_type(o.unanalyzed_type.ret_type), AnyType): # Luckily, a return type explicitly annotated with "Any" has # type "UnboundType" and will enter the else branch. - retname = None # implicit Any + return None # implicit Any else: - retname = self.print_annotation(o.unanalyzed_type.ret_type) - elif o.abstract_status == IS_ABSTRACT or o.name in METHODS_WITH_RETURN_VALUE: + return self.print_annotation(o.unanalyzed_type.ret_type) + if o.abstract_status == IS_ABSTRACT or o.name in METHODS_WITH_RETURN_VALUE: # Always assume abstract methods return Any unless explicitly annotated. Also # some dunder methods should not have a None return type. - retname = None # implicit Any - elif o.name in KNOWN_MAGIC_METHODS_RETURN_TYPES: - retname = KNOWN_MAGIC_METHODS_RETURN_TYPES[o.name] - elif has_yield_expression(o) or has_yield_from_expression(o): - generator_name = self.add_typing_import("Generator") + return None # implicit Any + retname = infer_method_ret_type(o.name) + if retname is not None: + return retname + if has_yield_expression(o) or has_yield_from_expression(o): + generator_name = self.add_name("collections.abc.Generator") yield_name = "None" send_name = "None" return_name = "None" if has_yield_from_expression(o): - yield_name = send_name = self.add_typing_import("Incomplete") + yield_name = send_name = self.add_name("_typeshed.Incomplete") else: for expr, in_assignment in all_yield_expressions(o): - if expr.expr is not None and not self.is_none_expr(expr.expr): - yield_name = self.add_typing_import("Incomplete") + if expr.expr is not None and not is_none_expr(expr.expr): + yield_name = self.add_name("_typeshed.Incomplete") if in_assignment: - send_name = self.add_typing_import("Incomplete") + send_name = 
self.add_name("_typeshed.Incomplete") if has_return_statement(o): - return_name = self.add_typing_import("Incomplete") - retname = f"{generator_name}[{yield_name}, {send_name}, {return_name}]" - elif not has_return_statement(o) and o.abstract_status == NOT_ABSTRACT: - retname = "None" - retfield = "" - if retname is not None: - retfield = " -> " + retname + return_name = self.add_name("_typeshed.Incomplete") + return f"{generator_name}[{yield_name}, {send_name}, {return_name}]" + if not has_return_statement(o) and o.abstract_status == NOT_ABSTRACT: + return "None" + return None + + def _get_func_docstring(self, node: FuncDef) -> str | None: + if not node.body.body: + return None + expr = node.body.body[0] + if isinstance(expr, ExpressionStmt) and isinstance(expr.expr, StrExpr): + return expr.expr.value + return None - self.add(", ".join(args)) - self.add(f"){retfield}:") - if self._include_docstrings and o.docstring: - docstring = mypy.util.quote_docstring(o.docstring) - self.add(f"\n{self._indent} {docstring}\n") + def visit_func_def(self, o: FuncDef) -> None: + is_dataclass_generated = ( + self.analyzed and self.processing_dataclass and o.info.names[o.name].plugin_generated + ) + if is_dataclass_generated and o.name != "__init__": + # Skip methods generated by the @dataclass decorator (except for __init__) + return + if ( + self.is_private_name(o.name, o.fullname) + or self.is_not_in_all(o.name) + or (self.is_recorded_name(o.name) and not o.is_overload) + ): + self.clear_decorators() + return + if self.is_top_level() and self._state not in (EMPTY, FUNC): + self.add("\n") + if not self.is_top_level(): + self_inits = find_self_initializers(o) + for init, value in self_inits: + if init in self.method_names: + # Can't have both an attribute and a method/property with the same name. 
+ continue + init_code = self.get_init(init, value) + if init_code: + self.add(init_code) + + if self._current_class is not None: + if len(o.arguments): + self_var = o.arguments[0].variable.name + else: + self_var = "self" + class_info = ClassInfo(self._current_class.name, self_var) else: - self.add(" ...\n") + class_info = None + + ctx = FunctionContext( + module_name=self.module_name, + name=o.name, + docstring=self._get_func_docstring(o), + is_abstract=o.abstract_status != NOT_ABSTRACT, + class_info=class_info, + ) - self._state = FUNC + self.record_name(o.name) - def is_none_expr(self, expr: Expression) -> bool: - return isinstance(expr, NameExpr) and expr.name == "None" + default_sig = self.get_default_function_sig(o, ctx) + sigs = self.get_signatures(default_sig, self.sig_generators, ctx) + + for output in self.format_func_def( + sigs, is_coroutine=o.is_coroutine, decorators=self._decorators, docstring=ctx.docstring + ): + self.add(output + "\n") + + self.clear_decorators() + self._state = FUNC def visit_decorator(self, o: Decorator) -> None: if self.is_private_name(o.func.name, o.func.fullname): @@ -917,13 +698,12 @@ def visit_class_def(self, o: ClassDef) -> None: self._current_class = o self.method_names = find_method_names(o.defs.body) sep: int | None = None - if not self._indent and self._state != EMPTY: + if self.is_top_level() and self._state != EMPTY: sep = len(self._output) self.add("\n") decorators = self.get_class_decorators(o) for d in decorators: self.add(f"{self._indent}@{d}\n") - self.add(f"{self._indent}class {o.name}") self.record_name(o.name) base_types = self.get_base_types(o) if base_types: @@ -936,17 +716,16 @@ def visit_class_def(self, o: ClassDef) -> None: base_types.append("metaclass=abc.ABCMeta") self.import_tracker.add_import("abc") self.import_tracker.require_name("abc") - if base_types: - self.add(f"({', '.join(base_types)})") - self.add(":\n") - self._indent += " " + bases = f"({', '.join(base_types)})" if base_types else "" + 
self.add(f"{self._indent}class {o.name}{bases}:\n") + self.indent() if self._include_docstrings and o.docstring: docstring = mypy.util.quote_docstring(o.docstring) self.add(f"{self._indent}{docstring}\n") n = len(self._output) self._vars.append([]) super().visit_class_def(o) - self._indent = self._indent[:-4] + self.dedent() self._vars.pop() self._vars[-1].append(o.name) if len(self._output) == n: @@ -987,17 +766,17 @@ def get_base_types(self, cdef: ClassDef) -> list[str]: typename = base.args[0].value if nt_fields is None: # Invalid namedtuple() call, cannot determine fields - base_types.append(self.add_typing_import("Incomplete")) + base_types.append(self.add_name("_typeshed.Incomplete")) continue fields_str = ", ".join(f"({f!r}, {t})" for f, t in nt_fields) - namedtuple_name = self.add_typing_import("NamedTuple") + namedtuple_name = self.add_name("typing.NamedTuple") base_types.append(f"{namedtuple_name}({typename!r}, [{fields_str}])") elif self.is_typed_namedtuple(base): base_types.append(base.accept(p)) else: # At this point, we don't know what the base class is, so we # just use Incomplete as the base class. 
- base_types.append(self.add_typing_import("Incomplete")) + base_types.append(self.add_name("_typeshed.Incomplete")) for name, value in cdef.keywords.items(): if name == "metaclass": continue # handled separately @@ -1063,7 +842,7 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None: init = self.get_init(item.name, o.rvalue, annotation) if init: found = True - if not sep and not self._indent and self._state not in (EMPTY, VAR): + if not sep and self.is_top_level() and self._state not in (EMPTY, VAR): init = "\n" + init sep = True self.add(init) @@ -1092,10 +871,12 @@ def _get_namedtuple_fields(self, call: CallExpr) -> list[tuple[str, str]] | None field_names.append(field.value) else: return None # Invalid namedtuple fields type - if not field_names: + if field_names: + incomplete = self.add_name("_typeshed.Incomplete") + return [(field_name, incomplete) for field_name in field_names] + else: return [] - incomplete = self.add_typing_import("Incomplete") - return [(field_name, incomplete) for field_name in field_names] + elif self.is_typed_namedtuple(call): fields_arg = call.args[1] if not isinstance(fields_arg, (ListExpr, TupleExpr)): @@ -1125,7 +906,7 @@ def process_namedtuple(self, lvalue: NameExpr, rvalue: CallExpr) -> None: if fields is None: self.annotate_as_incomplete(lvalue) return - bases = self.add_typing_import("NamedTuple") + bases = self.add_name("typing.NamedTuple") # TODO: Add support for generic NamedTuples. Requires `Generic` as base class. class_def = f"{self._indent}class {lvalue.name}({bases}):" if len(fields) == 0: @@ -1175,13 +956,13 @@ def process_typeddict(self, lvalue: NameExpr, rvalue: CallExpr) -> None: total = arg else: items.append((arg_name, arg)) - bases = self.add_typing_import("TypedDict") p = AliasPrinter(self) if any(not key.isidentifier() or keyword.iskeyword(key) for key, _ in items): # Keep the call syntax if there are non-identifier or reserved keyword keys. 
self.add(f"{self._indent}{lvalue.name} = {rvalue.accept(p)}\n") self._state = VAR else: + bases = self.add_name("typing_extensions.TypedDict") # TODO: Add support for generic TypedDicts. Requires `Generic` as base class. if total is not None: bases += f", total={total.accept(p)}" @@ -1198,7 +979,8 @@ def process_typeddict(self, lvalue: NameExpr, rvalue: CallExpr) -> None: self._state = CLASS def annotate_as_incomplete(self, lvalue: NameExpr) -> None: - self.add(f"{self._indent}{lvalue.name}: {self.add_typing_import('Incomplete')}\n") + incomplete = self.add_name("_typeshed.Incomplete") + self.add(f"{self._indent}{lvalue.name}: {incomplete}\n") self._state = VAR def is_alias_expression(self, expr: Expression, top_level: bool = True) -> bool: @@ -1280,9 +1062,9 @@ def visit_import_from(self, o: ImportFrom) -> None: exported_names: set[str] = set() import_names = [] module, relative = translate_module_name(o.id, o.relative) - if self.module: + if self.module_name: full_module, ok = mypy.util.correct_relative_import( - self.module, relative, module, self.path.endswith(".__init__.py") + self.module_name, relative, module, self.path.endswith(".__init__.py") ) if not ok: full_module = module @@ -1295,37 +1077,7 @@ def visit_import_from(self, o: ImportFrom) -> None: # Vendored six -- translate into plain 'import six'. self.visit_import(Import([("six", None)])) continue - exported = False - if as_name is None and self.module and (self.module + "." + name) in EXTRA_EXPORTED: - # Special case certain names that should be exported, against our general rules. - exported = True - is_private = self.is_private_name(name, full_module + "." 
+ name) - if ( - as_name is None - and name not in self.referenced_names - and not any(n.startswith(name + ".") for n in self.referenced_names) - and (not self._all_ or name in IGNORED_DUNDERS) - and not is_private - and module not in ("abc", "asyncio") + TYPING_MODULE_NAMES - ): - # An imported name that is never referenced in the module is assumed to be - # exported, unless there is an explicit __all__. Note that we need to special - # case 'abc' since some references are deleted during semantic analysis. - exported = True - top_level = full_module.split(".", 1)[0] - self_top_level = self.module.split(".", 1)[0] - if ( - as_name is None - and not self.export_less - and (not self._all_ or name in IGNORED_DUNDERS) - and self.module - and not is_private - and top_level in (self_top_level, "_" + self_top_level) - ): - # Export imports from the same package, since we can't reliably tell whether they - # are part of the public API. - exported = True - if exported: + if self.should_reexport(name, full_module, as_name is not None): self.import_tracker.reexport(name) as_name = name import_names.append((name, as_name)) @@ -1339,7 +1091,7 @@ def visit_import_from(self, o: ImportFrom) -> None: names = [ name for name, alias in o.names - if name in self._all_ and alias is None and name not in IGNORED_DUNDERS + if name in self._all_ and alias is None and name not in self.IGNORED_DUNDERS ] exported_names.update(names) @@ -1373,7 +1125,7 @@ def get_init( isinstance(annotation, UnboundType) and not annotation.args and annotation.name == "Final" - and self.import_tracker.module_for.get("Final") in TYPING_MODULE_NAMES + and self.import_tracker.module_for.get("Final") in self.TYPING_MODULE_NAMES ): # Final without type argument is invalid in stubs. 
final_arg = self.get_str_type_of_node(rvalue) @@ -1406,67 +1158,14 @@ def get_assign_initializer(self, rvalue: Expression) -> str: # By default, no initializer is required: return "" - def add(self, string: str) -> None: - """Add text to generated stub.""" - self._output.append(string) - def add_decorator(self, name: str, require_name: bool = False) -> None: if require_name: self.import_tracker.require_name(name) - if not self._indent and self._state not in (EMPTY, FUNC): - self._decorators.append("\n") - self._decorators.append(f"{self._indent}@{name}\n") + self._decorators.append(f"@{name}") def clear_decorators(self) -> None: self._decorators.clear() - def typing_name(self, name: str) -> str: - if name in self.defined_names: - # Avoid name clash between name from typing and a name defined in stub. - return "_" + name - else: - return name - - def add_typing_import(self, name: str) -> str: - """Add a name to be imported for typing, unless it's imported already. - - The import will be internal to the stub. 
- """ - name = self.typing_name(name) - self.import_tracker.require_name(name) - return name - - def add_import_line(self, line: str) -> None: - """Add a line of text to the import section, unless it's already there.""" - if line not in self._import_lines: - self._import_lines.append(line) - - def output(self) -> str: - """Return the text for the stub.""" - imports = "" - if self._import_lines: - imports += "".join(self._import_lines) - imports += "".join(self.import_tracker.import_lines()) - if imports and self._output: - imports += "\n" - return imports + "".join(self._output) - - def is_not_in_all(self, name: str) -> bool: - if self.is_private_name(name): - return False - if self._all_: - return self.is_top_level() and name not in self._all_ - return False - - def is_private_name(self, name: str, fullname: str | None = None) -> bool: - if self._include_private: - return False - if fullname in EXTRA_EXPORTED: - return False - if name == "_": - return False - return name.startswith("_") and (not name.endswith("__") or name in IGNORED_DUNDERS) - def is_private_member(self, fullname: str) -> bool: parts = fullname.split(".") return any(self.is_private_name(part) for part in parts) @@ -1494,9 +1193,9 @@ def get_str_type_of_node( if isinstance(rvalue, NameExpr) and rvalue.name in ("True", "False"): return "bool" if can_infer_optional and isinstance(rvalue, NameExpr) and rvalue.name == "None": - return f"{self.add_typing_import('Incomplete')} | None" + return f"{self.add_name('_typeshed.Incomplete')} | None" if can_be_any: - return self.add_typing_import("Incomplete") + return self.add_name("_typeshed.Incomplete") else: return "" @@ -1534,25 +1233,20 @@ def maybe_unwrap_unary_expr(self, expr: Expression) -> Expression: # This is some other unary expr, we cannot do anything with it (yet?). 
return expr - def print_annotation(self, t: Type) -> str: - printer = AnnotationPrinter(self) - return t.accept(printer) - - def is_top_level(self) -> bool: - """Are we processing the top level of a file?""" - return self._indent == "" - - def record_name(self, name: str) -> None: - """Mark a name as defined. - - This only does anything if at the top level of a module. - """ - if self.is_top_level(): - self._toplevel_names.append(name) - - def is_recorded_name(self, name: str) -> bool: - """Has this name been recorded previously?""" - return self.is_top_level() and name in self._toplevel_names + def should_reexport(self, name: str, full_module: str, name_is_alias: bool) -> bool: + is_private = self.is_private_name(name, full_module + "." + name) + if ( + not name_is_alias + and name not in self.referenced_names + and (not self._all_ or name in self.IGNORED_DUNDERS) + and not is_private + and full_module not in ("abc", "asyncio") + self.TYPING_MODULE_NAMES + ): + # An imported name that is never referenced in the module is assumed to be + # exported, unless there is an explicit __all__. Note that we need to special + # case 'abc' since some references are deleted during semantic analysis. 
+ return True + return super().should_reexport(name, full_module, name_is_alias) def find_method_names(defs: list[Statement]) -> set[str]: @@ -1608,6 +1302,17 @@ def remove_blacklisted_modules(modules: list[StubSource]) -> list[StubSource]: ] +def split_pyc_from_py(modules: list[StubSource]) -> tuple[list[StubSource], list[StubSource]]: + py_modules = [] + pyc_modules = [] + for mod in modules: + if is_pyc_only(mod.path): + pyc_modules.append(mod) + else: + py_modules.append(mod) + return pyc_modules, py_modules + + def is_blacklisted_path(path: str) -> bool: return any(substr in (normalize_path_separators(path) + "\n") for substr in BLACKLIST) @@ -1620,10 +1325,10 @@ def normalize_path_separators(path: str) -> str: def collect_build_targets( options: Options, mypy_opts: MypyOptions -) -> tuple[list[StubSource], list[StubSource]]: +) -> tuple[list[StubSource], list[StubSource], list[StubSource]]: """Collect files for which we need to generate stubs. - Return list of Python modules and C modules. + Return list of py modules, pyc modules, and C modules. """ if options.packages or options.modules: if options.no_import: @@ -1646,8 +1351,8 @@ def collect_build_targets( c_modules = [] py_modules = remove_blacklisted_modules(py_modules) - - return py_modules, c_modules + pyc_mod, py_mod = split_pyc_from_py(py_modules) + return py_mod, pyc_mod, c_modules def find_module_paths_using_imports( @@ -1826,98 +1531,90 @@ def generate_asts_for_modules( mod.runtime_all = res.manager.semantic_analyzer.export_map[mod.module] -def generate_stub_from_ast( +def generate_stub_for_py_module( mod: StubSource, target: str, + *, parse_only: bool = False, + inspect: bool = False, include_private: bool = False, export_less: bool = False, include_docstrings: bool = False, + doc_dir: str = "", + all_modules: list[str], ) -> None: """Use analysed (or just parsed) AST to generate type stub for single file. If directory for target doesn't exist it will created. Existing stub will be overwritten. 
""" - gen = StubGenerator( - mod.runtime_all, - include_private=include_private, - analyzed=not parse_only, - export_less=export_less, - include_docstrings=include_docstrings, - ) - assert mod.ast is not None, "This function must be used only with analyzed modules" - mod.ast.accept(gen) + if inspect: + ngen = InspectionStubGenerator( + module_name=mod.module, + known_modules=all_modules, + _all_=mod.runtime_all, + doc_dir=doc_dir, + include_private=include_private, + export_less=export_less, + include_docstrings=include_docstrings, + ) + ngen.generate_module() + output = ngen.output() + + else: + gen = ASTStubGenerator( + mod.runtime_all, + include_private=include_private, + analyzed=not parse_only, + export_less=export_less, + include_docstrings=include_docstrings, + ) + assert mod.ast is not None, "This function must be used only with analyzed modules" + mod.ast.accept(gen) + output = gen.output() # Write output to file. subdir = os.path.dirname(target) if subdir and not os.path.isdir(subdir): os.makedirs(subdir) with open(target, "w") as file: - file.write("".join(gen.output())) - - -def get_sig_generators(options: Options) -> list[SignatureGenerator]: - sig_generators: list[SignatureGenerator] = [ - DocstringSignatureGenerator(), - FallbackSignatureGenerator(), - ] - if options.doc_dir: - # Collect info from docs (if given). Always check these first. - sigs, class_sigs = collect_docs_signatures(options.doc_dir) - sig_generators.insert(0, ExternalSignatureGenerator(sigs, class_sigs)) - return sig_generators - - -def collect_docs_signatures(doc_dir: str) -> tuple[dict[str, str], dict[str, str]]: - """Gather all function and class signatures in the docs. - - Return a tuple (function signatures, class signatures). - Currently only used for C modules. 
- """ - all_sigs: list[Sig] = [] - all_class_sigs: list[Sig] = [] - for path in glob.glob(f"{doc_dir}/*.rst"): - with open(path) as f: - loc_sigs, loc_class_sigs = parse_all_signatures(f.readlines()) - all_sigs += loc_sigs - all_class_sigs += loc_class_sigs - sigs = dict(find_unique_signatures(all_sigs)) - class_sigs = dict(find_unique_signatures(all_class_sigs)) - return sigs, class_sigs + file.write(output) def generate_stubs(options: Options) -> None: """Main entry point for the program.""" mypy_opts = mypy_options(options) - py_modules, c_modules = collect_build_targets(options, mypy_opts) - sig_generators = get_sig_generators(options) + py_modules, pyc_modules, c_modules = collect_build_targets(options, mypy_opts) + all_modules = py_modules + pyc_modules + c_modules + all_module_names = sorted(m.module for m in all_modules) # Use parsed sources to generate stubs for Python modules. generate_asts_for_modules(py_modules, options.parse_only, mypy_opts, options.verbose) files = [] - for mod in py_modules: + for mod in py_modules + pyc_modules: assert mod.path is not None, "Not found module was not skipped" target = mod.module.replace(".", "/") - if os.path.basename(mod.path) == "__init__.py": + if os.path.basename(mod.path) in ["__init__.py", "__init__.pyc"]: target += "/__init__.pyi" else: target += ".pyi" target = os.path.join(options.output_dir, target) files.append(target) with generate_guarded(mod.module, target, options.ignore_errors, options.verbose): - generate_stub_from_ast( + generate_stub_for_py_module( mod, target, - options.parse_only, - options.include_private, - options.export_less, + parse_only=options.parse_only, + inspect=options.inspect or mod in pyc_modules, + include_private=options.include_private, + export_less=options.export_less, include_docstrings=options.include_docstrings, + doc_dir=options.doc_dir, + all_modules=all_module_names, ) # Separately analyse C modules using different logic. 
- all_modules = sorted(m.module for m in (py_modules + c_modules)) for mod in c_modules: - if any(py_mod.module.startswith(mod.module + ".") for py_mod in py_modules + c_modules): + if any(py_mod.module.startswith(mod.module + ".") for py_mod in all_modules): target = mod.module.replace(".", "/") + "/__init__.pyi" else: target = mod.module.replace(".", "/") + ".pyi" @@ -1927,11 +1624,12 @@ def generate_stubs(options: Options) -> None: generate_stub_for_c_module( mod.module, target, - known_modules=all_modules, - sig_generators=sig_generators, - include_docstrings=options.include_docstrings, + known_modules=all_module_names, + doc_dir=options.doc_dir, + include_private=options.include_private, + export_less=options.export_less, ) - num_modules = len(py_modules) + len(c_modules) + num_modules = len(all_modules) if not options.quiet and num_modules > 0: print("Processed %d modules" % num_modules) if len(files) == 1: @@ -1967,10 +1665,21 @@ def parse_options(args: list[str]) -> Options: "respect __all__)", ) parser.add_argument( + "--no-analysis", "--parse-only", + dest="parse_only", action="store_true", help="don't perform semantic analysis of sources, just parse them " - "(only applies to Python modules, might affect quality of stubs)", + "(only applies to Python modules, might affect quality of stubs. " + "Not compatible with --inspect)", + ) + parser.add_argument( + "--inspect-mode", + dest="inspect", + action="store_true", + help="import and inspect modules instead of parsing source code." 
+ "This is the default behavior for c modules and pyc-only packages, but " + "it is also useful for pure python modules with dynamically generated members.", ) parser.add_argument( "--include-private", @@ -2047,6 +1756,8 @@ def parse_options(args: list[str]) -> Options: parser.error("May only specify one of: modules/packages or files.") if ns.quiet and ns.verbose: parser.error("Cannot specify both quiet and verbose messages") + if ns.inspect and ns.parse_only: + parser.error("Cannot specify both --parse-only/--no-analysis and --inspect-mode") # Create the output folder if it doesn't already exist. if not os.path.exists(ns.output_dir): @@ -2055,6 +1766,7 @@ def parse_options(args: list[str]) -> Options: return Options( pyversion=pyversion, no_import=ns.no_import, + inspect=ns.inspect, doc_dir=ns.doc_dir, search_path=ns.search_path.split(":"), interpreter=ns.interpreter, diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py index 31487f9d0dcf..0ad79a4265b3 100755 --- a/mypy/stubgenc.py +++ b/mypy/stubgenc.py @@ -6,68 +6,38 @@ from __future__ import annotations +import glob import importlib import inspect +import keyword import os.path -import re -from abc import abstractmethod -from types import ModuleType -from typing import Any, Final, Iterable, Mapping +from types import FunctionType, ModuleType +from typing import Any, Mapping -import mypy.util +from mypy.fastparse import parse_type_comment from mypy.moduleinspect import is_c_module from mypy.stubdoc import ( ArgSig, FunctionSig, + Sig, + find_unique_signatures, infer_arg_sig_from_anon_docstring, infer_prop_type_from_docstring, infer_ret_type_sig_from_anon_docstring, infer_ret_type_sig_from_docstring, infer_sig_from_docstring, + parse_all_signatures, ) - -# Members of the typing module to consider for importing by default. 
-_DEFAULT_TYPING_IMPORTS: Final = ( - "Any", - "Callable", - "ClassVar", - "Dict", - "Iterable", - "Iterator", - "List", - "Optional", - "Tuple", - "Union", +from mypy.stubutil import ( + BaseStubGenerator, + ClassInfo, + FunctionContext, + SignatureGenerator, + infer_method_arg_types, + infer_method_ret_type, ) -class SignatureGenerator: - """Abstract base class for extracting a list of FunctionSigs for each function.""" - - def remove_self_type( - self, inferred: list[FunctionSig] | None, self_var: str - ) -> list[FunctionSig] | None: - """Remove type annotation from self/cls argument""" - if inferred: - for signature in inferred: - if signature.args: - if signature.args[0].name == self_var: - signature.args[0].type = None - return inferred - - @abstractmethod - def get_function_sig( - self, func: object, module_name: str, name: str - ) -> list[FunctionSig] | None: - pass - - @abstractmethod - def get_method_sig( - self, cls: type, func: object, module_name: str, class_name: str, name: str, self_var: str - ) -> list[FunctionSig] | None: - pass - - class ExternalSignatureGenerator(SignatureGenerator): def __init__( self, func_sigs: dict[str, str] | None = None, class_sigs: dict[str, str] | None = None @@ -79,97 +49,104 @@ class signatures (usually corresponds to __init__). 
self.func_sigs = func_sigs or {} self.class_sigs = class_sigs or {} - def get_function_sig( - self, func: object, module_name: str, name: str - ) -> list[FunctionSig] | None: - if name in self.func_sigs: - return [ - FunctionSig( - name=name, - args=infer_arg_sig_from_anon_docstring(self.func_sigs[name]), - ret_type="Any", - ) - ] - else: - return None + @classmethod + def from_doc_dir(cls, doc_dir: str) -> ExternalSignatureGenerator: + """Instantiate from a directory of .rst files.""" + all_sigs: list[Sig] = [] + all_class_sigs: list[Sig] = [] + for path in glob.glob(f"{doc_dir}/*.rst"): + with open(path) as f: + loc_sigs, loc_class_sigs = parse_all_signatures(f.readlines()) + all_sigs += loc_sigs + all_class_sigs += loc_class_sigs + sigs = dict(find_unique_signatures(all_sigs)) + class_sigs = dict(find_unique_signatures(all_class_sigs)) + return ExternalSignatureGenerator(sigs, class_sigs) - def get_method_sig( - self, cls: type, func: object, module_name: str, class_name: str, name: str, self_var: str + def get_function_sig( + self, default_sig: FunctionSig, ctx: FunctionContext ) -> list[FunctionSig] | None: + # method: if ( - name in ("__new__", "__init__") - and name not in self.func_sigs - and class_name in self.class_sigs + ctx.class_info + and ctx.name in ("__new__", "__init__") + and ctx.name not in self.func_sigs + and ctx.class_info.name in self.class_sigs ): return [ FunctionSig( - name=name, - args=infer_arg_sig_from_anon_docstring(self.class_sigs[class_name]), - ret_type=infer_method_ret_type(name), + name=ctx.name, + args=infer_arg_sig_from_anon_docstring(self.class_sigs[ctx.class_info.name]), + ret_type=infer_method_ret_type(ctx.name), ) ] - inferred = self.get_function_sig(func, module_name, name) - return self.remove_self_type(inferred, self_var) + + # function: + if ctx.name not in self.func_sigs: + return None + + inferred = [ + FunctionSig( + name=ctx.name, + args=infer_arg_sig_from_anon_docstring(self.func_sigs[ctx.name]), + ret_type=None, + 
) + ] + if ctx.class_info: + return self.remove_self_type(inferred, ctx.class_info.self_var) + else: + return inferred + + def get_property_type(self, default_type: str | None, ctx: FunctionContext) -> str | None: + return None class DocstringSignatureGenerator(SignatureGenerator): def get_function_sig( - self, func: object, module_name: str, name: str + self, default_sig: FunctionSig, ctx: FunctionContext ) -> list[FunctionSig] | None: - docstr = getattr(func, "__doc__", None) - inferred = infer_sig_from_docstring(docstr, name) + inferred = infer_sig_from_docstring(ctx.docstring, ctx.name) if inferred: - assert docstr is not None - if is_pybind11_overloaded_function_docstring(docstr, name): + assert ctx.docstring is not None + if is_pybind11_overloaded_function_docstring(ctx.docstring, ctx.name): # Remove pybind11 umbrella (*args, **kwargs) for overloaded functions del inferred[-1] - return inferred - def get_method_sig( - self, - cls: type, - func: object, - module_name: str, - class_name: str, - func_name: str, - self_var: str, - ) -> list[FunctionSig] | None: - inferred = self.get_function_sig(func, module_name, func_name) - if not inferred and func_name == "__init__": - # look for class-level constructor signatures of the form () - inferred = self.get_function_sig(cls, module_name, class_name) - return self.remove_self_type(inferred, self_var) + if ctx.class_info: + if not inferred and ctx.name == "__init__": + # look for class-level constructor signatures of the form () + inferred = infer_sig_from_docstring(ctx.class_info.docstring, ctx.class_info.name) + if inferred: + inferred = [sig._replace(name="__init__") for sig in inferred] + return self.remove_self_type(inferred, ctx.class_info.self_var) + else: + return inferred + def get_property_type(self, default_type: str | None, ctx: FunctionContext) -> str | None: + """Infer property type from docstring or docstring signature.""" + if ctx.docstring is not None: + inferred = 
infer_ret_type_sig_from_anon_docstring(ctx.docstring) + if not inferred: + inferred = infer_ret_type_sig_from_docstring(ctx.docstring, ctx.name) + if not inferred: + inferred = infer_prop_type_from_docstring(ctx.docstring) + return inferred + else: + return None -class FallbackSignatureGenerator(SignatureGenerator): - def get_function_sig( - self, func: object, module_name: str, name: str - ) -> list[FunctionSig] | None: - return [ - FunctionSig( - name=name, - args=infer_arg_sig_from_anon_docstring("(*args, **kwargs)"), - ret_type="Any", - ) - ] - def get_method_sig( - self, cls: type, func: object, module_name: str, class_name: str, name: str, self_var: str - ) -> list[FunctionSig] | None: - return [ - FunctionSig( - name=name, - args=infer_method_args(name, self_var), - ret_type=infer_method_ret_type(name), - ) - ] +def is_pybind11_overloaded_function_docstring(docstring: str, name: str) -> bool: + return docstring.startswith(f"{name}(*args, **kwargs)\nOverloaded function.\n\n") def generate_stub_for_c_module( module_name: str, target: str, known_modules: list[str], - sig_generators: Iterable[SignatureGenerator], + doc_dir: str = "", + *, + include_private: bool = False, + export_less: bool = False, include_docstrings: bool = False, ) -> None: """Generate stub for C module. @@ -184,452 +161,664 @@ def generate_stub_for_c_module( If directory for target doesn't exist it will be created. Existing stub will be overwritten. 
""" - module = importlib.import_module(module_name) - assert is_c_module(module), f"{module_name} is not a C module" subdir = os.path.dirname(target) if subdir and not os.path.isdir(subdir): os.makedirs(subdir) - imports: list[str] = [] - functions: list[str] = [] - done = set() - items = sorted(get_members(module), key=lambda x: x[0]) - for name, obj in items: - if is_c_function(obj): - generate_c_function_stub( - module, - name, - obj, - output=functions, - known_modules=known_modules, - imports=imports, - sig_generators=sig_generators, - include_docstrings=include_docstrings, - ) - done.add(name) - types: list[str] = [] - for name, obj in items: - if name.startswith("__") and name.endswith("__"): - continue - if is_c_type(obj): - generate_c_type_stub( - module, - name, - obj, - output=types, - known_modules=known_modules, - imports=imports, - sig_generators=sig_generators, - include_docstrings=include_docstrings, - ) - done.add(name) - variables = [] - for name, obj in items: - if name.startswith("__") and name.endswith("__"): - continue - if name not in done and not inspect.ismodule(obj): - type_str = strip_or_import( - get_type_fullname(type(obj)), module, known_modules, imports - ) - variables.append(f"{name}: {type_str}") - output = sorted(set(imports)) - for line in variables: - output.append(line) - for line in types: - if line.startswith("class") and output and output[-1]: - output.append("") - output.append(line) - if output and functions: - output.append("") - for line in functions: - output.append(line) - output = add_typing_import(output) + + gen = InspectionStubGenerator( + module_name, + known_modules, + doc_dir, + include_private=include_private, + export_less=export_less, + include_docstrings=include_docstrings, + ) + gen.generate_module() + output = gen.output() + with open(target, "w") as file: - for line in output: - file.write(f"{line}\n") - - -def add_typing_import(output: list[str]) -> list[str]: - """Add typing imports for collections/types 
that occur in the generated stub.""" - names = [] - for name in _DEFAULT_TYPING_IMPORTS: - if any(re.search(r"\b%s\b" % name, line) for line in output): - names.append(name) - if names: - return [f"from typing import {', '.join(names)}", ""] + output - else: - return output.copy() - - -def get_members(obj: object) -> list[tuple[str, Any]]: - obj_dict: Mapping[str, Any] = getattr(obj, "__dict__") # noqa: B009 - results = [] - for name in obj_dict: - if is_skipped_attribute(name): - continue - # Try to get the value via getattr - try: - value = getattr(obj, name) - except AttributeError: - continue - else: - results.append((name, value)) - return results + file.write(output) -def is_c_function(obj: object) -> bool: - return inspect.isbuiltin(obj) or type(obj) is type(ord) +class CFunctionStub: + """ + Class that mimics a C function in order to provide parseable docstrings. + """ + def __init__(self, name: str, doc: str, is_abstract: bool = False): + self.__name__ = name + self.__doc__ = doc + self.__abstractmethod__ = is_abstract -def is_c_method(obj: object) -> bool: - return inspect.ismethoddescriptor(obj) or type(obj) in ( - type(str.index), - type(str.__add__), - type(str.__new__), - ) + @classmethod + def _from_sig(cls, sig: FunctionSig, is_abstract: bool = False) -> CFunctionStub: + return CFunctionStub(sig.name, sig.format_sig()[:-4], is_abstract) + @classmethod + def _from_sigs(cls, sigs: list[FunctionSig], is_abstract: bool = False) -> CFunctionStub: + return CFunctionStub( + sigs[0].name, "\n".join(sig.format_sig()[:-4] for sig in sigs), is_abstract + ) -def is_c_classmethod(obj: object) -> bool: - return inspect.isbuiltin(obj) or type(obj).__name__ in ( - "classmethod", - "classmethod_descriptor", - ) + def __get__(self) -> None: + """ + This exists to make this object look like a method descriptor and thus + return true for CStubGenerator.ismethod() + """ + pass -def is_c_property(obj: object) -> bool: - return inspect.isdatadescriptor(obj) or 
hasattr(obj, "fget") +class InspectionStubGenerator(BaseStubGenerator): + """Stub generator that does not parse code. + Generation is performed by inspecting the module's contents, and thus works + for highly dynamic modules, pyc files, and C modules (via the CStubGenerator + subclass). + """ -def is_c_property_readonly(prop: Any) -> bool: - return hasattr(prop, "fset") and prop.fset is None + def __init__( + self, + module_name: str, + known_modules: list[str], + doc_dir: str = "", + _all_: list[str] | None = None, + include_private: bool = False, + export_less: bool = False, + include_docstrings: bool = False, + module: ModuleType | None = None, + ) -> None: + self.doc_dir = doc_dir + if module is None: + self.module = importlib.import_module(module_name) + else: + self.module = module + self.is_c_module = is_c_module(self.module) + self.known_modules = known_modules + self.resort_members = self.is_c_module + super().__init__(_all_, include_private, export_less, include_docstrings) + self.module_name = module_name + + def get_default_function_sig(self, func: object, ctx: FunctionContext) -> FunctionSig: + argspec = None + if not self.is_c_module: + # Get the full argument specification of the function + try: + argspec = inspect.getfullargspec(func) + except TypeError: + # some callables cannot be inspected, e.g. 
functools.partial + pass + if argspec is None: + if ctx.class_info is not None: + # method: + return FunctionSig( + name=ctx.name, + args=infer_c_method_args(ctx.name, ctx.class_info.self_var), + ret_type=infer_method_ret_type(ctx.name), + ) + else: + # function: + return FunctionSig( + name=ctx.name, + args=[ArgSig(name="*args"), ArgSig(name="**kwargs")], + ret_type=None, + ) + # Extract the function arguments, defaults, and varargs + args = argspec.args + defaults = argspec.defaults + varargs = argspec.varargs + kwargs = argspec.varkw + annotations = argspec.annotations + + def get_annotation(key: str) -> str | None: + if key not in annotations: + return None + argtype = annotations[key] + if argtype is None: + return "None" + if not isinstance(argtype, str): + return self.get_type_fullname(argtype) + return argtype + + arglist: list[ArgSig] = [] + # Add the arguments to the signature + for i, arg in enumerate(args): + # Check if the argument has a default value + if defaults and i >= len(args) - len(defaults): + default_value = defaults[i - (len(args) - len(defaults))] + if arg in annotations: + argtype = annotations[arg] + else: + argtype = self.get_type_annotation(default_value) + if argtype == "None": + # None is not a useful annotation, but we can infer that the arg + # is optional + incomplete = self.add_name("_typeshed.Incomplete") + argtype = f"{incomplete} | None" + arglist.append(ArgSig(arg, argtype, default=True)) + else: + arglist.append(ArgSig(arg, get_annotation(arg), default=False)) -def is_c_type(obj: object) -> bool: - return inspect.isclass(obj) or type(obj) is type(int) + # Add *args if present + if varargs: + arglist.append(ArgSig(f"*{varargs}", get_annotation(varargs))) + # Add **kwargs if present + if kwargs: + arglist.append(ArgSig(f"**{kwargs}", get_annotation(kwargs))) -def is_pybind11_overloaded_function_docstring(docstr: str, name: str) -> bool: - return docstr.startswith(f"{name}(*args, **kwargs)\n" + "Overloaded function.\n\n") + # 
add types for known special methods + if ctx.class_info is not None and all( + arg.type is None and arg.default is False for arg in arglist + ): + new_args = infer_method_arg_types( + ctx.name, ctx.class_info.self_var, [arg.name for arg in arglist if arg.name] + ) + if new_args is not None: + arglist = new_args + ret_type = get_annotation("return") or infer_method_ret_type(ctx.name) + return FunctionSig(ctx.name, arglist, ret_type) -def generate_c_function_stub( - module: ModuleType, - name: str, - obj: object, - *, - known_modules: list[str], - sig_generators: Iterable[SignatureGenerator], - output: list[str], - imports: list[str], - self_var: str | None = None, - cls: type | None = None, - class_name: str | None = None, - include_docstrings: bool = False, -) -> None: - """Generate stub for a single function or method. + def get_sig_generators(self) -> list[SignatureGenerator]: + if not self.is_c_module: + return [] + else: + sig_generators: list[SignatureGenerator] = [DocstringSignatureGenerator()] + if self.doc_dir: + # Collect info from docs (if given). Always check these first. + sig_generators.insert(0, ExternalSignatureGenerator.from_doc_dir(self.doc_dir)) + return sig_generators - The result will be appended to 'output'. - If necessary, any required names will be added to 'imports'. - The 'class_name' is used to find signature of __init__ or __new__ in - 'class_sigs'. - """ - inferred: list[FunctionSig] | None = None - docstr: str | None = None - if class_name: - # method: - assert cls is not None, "cls should be provided for methods" - assert self_var is not None, "self_var should be provided for methods" - for sig_gen in sig_generators: - inferred = sig_gen.get_method_sig( - cls, obj, module.__name__, class_name, name, self_var + def strip_or_import(self, type_name: str) -> str: + """Strips unnecessary module names from typ. + + If typ represents a type that is inside module or is a type coming from builtins, remove + module declaration from it. 
Return stripped name of the type. + + Arguments: + typ: name of the type + """ + local_modules = ["builtins", self.module_name] + parsed_type = parse_type_comment(type_name, 0, 0, None)[1] + assert parsed_type is not None, type_name + return self.print_annotation(parsed_type, self.known_modules, local_modules) + + def get_obj_module(self, obj: object) -> str | None: + """Return module name of the object.""" + return getattr(obj, "__module__", None) + + def is_defined_in_module(self, obj: object) -> bool: + """Check if object is considered defined in the current module.""" + module = self.get_obj_module(obj) + return module is None or module == self.module_name + + def generate_module(self) -> None: + all_items = self.get_members(self.module) + if self.resort_members: + all_items = sorted(all_items, key=lambda x: x[0]) + items = [] + for name, obj in all_items: + if inspect.ismodule(obj) and obj.__name__ in self.known_modules: + module_name = obj.__name__ + if module_name.startswith(self.module_name + "."): + # from {.rel_name} import {mod_name} as {name} + pkg_name, mod_name = module_name.rsplit(".", 1) + rel_module = pkg_name[len(self.module_name) :] or "." 
+ self.import_tracker.add_import_from(rel_module, [(mod_name, name)]) + self.import_tracker.reexport(name) + else: + # import {module_name} as {name} + self.import_tracker.add_import(module_name, name) + self.import_tracker.reexport(name) + elif self.is_defined_in_module(obj) and not inspect.ismodule(obj): + # process this below + items.append((name, obj)) + else: + # from {obj_module} import {obj_name} + obj_module_name = self.get_obj_module(obj) + if obj_module_name: + self.import_tracker.add_import_from(obj_module_name, [(name, None)]) + if self.should_reexport(name, obj_module_name, name_is_alias=False): + self.import_tracker.reexport(name) + + self.set_defined_names(set([name for name, obj in all_items if not inspect.ismodule(obj)])) + + if self.resort_members: + functions: list[str] = [] + types: list[str] = [] + variables: list[str] = [] + else: + output: list[str] = [] + functions = types = variables = output + + for name, obj in items: + if self.is_function(obj): + self.generate_function_stub(name, obj, output=functions) + elif inspect.isclass(obj): + self.generate_class_stub(name, obj, output=types) + else: + self.generate_variable_stub(name, obj, output=variables) + + self._output = [] + + if self.resort_members: + for line in variables: + self._output.append(line + "\n") + for line in types: + if line.startswith("class") and self._output and self._output[-1]: + self._output.append("\n") + self._output.append(line + "\n") + if self._output and functions: + self._output.append("\n") + for line in functions: + self._output.append(line + "\n") + else: + for i, line in enumerate(output): + if ( + self._output + and line.startswith("class") + and ( + not self._output[-1].startswith("class") + or (len(output) > i + 1 and output[i + 1].startswith(" ")) + ) + ) or ( + self._output + and self._output[-1].startswith("def") + and not line.startswith("def") + ): + self._output.append("\n") + self._output.append(line + "\n") + self.check_undefined_names() + + def 
is_skipped_attribute(self, attr: str) -> bool: + return ( + attr + in ( + "__class__", + "__getattribute__", + "__str__", + "__repr__", + "__doc__", + "__dict__", + "__module__", + "__weakref__", + "__annotations__", ) - if inferred: - # add self/cls var, if not present - for sig in inferred: - if not sig.args or sig.args[0].name not in ("self", "cls"): - sig.args.insert(0, ArgSig(name=self_var)) - break - else: - # function: - for sig_gen in sig_generators: - inferred = sig_gen.get_function_sig(obj, module.__name__, name) - if inferred: - break - - if not inferred: - raise ValueError( - "No signature was found. This should never happen " - "if FallbackSignatureGenerator is provided" + or attr in self.IGNORED_DUNDERS + or is_pybind_skipped_attribute(attr) # For pickling + or keyword.iskeyword(attr) ) - is_overloaded = len(inferred) > 1 if inferred else False - if is_overloaded: - imports.append("from typing import overload") - if inferred: - for signature in inferred: - args: list[str] = [] - for arg in signature.args: - arg_def = arg.name - if arg_def == "None": - arg_def = "_none" # None is not a valid argument name - - if arg.type: - arg_def += ": " + strip_or_import(arg.type, module, known_modules, imports) - - if arg.default: - arg_def += " = ..." 
- - args.append(arg_def) - - if is_overloaded: - output.append("@overload") - # a sig generator indicates @classmethod by specifying the cls arg - if class_name and signature.args and signature.args[0].name == "cls": - output.append("@classmethod") - output_signature = "def {function}({args}) -> {ret}:".format( - function=name, - args=", ".join(args), - ret=strip_or_import(signature.ret_type, module, known_modules, imports), - ) - if include_docstrings and docstr: - docstr_quoted = mypy.util.quote_docstring(docstr.strip()) - docstr_indented = "\n ".join(docstr_quoted.split("\n")) - output.append(output_signature) - output.extend(f" {docstr_indented}".split("\n")) + def get_members(self, obj: object) -> list[tuple[str, Any]]: + obj_dict: Mapping[str, Any] = getattr(obj, "__dict__") # noqa: B009 + results = [] + for name in obj_dict: + if self.is_skipped_attribute(name): + continue + # Try to get the value via getattr + try: + value = getattr(obj, name) + except AttributeError: + continue else: - output_signature += " ..." - output.append(output_signature) - + results.append((name, value)) + return results -def strip_or_import( - typ: str, module: ModuleType, known_modules: list[str], imports: list[str] -) -> str: - """Strips unnecessary module names from typ. + def get_type_annotation(self, obj: object) -> str: + """ + Given an instance, return a string representation of its type that is valid + to use as a type annotation. + """ + if obj is None or obj is type(None): + return "None" + elif inspect.isclass(obj): + return "type[{}]".format(self.get_type_fullname(obj)) + elif isinstance(obj, FunctionType): + return self.add_name("typing.Callable") + elif isinstance(obj, ModuleType): + return self.add_name("types.ModuleType", require=False) + else: + return self.get_type_fullname(type(obj)) - If typ represents a type that is inside module or is a type coming from builtins, remove - module declaration from it. Return stripped name of the type. 
+ def is_function(self, obj: object) -> bool: + if self.is_c_module: + return inspect.isbuiltin(obj) + else: + return inspect.isfunction(obj) + + def is_method(self, class_info: ClassInfo, name: str, obj: object) -> bool: + if self.is_c_module: + return inspect.ismethoddescriptor(obj) or type(obj) in ( + type(str.index), + type(str.__add__), + type(str.__new__), + ) + else: + # this is valid because it is only called on members of a class + return inspect.isfunction(obj) + + def is_classmethod(self, class_info: ClassInfo, name: str, obj: object) -> bool: + if self.is_c_module: + return inspect.isbuiltin(obj) or type(obj).__name__ in ( + "classmethod", + "classmethod_descriptor", + ) + else: + return inspect.ismethod(obj) - Arguments: - typ: name of the type - module: in which this type is used - known_modules: other modules being processed - imports: list of import statements (may be modified during the call) - """ - local_modules = ["builtins"] - if module: - local_modules.append(module.__name__) - - stripped_type = typ - if any(c in typ for c in "[,"): - for subtyp in re.split(r"[\[,\]]", typ): - stripped_subtyp = strip_or_import(subtyp.strip(), module, known_modules, imports) - if stripped_subtyp != subtyp: - stripped_type = re.sub( - r"(^|[\[, ]+)" + re.escape(subtyp) + r"($|[\], ]+)", - r"\1" + stripped_subtyp + r"\2", - stripped_type, - ) - elif "." 
in typ: - for module_name in local_modules + list(reversed(known_modules)): - if typ.startswith(module_name + "."): - if module_name in local_modules: - stripped_type = typ[len(module_name) + 1 :] - arg_module = module_name - break + def is_staticmethod(self, class_info: ClassInfo | None, name: str, obj: object) -> bool: + if self.is_c_module: + return False else: - arg_module = typ[: typ.rindex(".")] - if arg_module not in local_modules: - imports.append(f"import {arg_module}") - if stripped_type == "NoneType": - stripped_type = "None" - return stripped_type - - -def is_static_property(obj: object) -> bool: - return type(obj).__name__ == "pybind11_static_property" - - -def generate_c_property_stub( - name: str, - obj: object, - static_properties: list[str], - rw_properties: list[str], - ro_properties: list[str], - readonly: bool, - module: ModuleType | None = None, - known_modules: list[str] | None = None, - imports: list[str] | None = None, -) -> None: - """Generate property stub using introspection of 'obj'. + return class_info is not None and isinstance( + inspect.getattr_static(class_info.cls, name), staticmethod + ) - Try to infer type from docstring, append resulting lines to 'output'. 
- """ + @staticmethod + def is_abstract_method(obj: object) -> bool: + return getattr(obj, "__abstractmethod__", False) - def infer_prop_type(docstr: str | None) -> str | None: - """Infer property type from docstring or docstring signature.""" - if docstr is not None: - inferred = infer_ret_type_sig_from_anon_docstring(docstr) - if not inferred: - inferred = infer_ret_type_sig_from_docstring(docstr, name) - if not inferred: - inferred = infer_prop_type_from_docstring(docstr) - return inferred - else: - return None + @staticmethod + def is_property(class_info: ClassInfo, name: str, obj: object) -> bool: + return inspect.isdatadescriptor(obj) or hasattr(obj, "fget") - inferred = infer_prop_type(getattr(obj, "__doc__", None)) - if not inferred: - fget = getattr(obj, "fget", None) - inferred = infer_prop_type(getattr(fget, "__doc__", None)) - if not inferred: - inferred = "Any" - - if module is not None and imports is not None and known_modules is not None: - inferred = strip_or_import(inferred, module, known_modules, imports) - - if is_static_property(obj): - trailing_comment = " # read-only" if readonly else "" - static_properties.append(f"{name}: ClassVar[{inferred}] = ...{trailing_comment}") - else: # regular property - if readonly: - ro_properties.append("@property") - ro_properties.append(f"def {name}(self) -> {inferred}: ...") + @staticmethod + def is_property_readonly(prop: Any) -> bool: + return hasattr(prop, "fset") and prop.fset is None + + def is_static_property(self, obj: object) -> bool: + """For c-modules, whether the property behaves like an attribute""" + if self.is_c_module: + # StaticProperty is from boost-python + return type(obj).__name__ in ("pybind11_static_property", "StaticProperty") else: - rw_properties.append(f"{name}: {inferred}") + return False + + def process_inferred_sigs(self, inferred: list[FunctionSig]) -> None: + for i, sig in enumerate(inferred): + for arg in sig.args: + if arg.type is not None: + arg.type = 
self.strip_or_import(arg.type) + if sig.ret_type is not None: + inferred[i] = sig._replace(ret_type=self.strip_or_import(sig.ret_type)) + + def generate_function_stub( + self, name: str, obj: object, *, output: list[str], class_info: ClassInfo | None = None + ) -> None: + """Generate stub for a single function or method. + + The result (always a single line) will be appended to 'output'. + If necessary, any required names will be added to 'imports'. + The 'class_name' is used to find signature of __init__ or __new__ in + 'class_sigs'. + """ + docstring: Any = getattr(obj, "__doc__", None) + if not isinstance(docstring, str): + docstring = None + + ctx = FunctionContext( + self.module_name, + name, + docstring=docstring, + is_abstract=self.is_abstract_method(obj), + class_info=class_info, + ) + if self.is_private_name(name, ctx.fullname) or self.is_not_in_all(name): + return + self.record_name(ctx.name) + default_sig = self.get_default_function_sig(obj, ctx) + inferred = self.get_signatures(default_sig, self.sig_generators, ctx) + self.process_inferred_sigs(inferred) -def generate_c_type_stub( - module: ModuleType, - class_name: str, - obj: type, - output: list[str], - known_modules: list[str], - imports: list[str], - sig_generators: Iterable[SignatureGenerator], - include_docstrings: bool = False, -) -> None: - """Generate stub for a single class using runtime introspection. + decorators = [] + if len(inferred) > 1: + decorators.append("@{}".format(self.add_name("typing.overload"))) - The result lines will be appended to 'output'. If necessary, any - required names will be added to 'imports'. 
- """ - raw_lookup = getattr(obj, "__dict__") # noqa: B009 - items = sorted(get_members(obj), key=lambda x: method_name_sort_key(x[0])) - names = {x[0] for x in items} - methods: list[str] = [] - types: list[str] = [] - static_properties: list[str] = [] - rw_properties: list[str] = [] - ro_properties: list[str] = [] - attrs: list[tuple[str, Any]] = [] - for attr, value in items: - # use unevaluated descriptors when dealing with property inspection - raw_value = raw_lookup.get(attr, value) - if is_c_method(value) or is_c_classmethod(value): - if attr == "__new__": - # TODO: We should support __new__. - if "__init__" in names: - # Avoid duplicate functions if both are present. - # But is there any case where .__new__() has a - # better signature than __init__() ? - continue - attr = "__init__" - if is_c_classmethod(value): - self_var = "cls" + if ctx.is_abstract: + decorators.append("@{}".format(self.add_name("abc.abstractmethod"))) + + if class_info is not None: + if self.is_staticmethod(class_info, name, obj): + decorators.append("@staticmethod") else: - self_var = "self" - generate_c_function_stub( - module, - attr, - value, - output=methods, - known_modules=known_modules, - imports=imports, - self_var=self_var, - cls=obj, - class_name=class_name, - sig_generators=sig_generators, - include_docstrings=include_docstrings, - ) - elif is_c_property(raw_value): - generate_c_property_stub( - attr, - raw_value, - static_properties, - rw_properties, - ro_properties, - is_c_property_readonly(raw_value), - module=module, - known_modules=known_modules, - imports=imports, - ) - elif is_c_type(value): - generate_c_type_stub( - module, - attr, - value, - types, - imports=imports, - known_modules=known_modules, - sig_generators=sig_generators, - include_docstrings=include_docstrings, + for sig in inferred: + if not sig.args or sig.args[0].name not in ("self", "cls"): + sig.args.insert(0, ArgSig(name=class_info.self_var)) + # a sig generator indicates @classmethod by specifying 
the cls arg. + if inferred[0].args and inferred[0].args[0].name == "cls": + decorators.append("@classmethod") + + output.extend(self.format_func_def(inferred, decorators=decorators, docstring=docstring)) + self._fix_iter(ctx, inferred, output) + + def _fix_iter( + self, ctx: FunctionContext, inferred: list[FunctionSig], output: list[str] + ) -> None: + """Ensure that objects which implement old-style iteration via __getitem__ + are considered iterable. + """ + if ( + ctx.class_info + and ctx.class_info.cls is not None + and ctx.name == "__getitem__" + and "__iter__" not in ctx.class_info.cls.__dict__ + ): + item_type: str | None = None + for sig in inferred: + if sig.args and sig.args[-1].type == "int": + item_type = sig.ret_type + break + if item_type is None: + return + obj = CFunctionStub( + "__iter__", f"def __iter__(self) -> typing.Iterator[{item_type}]\n" ) + self.generate_function_stub("__iter__", obj, output=output, class_info=ctx.class_info) + + def generate_property_stub( + self, + name: str, + raw_obj: object, + obj: object, + static_properties: list[str], + rw_properties: list[str], + ro_properties: list[str], + class_info: ClassInfo | None = None, + ) -> None: + """Generate property stub using introspection of 'obj'. + + Try to infer type from docstring, append resulting lines to 'output'. 
+ + raw_obj : object before evaluation of descriptor (if any) + obj : object after evaluation of descriptor + """ + + docstring = getattr(raw_obj, "__doc__", None) + fget = getattr(raw_obj, "fget", None) + if fget: + alt_docstr = getattr(fget, "__doc__", None) + if alt_docstr and docstring: + docstring += alt_docstr + elif alt_docstr: + docstring = alt_docstr + + ctx = FunctionContext( + self.module_name, name, docstring=docstring, is_abstract=False, class_info=class_info + ) + + if self.is_private_name(name, ctx.fullname) or self.is_not_in_all(name): + return + + self.record_name(ctx.name) + static = self.is_static_property(raw_obj) + readonly = self.is_property_readonly(raw_obj) + if static: + ret_type: str | None = self.strip_or_import(self.get_type_annotation(obj)) else: - attrs.append((attr, value)) + default_sig = self.get_default_function_sig(raw_obj, ctx) + ret_type = default_sig.ret_type + + inferred_type = self.get_property_type(ret_type, self.sig_generators, ctx) + if inferred_type is not None: + inferred_type = self.strip_or_import(inferred_type) - for attr, value in attrs: - static_properties.append( - "{}: ClassVar[{}] = ...".format( - attr, - strip_or_import(get_type_fullname(type(value)), module, known_modules, imports), + if static: + classvar = self.add_name("typing.ClassVar") + trailing_comment = " # read-only" if readonly else "" + if inferred_type is None: + inferred_type = self.add_name("_typeshed.Incomplete") + + static_properties.append( + f"{self._indent}{name}: {classvar}[{inferred_type}] = ...{trailing_comment}" ) - ) - all_bases = type.mro(obj) - if all_bases[-1] is object: - # TODO: Is this always object? - del all_bases[-1] - # remove pybind11_object. 
All classes generated by pybind11 have pybind11_object in their MRO, - # which only overrides a few functions in object type - if all_bases and all_bases[-1].__name__ == "pybind11_object": - del all_bases[-1] - # remove the class itself - all_bases = all_bases[1:] - # Remove base classes of other bases as redundant. - bases: list[type] = [] - for base in all_bases: - if not any(issubclass(b, base) for b in bases): - bases.append(base) - if bases: - bases_str = "(%s)" % ", ".join( - strip_or_import(get_type_fullname(base), module, known_modules, imports) - for base in bases - ) - else: - bases_str = "" - if types or static_properties or rw_properties or methods or ro_properties: - output.append(f"class {class_name}{bases_str}:") - for line in types: - if ( - output - and output[-1] - and not output[-1].startswith("class") - and line.startswith("class") + else: # regular property + if readonly: + ro_properties.append(f"{self._indent}@property") + sig = FunctionSig(name, [ArgSig("self")], inferred_type) + ro_properties.append(sig.format_sig(indent=self._indent)) + else: + if inferred_type is None: + inferred_type = self.add_name("_typeshed.Incomplete") + + rw_properties.append(f"{self._indent}{name}: {inferred_type}") + + def get_type_fullname(self, typ: type) -> str: + """Given a type, return a string representation""" + if typ is Any: + return "Any" + typename = getattr(typ, "__qualname__", typ.__name__) + module_name = self.get_obj_module(typ) + assert module_name is not None, typ + if module_name != "builtins": + typename = f"{module_name}.{typename}" + return typename + + def get_base_types(self, obj: type) -> list[str]: + all_bases = type.mro(obj) + if all_bases[-1] is object: + # TODO: Is this always object? + del all_bases[-1] + # remove pybind11_object. 
All classes generated by pybind11 have pybind11_object in their MRO, + # which only overrides a few functions in object type + if all_bases and all_bases[-1].__name__ == "pybind11_object": + del all_bases[-1] + # remove the class itself + all_bases = all_bases[1:] + # Remove base classes of other bases as redundant. + bases: list[type] = [] + for base in all_bases: + if not any(issubclass(b, base) for b in bases): + bases.append(base) + return [self.strip_or_import(self.get_type_fullname(base)) for base in bases] + + def generate_class_stub(self, class_name: str, cls: type, output: list[str]) -> None: + """Generate stub for a single class using runtime introspection. + + The result lines will be appended to 'output'. If necessary, any + required names will be added to 'imports'. + """ + raw_lookup = getattr(cls, "__dict__") # noqa: B009 + items = self.get_members(cls) + if self.resort_members: + items = sorted(items, key=lambda x: method_name_sort_key(x[0])) + names = set(x[0] for x in items) + methods: list[str] = [] + types: list[str] = [] + static_properties: list[str] = [] + rw_properties: list[str] = [] + ro_properties: list[str] = [] + attrs: list[tuple[str, Any]] = [] + + self.record_name(class_name) + self.indent() + + class_info = ClassInfo(class_name, "", getattr(cls, "__doc__", None), cls) + + for attr, value in items: + # use unevaluated descriptors when dealing with property inspection + raw_value = raw_lookup.get(attr, value) + if self.is_method(class_info, attr, value) or self.is_classmethod( + class_info, attr, value ): - output.append("") - output.append(" " + line) - for line in static_properties: - output.append(f" {line}") - for line in rw_properties: - output.append(f" {line}") - for line in methods: - output.append(f" {line}") - for line in ro_properties: - output.append(f" {line}") - else: - output.append(f"class {class_name}{bases_str}: ...") + if attr == "__new__": + # TODO: We should support __new__. 
+ if "__init__" in names: + # Avoid duplicate functions if both are present. + # But is there any case where .__new__() has a + # better signature than __init__() ? + continue + attr = "__init__" + # FIXME: make this nicer + if self.is_classmethod(class_info, attr, value): + class_info.self_var = "cls" + else: + class_info.self_var = "self" + self.generate_function_stub(attr, value, output=methods, class_info=class_info) + elif self.is_property(class_info, attr, raw_value): + self.generate_property_stub( + attr, + raw_value, + value, + static_properties, + rw_properties, + ro_properties, + class_info, + ) + elif inspect.isclass(value) and self.is_defined_in_module(value): + self.generate_class_stub(attr, value, types) + else: + attrs.append((attr, value)) + for attr, value in attrs: + if attr == "__hash__" and value is None: + # special case for __hash__ + continue + prop_type_name = self.strip_or_import(self.get_type_annotation(value)) + classvar = self.add_name("typing.ClassVar") + static_properties.append(f"{self._indent}{attr}: {classvar}[{prop_type_name}] = ...") -def get_type_fullname(typ: type) -> str: - return f"{typ.__module__}.{getattr(typ, '__qualname__', typ.__name__)}" + self.dedent() + + bases = self.get_base_types(cls) + if bases: + bases_str = "(%s)" % ", ".join(bases) + else: + bases_str = "" + if types or static_properties or rw_properties or methods or ro_properties: + output.append(f"{self._indent}class {class_name}{bases_str}:") + for line in types: + if ( + output + and output[-1] + and not output[-1].strip().startswith("class") + and line.strip().startswith("class") + ): + output.append("") + output.append(line) + for line in static_properties: + output.append(line) + for line in rw_properties: + output.append(line) + for line in methods: + output.append(line) + for line in ro_properties: + output.append(line) + else: + output.append(f"{self._indent}class {class_name}{bases_str}: ...") + + def generate_variable_stub(self, name: str, obj: 
object, output: list[str]) -> None: + """Generate stub for a single variable using runtime introspection. + + The result lines will be appended to 'output'. If necessary, any + required names will be added to 'imports'. + """ + if self.is_private_name(name, f"{self.module_name}.{name}") or self.is_not_in_all(name): + return + self.record_name(name) + type_str = self.strip_or_import(self.get_type_annotation(obj)) + output.append(f"{name}: {type_str}") def method_name_sort_key(name: str) -> tuple[int, str]: @@ -648,22 +837,9 @@ def is_pybind_skipped_attribute(attr: str) -> bool: return attr.startswith("__pybind11_module_local_") -def is_skipped_attribute(attr: str) -> bool: - return attr in ( - "__class__", - "__getattribute__", - "__str__", - "__repr__", - "__doc__", - "__dict__", - "__module__", - "__weakref__", - ) or is_pybind_skipped_attribute( # For pickling - attr - ) - - -def infer_method_args(name: str, self_var: str | None = None) -> list[ArgSig]: +def infer_c_method_args( + name: str, self_var: str = "self", arg_names: list[str] | None = None +) -> list[ArgSig]: args: list[ArgSig] | None = None if name.startswith("__") and name.endswith("__"): name = name[2:-2] @@ -703,13 +879,9 @@ def infer_method_args(name: str, self_var: str | None = None) -> list[ArgSig]: args = [] elif name == "setstate": args = [ArgSig(name="state")] + elif name in ("eq", "ne", "lt", "le", "gt", "ge"): + args = [ArgSig(name="other", type="object")] elif name in ( - "eq", - "ne", - "lt", - "le", - "gt", - "ge", "add", "radd", "sub", @@ -761,22 +933,15 @@ def infer_method_args(name: str, self_var: str | None = None) -> list[ArgSig]: elif name == "reduce_ex": args = [ArgSig(name="protocol")] elif name == "exit": - args = [ArgSig(name="type"), ArgSig(name="value"), ArgSig(name="traceback")] + args = [ + ArgSig(name="type", type="type[BaseException] | None"), + ArgSig(name="value", type="BaseException | None"), + ArgSig(name="traceback", type="types.TracebackType | None"), + ] + if args 
is None: + args = infer_method_arg_types(name, self_var, arg_names) + else: + args = [ArgSig(name=self_var)] + args if args is None: args = [ArgSig(name="*args"), ArgSig(name="**kwargs")] - return [ArgSig(name=self_var or "self")] + args - - -def infer_method_ret_type(name: str) -> str: - if name.startswith("__") and name.endswith("__"): - name = name[2:-2] - if name in ("float", "bool", "bytes", "int"): - return name - # Note: __eq__ and co may return arbitrary types, but bool is good enough for stubgen. - elif name in ("eq", "ne", "lt", "le", "gt", "ge", "contains"): - return "bool" - elif name in ("len", "hash", "sizeof", "trunc", "floor", "ceil"): - return "int" - elif name in ("init", "setitem"): - return "None" - return "Any" + return args diff --git a/mypy/stubutil.py b/mypy/stubutil.py index e15766b66cb3..22e525c14e7c 100644 --- a/mypy/stubutil.py +++ b/mypy/stubutil.py @@ -5,19 +5,26 @@ import os.path import re import sys +from abc import abstractmethod +from collections import defaultdict from contextlib import contextmanager -from typing import Iterator +from typing import Final, Iterable, Iterator, Mapping from typing_extensions import overload +from mypy_extensions import mypyc_attr + +import mypy.options from mypy.modulefinder import ModuleNotFoundReason from mypy.moduleinspect import InspectError, ModuleInspect +from mypy.stubdoc import ArgSig, FunctionSig +from mypy.types import AnyType, NoneType, Type, TypeList, TypeStrVisitor, UnboundType, UnionType # Modules that may fail when imported, or that may have side effects (fully qualified). NOT_IMPORTABLE_MODULES = () class CantImport(Exception): - def __init__(self, module: str, message: str): + def __init__(self, module: str, message: str) -> None: self.module = module self.message = message @@ -70,8 +77,9 @@ def find_module_path_and_all_py3( ) -> tuple[str | None, list[str] | None] | None: """Find module and determine __all__ for a Python 3 module. - Return None if the module is a C module. 
Return (module_path, __all__) if - it is a Python module. Raise CantImport if import failed. + Return None if the module is a C or pyc-only module. + Return (module_path, __all__) if it is a Python module. + Raise CantImport if import failed. """ if module in NOT_IMPORTABLE_MODULES: raise CantImport(module, "") @@ -182,3 +190,591 @@ def common_dir_prefix(paths: list[str]) -> str: cur = path break return cur or "." + + +class AnnotationPrinter(TypeStrVisitor): + """Visitor used to print existing annotations in a file. + + The main difference from TypeStrVisitor is a better treatment of + unbound types. + + Notes: + * This visitor doesn't add imports necessary for annotations, this is done separately + by ImportTracker. + * It can print all kinds of types, but the generated strings may not be valid (notably + callable types) since it prints the same string that reveal_type() does. + * For Instance types it prints the fully qualified names. + """ + + # TODO: Generate valid string representation for callable types. + # TODO: Use short names for Instances. + def __init__( + self, + stubgen: BaseStubGenerator, + known_modules: list[str] | None = None, + local_modules: list[str] | None = None, + ) -> None: + super().__init__(options=mypy.options.Options()) + self.stubgen = stubgen + self.known_modules = known_modules + self.local_modules = local_modules or ["builtins"] + + def visit_any(self, t: AnyType) -> str: + s = super().visit_any(t) + self.stubgen.import_tracker.require_name(s) + return s + + def visit_unbound_type(self, t: UnboundType) -> str: + s = t.name + if self.known_modules is not None and "." in s: + # see if this object is from any of the modules that we're currently processing. + # reverse sort so that subpackages come before parents: e.g. "foo.bar" before "foo". 
+ for module_name in self.local_modules + sorted(self.known_modules, reverse=True): + if s.startswith(module_name + "."): + if module_name in self.local_modules: + s = s[len(module_name) + 1 :] + arg_module = module_name + break + else: + arg_module = s[: s.rindex(".")] + if arg_module not in self.local_modules: + self.stubgen.import_tracker.add_import(arg_module, require=True) + elif s == "NoneType": + # when called without analysis all types are unbound, so this won't hit + # visit_none_type(). + s = "None" + else: + self.stubgen.import_tracker.require_name(s) + if t.args: + s += f"[{self.args_str(t.args)}]" + return s + + def visit_none_type(self, t: NoneType) -> str: + return "None" + + def visit_type_list(self, t: TypeList) -> str: + return f"[{self.list_str(t.items)}]" + + def visit_union_type(self, t: UnionType) -> str: + return " | ".join([item.accept(self) for item in t.items]) + + def args_str(self, args: Iterable[Type]) -> str: + """Convert an array of arguments to strings and join the results with commas. 
+ + The main difference from list_str is the preservation of quotes for string + arguments + """ + types = ["builtins.bytes", "builtins.str"] + res = [] + for arg in args: + arg_str = arg.accept(self) + if isinstance(arg, UnboundType) and arg.original_str_fallback in types: + res.append(f"'{arg_str}'") + else: + res.append(arg_str) + return ", ".join(res) + + +class ClassInfo: + def __init__( + self, name: str, self_var: str, docstring: str | None = None, cls: type | None = None + ) -> None: + self.name = name + self.self_var = self_var + self.docstring = docstring + self.cls = cls + + +class FunctionContext: + def __init__( + self, + module_name: str, + name: str, + docstring: str | None = None, + is_abstract: bool = False, + class_info: ClassInfo | None = None, + ) -> None: + self.module_name = module_name + self.name = name + self.docstring = docstring + self.is_abstract = is_abstract + self.class_info = class_info + self._fullname: str | None = None + + @property + def fullname(self) -> str: + if self._fullname is None: + if self.class_info: + self._fullname = f"{self.module_name}.{self.class_info.name}.{self.name}" + else: + self._fullname = f"{self.module_name}.{self.name}" + return self._fullname + + +def infer_method_ret_type(name: str) -> str | None: + """Infer return types for known special methods""" + if name.startswith("__") and name.endswith("__"): + name = name[2:-2] + if name in ("float", "bool", "bytes", "int", "complex", "str"): + return name + # Note: __eq__ and co may return arbitrary types, but bool is good enough for stubgen. 
+ elif name in ("eq", "ne", "lt", "le", "gt", "ge", "contains"): + return "bool" + elif name in ("len", "length_hint", "index", "hash", "sizeof", "trunc", "floor", "ceil"): + return "int" + elif name in ("format", "repr"): + return "str" + elif name in ("init", "setitem", "del", "delitem"): + return "None" + return None + + +def infer_method_arg_types( + name: str, self_var: str = "self", arg_names: list[str] | None = None +) -> list[ArgSig] | None: + """Infer argument types for known special methods""" + args: list[ArgSig] | None = None + if name.startswith("__") and name.endswith("__"): + if arg_names and len(arg_names) >= 1 and arg_names[0] == "self": + arg_names = arg_names[1:] + + name = name[2:-2] + if name == "exit": + if arg_names is None: + arg_names = ["type", "value", "traceback"] + if len(arg_names) == 3: + arg_types = [ + "type[BaseException] | None", + "BaseException | None", + "types.TracebackType | None", + ] + args = [ + ArgSig(name=arg_name, type=arg_type) + for arg_name, arg_type in zip(arg_names, arg_types) + ] + if args is not None: + return [ArgSig(name=self_var)] + args + return None + + +@mypyc_attr(allow_interpreted_subclasses=True) +class SignatureGenerator: + """Abstract base class for extracting a list of FunctionSigs for each function.""" + + def remove_self_type( + self, inferred: list[FunctionSig] | None, self_var: str + ) -> list[FunctionSig] | None: + """Remove type annotation from self/cls argument""" + if inferred: + for signature in inferred: + if signature.args: + if signature.args[0].name == self_var: + signature.args[0].type = None + return inferred + + @abstractmethod + def get_function_sig( + self, default_sig: FunctionSig, ctx: FunctionContext + ) -> list[FunctionSig] | None: + """Return a list of signatures for the given function. + + If no signature can be found, return None. If all of the registered SignatureGenerators + for the stub generator return None, then the default_sig will be used. 
+ """ + pass + + @abstractmethod + def get_property_type(self, default_type: str | None, ctx: FunctionContext) -> str | None: + """Return the type of the given property""" + pass + + +class ImportTracker: + """Record necessary imports during stub generation.""" + + def __init__(self) -> None: + # module_for['foo'] has the module name where 'foo' was imported from, or None if + # 'foo' is a module imported directly; + # direct_imports['foo'] is the module path used when the name 'foo' was added to the + # namespace. + # reverse_alias['foo'] is the name that 'foo' had originally when imported with an + # alias; examples + # 'from pkg import mod' ==> module_for['mod'] == 'pkg' + # 'from pkg import mod as m' ==> module_for['m'] == 'pkg' + # ==> reverse_alias['m'] == 'mod' + # 'import pkg.mod as m' ==> module_for['m'] == None + # ==> reverse_alias['m'] == 'pkg.mod' + # 'import pkg.mod' ==> module_for['pkg'] == None + # ==> module_for['pkg.mod'] == None + # ==> direct_imports['pkg'] == 'pkg.mod' + # ==> direct_imports['pkg.mod'] == 'pkg.mod' + self.module_for: dict[str, str | None] = {} + self.direct_imports: dict[str, str] = {} + self.reverse_alias: dict[str, str] = {} + + # required_names is the set of names that are actually used in a type annotation + self.required_names: set[str] = set() + + # Names that should be reexported if they come from another module + self.reexports: set[str] = set() + + def add_import_from( + self, module: str, names: list[tuple[str, str | None]], require: bool = False + ) -> None: + for name, alias in names: + if alias: + # 'from {module} import {name} as {alias}' + self.module_for[alias] = module + self.reverse_alias[alias] = name + else: + # 'from {module} import {name}' + self.module_for[name] = module + self.reverse_alias.pop(name, None) + if require: + self.require_name(alias or name) + self.direct_imports.pop(alias or name, None) + + def add_import(self, module: str, alias: str | None = None, require: bool = False) -> None: + if 
alias: + # 'import {module} as {alias}' + assert "." not in alias # invalid syntax + self.module_for[alias] = None + self.reverse_alias[alias] = module + if require: + self.required_names.add(alias) + else: + # 'import {module}' + name = module + if require: + self.required_names.add(name) + # add module and its parent packages + while name: + self.module_for[name] = None + self.direct_imports[name] = module + self.reverse_alias.pop(name, None) + name = name.rpartition(".")[0] + + def require_name(self, name: str) -> None: + while name not in self.direct_imports and "." in name: + name = name.rsplit(".", 1)[0] + self.required_names.add(name) + + def reexport(self, name: str) -> None: + """Mark a given non qualified name as needed in __all__. + + This means that in case it comes from a module, it should be + imported with an alias even if the alias is the same as the name. + """ + self.require_name(name) + self.reexports.add(name) + + def import_lines(self) -> list[str]: + """The list of required import lines (as strings with python code). + + In order for a module be included in this output, an indentifier must be both + 'required' via require_name() and 'imported' via add_import_from() + or add_import() + """ + result = [] + + # To summarize multiple names imported from a same module, we collect those + # in the `module_map` dictionary, mapping a module path to the list of names that should + # be imported from it. the names can also be alias in the form 'original as alias' + module_map: Mapping[str, list[str]] = defaultdict(list) + + for name in sorted( + self.required_names, + key=lambda n: (self.reverse_alias[n], n) if n in self.reverse_alias else (n, ""), + ): + # If we haven't seen this name in an import statement, ignore it + if name not in self.module_for: + continue + + m = self.module_for[name] + if m is not None: + # This name was found in a from ... import ... 
+ # Collect the name in the module_map + if name in self.reverse_alias: + name = f"{self.reverse_alias[name]} as {name}" + elif name in self.reexports: + name = f"{name} as {name}" + module_map[m].append(name) + else: + # This name was found in an import ... + # We can already generate the import line + if name in self.reverse_alias: + source = self.reverse_alias[name] + result.append(f"import {source} as {name}\n") + elif name in self.reexports: + assert "." not in name # Because reexports only has nonqualified names + result.append(f"import {name} as {name}\n") + else: + result.append(f"import {name}\n") + + # Now generate all the from ... import ... lines collected in module_map + for module, names in sorted(module_map.items()): + result.append(f"from {module} import {', '.join(sorted(names))}\n") + return result + + +@mypyc_attr(allow_interpreted_subclasses=True) +class BaseStubGenerator: + # These names should be omitted from generated stubs. + IGNORED_DUNDERS: Final = { + "__all__", + "__author__", + "__about__", + "__copyright__", + "__email__", + "__license__", + "__summary__", + "__title__", + "__uri__", + "__str__", + "__repr__", + "__getstate__", + "__setstate__", + "__slots__", + "__builtins__", + "__cached__", + "__file__", + "__name__", + "__package__", + "__path__", + "__spec__", + "__loader__", + } + TYPING_MODULE_NAMES: Final = ("typing", "typing_extensions") + # Special-cased names that are implicitly exported from the stub (from m import y as y). + EXTRA_EXPORTED: Final = { + "pyasn1_modules.rfc2437.univ", + "pyasn1_modules.rfc2459.char", + "pyasn1_modules.rfc2459.univ", + } + + def __init__( + self, + _all_: list[str] | None = None, + include_private: bool = False, + export_less: bool = False, + include_docstrings: bool = False, + ): + # Best known value of __all__. + self._all_ = _all_ + self._include_private = include_private + self._include_docstrings = include_docstrings + # Disable implicit exports of package-internal imports? 
+ self.export_less = export_less + self._import_lines: list[str] = [] + self._output: list[str] = [] + # Current indent level (indent is hardcoded to 4 spaces). + self._indent = "" + self._toplevel_names: list[str] = [] + self.import_tracker = ImportTracker() + # Top-level members + self.defined_names: set[str] = set() + self.sig_generators = self.get_sig_generators() + # populated by visit_mypy_file + self.module_name: str = "" + + def get_sig_generators(self) -> list[SignatureGenerator]: + return [] + + def refers_to_fullname(self, name: str, fullname: str | tuple[str, ...]) -> bool: + """Return True if the variable name identifies the same object as the given fullname(s).""" + if isinstance(fullname, tuple): + return any(self.refers_to_fullname(name, fname) for fname in fullname) + module, short = fullname.rsplit(".", 1) + return self.import_tracker.module_for.get(name) == module and ( + name == short or self.import_tracker.reverse_alias.get(name) == short + ) + + def add_name(self, fullname: str, require: bool = True) -> str: + """Add a name to be imported and return the name reference. + + The import will be internal to the stub (i.e don't reexport). 
+ """ + module, name = fullname.rsplit(".", 1) + alias = "_" + name if name in self.defined_names else None + self.import_tracker.add_import_from(module, [(name, alias)], require=require) + return alias or name + + def add_import_line(self, line: str) -> None: + """Add a line of text to the import section, unless it's already there.""" + if line not in self._import_lines: + self._import_lines.append(line) + + def get_imports(self) -> str: + """Return the import statements for the stub.""" + imports = "" + if self._import_lines: + imports += "".join(self._import_lines) + imports += "".join(self.import_tracker.import_lines()) + return imports + + def output(self) -> str: + """Return the text for the stub.""" + imports = self.get_imports() + if imports and self._output: + imports += "\n" + return imports + "".join(self._output) + + def add(self, string: str) -> None: + """Add text to generated stub.""" + self._output.append(string) + + def is_top_level(self) -> bool: + """Are we processing the top level of a file?""" + return self._indent == "" + + def indent(self) -> None: + """Add one level of indentation.""" + self._indent += " " + + def dedent(self) -> None: + """Remove one level of indentation.""" + self._indent = self._indent[:-4] + + def record_name(self, name: str) -> None: + """Mark a name as defined. + + This only does anything if at the top level of a module. + """ + if self.is_top_level(): + self._toplevel_names.append(name) + + def is_recorded_name(self, name: str) -> bool: + """Has this name been recorded previously?""" + return self.is_top_level() and name in self._toplevel_names + + def set_defined_names(self, defined_names: set[str]) -> None: + self.defined_names = defined_names + # Names in __all__ are required + for name in self._all_ or (): + if name not in self.IGNORED_DUNDERS: + self.import_tracker.reexport(name) + + # These are "soft" imports for objects which might appear in annotations but not have + # a corresponding import statement. 
+        known_imports = {
+            "_typeshed": ["Incomplete"],
+            "typing": ["Any", "TypeVar", "NamedTuple"],
+            "collections.abc": ["Generator"],
+            "typing_extensions": ["TypedDict", "ParamSpec", "TypeVarTuple"],
+        }
+        for pkg, imports in known_imports.items():
+            for t in imports:
+                # require=False means that the import won't be added unless require_name() is called
+                # for the object during generation.
+                self.add_name(f"{pkg}.{t}", require=False)
+
+    def check_undefined_names(self) -> None:
+        undefined_names = [name for name in self._all_ or [] if name not in self._toplevel_names]
+        if undefined_names:
+            if self._output:
+                self.add("\n")
+            self.add("# Names in __all__ with no definition:\n")
+            for name in sorted(undefined_names):
+                self.add(f"# {name}\n")
+
+    def get_signatures(
+        self,
+        default_signature: FunctionSig,
+        sig_generators: list[SignatureGenerator],
+        func_ctx: FunctionContext,
+    ) -> list[FunctionSig]:
+        for sig_gen in sig_generators:
+            inferred = sig_gen.get_function_sig(default_signature, func_ctx)
+            if inferred:
+                return inferred
+
+        return [default_signature]
+
+    def get_property_type(
+        self,
+        default_type: str | None,
+        sig_generators: list[SignatureGenerator],
+        func_ctx: FunctionContext,
+    ) -> str | None:
+        for sig_gen in sig_generators:
+            inferred = sig_gen.get_property_type(default_type, func_ctx)
+            if inferred:
+                return inferred
+
+        return default_type
+
+    def format_func_def(
+        self,
+        sigs: list[FunctionSig],
+        is_coroutine: bool = False,
+        decorators: list[str] | None = None,
+        docstring: str | None = None,
+    ) -> list[str]:
+        lines: list[str] = []
+        if decorators is None:
+            decorators = []
+
+        for signature in sigs:
+            # dump decorators, just before "def ..."
+ for deco in decorators: + lines.append(f"{self._indent}{deco}") + + lines.append( + signature.format_sig( + indent=self._indent, + is_async=is_coroutine, + docstring=docstring if self._include_docstrings else None, + ) + ) + return lines + + def print_annotation( + self, + t: Type, + known_modules: list[str] | None = None, + local_modules: list[str] | None = None, + ) -> str: + printer = AnnotationPrinter(self, known_modules, local_modules) + return t.accept(printer) + + def is_not_in_all(self, name: str) -> bool: + if self.is_private_name(name): + return False + if self._all_: + return self.is_top_level() and name not in self._all_ + return False + + def is_private_name(self, name: str, fullname: str | None = None) -> bool: + if self._include_private: + return False + if fullname in self.EXTRA_EXPORTED: + return False + if name == "_": + return False + return name.startswith("_") and (not name.endswith("__") or name in self.IGNORED_DUNDERS) + + def should_reexport(self, name: str, full_module: str, name_is_alias: bool) -> bool: + if ( + not name_is_alias + and self.module_name + and (self.module_name + "." + name) in self.EXTRA_EXPORTED + ): + # Special case certain names that should be exported, against our general rules. + return True + is_private = self.is_private_name(name, full_module + "." + name) + top_level = full_module.split(".")[0] + self_top_level = self.module_name.split(".", 1)[0] + if ( + not name_is_alias + and not self.export_less + and (not self._all_ or name in self.IGNORED_DUNDERS) + and self.module_name + and not is_private + and top_level in (self_top_level, "_" + self_top_level) + ): + # Export imports from the same package, since we can't reliably tell whether they + # are part of the public API. 
+ return True + return False diff --git a/mypy/test/teststubgen.py b/mypy/test/teststubgen.py index 7e30515ac892..ace0b4d95573 100644 --- a/mypy/test/teststubgen.py +++ b/mypy/test/teststubgen.py @@ -28,21 +28,19 @@ Options, collect_build_targets, generate_stubs, - get_sig_generators, is_blacklisted_path, is_non_library_module, mypy_options, parse_options, ) -from mypy.stubgenc import ( - generate_c_function_stub, - generate_c_property_stub, - generate_c_type_stub, - infer_method_args, +from mypy.stubgenc import InspectionStubGenerator, infer_c_method_args +from mypy.stubutil import ( + ClassInfo, + common_dir_prefix, infer_method_ret_type, - is_c_property_readonly, + remove_misplaced_type_comments, + walk_packages, ) -from mypy.stubutil import common_dir_prefix, remove_misplaced_type_comments, walk_packages from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_equal, assert_string_arrays_equal, local_sys_path_set @@ -62,7 +60,8 @@ def test_files_found(self) -> None: os.mkdir(os.path.join("subdir", "pack")) self.make_file("subdir", "pack", "__init__.py") opts = parse_options(["subdir"]) - py_mods, c_mods = collect_build_targets(opts, mypy_options(opts)) + py_mods, pyi_mods, c_mods = collect_build_targets(opts, mypy_options(opts)) + assert_equal(pyi_mods, []) assert_equal(c_mods, []) files = {mod.path for mod in py_mods} assert_equal( @@ -87,7 +86,8 @@ def test_packages_found(self) -> None: self.make_file("pack", "a.py") self.make_file("pack", "b.py") opts = parse_options(["-p", "pack"]) - py_mods, c_mods = collect_build_targets(opts, mypy_options(opts)) + py_mods, pyi_mods, c_mods = collect_build_targets(opts, mypy_options(opts)) + assert_equal(pyi_mods, []) assert_equal(c_mods, []) files = {os.path.relpath(mod.path or "FAIL") for mod in py_mods} assert_equal( @@ -111,7 +111,7 @@ def test_module_not_found(self) -> None: os.chdir(tmp) self.make_file(tmp, "mymodule.py", content="import a") opts = parse_options(["-m", 
"mymodule"]) - py_mods, c_mods = collect_build_targets(opts, mypy_options(opts)) + collect_build_targets(opts, mypy_options(opts)) assert captured_output.getvalue() == "" finally: sys.stdout = sys.__stdout__ @@ -702,10 +702,14 @@ def run_case_inner(self, testcase: DataDrivenTestCase) -> None: out_dir = "out" try: try: - if not testcase.name.endswith("_import"): - options.no_import = True - if not testcase.name.endswith("_semanal"): - options.parse_only = True + if testcase.name.endswith("_inspect"): + options.inspect = True + else: + if not testcase.name.endswith("_import"): + options.no_import = True + if not testcase.name.endswith("_semanal"): + options.parse_only = True + generate_stubs(options) a: list[str] = [] for module in modules: @@ -781,35 +785,28 @@ class StubgencSuite(unittest.TestCase): """ def test_infer_hash_sig(self) -> None: - assert_equal(infer_method_args("__hash__"), [self_arg]) + assert_equal(infer_c_method_args("__hash__"), [self_arg]) assert_equal(infer_method_ret_type("__hash__"), "int") def test_infer_getitem_sig(self) -> None: - assert_equal(infer_method_args("__getitem__"), [self_arg, ArgSig(name="index")]) + assert_equal(infer_c_method_args("__getitem__"), [self_arg, ArgSig(name="index")]) def test_infer_setitem_sig(self) -> None: assert_equal( - infer_method_args("__setitem__"), + infer_c_method_args("__setitem__"), [self_arg, ArgSig(name="index"), ArgSig(name="object")], ) assert_equal(infer_method_ret_type("__setitem__"), "None") + def test_infer_eq_op_sig(self) -> None: + for op in ("eq", "ne", "lt", "le", "gt", "ge"): + assert_equal( + infer_c_method_args(f"__{op}__"), [self_arg, ArgSig(name="other", type="object")] + ) + def test_infer_binary_op_sig(self) -> None: - for op in ( - "eq", - "ne", - "lt", - "le", - "gt", - "ge", - "add", - "radd", - "sub", - "rsub", - "mul", - "rmul", - ): - assert_equal(infer_method_args(f"__{op}__"), [self_arg, ArgSig(name="other")]) + for op in ("add", "radd", "sub", "rsub", "mul", "rmul"): + 
assert_equal(infer_c_method_args(f"__{op}__"), [self_arg, ArgSig(name="other")]) def test_infer_equality_op_sig(self) -> None: for op in ("eq", "ne", "lt", "le", "gt", "ge", "contains"): @@ -817,46 +814,31 @@ def test_infer_equality_op_sig(self) -> None: def test_infer_unary_op_sig(self) -> None: for op in ("neg", "pos"): - assert_equal(infer_method_args(f"__{op}__"), [self_arg]) + assert_equal(infer_c_method_args(f"__{op}__"), [self_arg]) def test_infer_cast_sig(self) -> None: for op in ("float", "bool", "bytes", "int"): assert_equal(infer_method_ret_type(f"__{op}__"), op) - def test_generate_c_type_stub_no_crash_for_object(self) -> None: + def test_generate_class_stub_no_crash_for_object(self) -> None: output: list[str] = [] mod = ModuleType("module", "") # any module is fine - imports: list[str] = [] - generate_c_type_stub( - mod, - "alias", - object, - output, - imports=imports, - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), - ) - assert_equal(imports, []) + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + + gen.generate_class_stub("alias", object, output) + assert_equal(gen.get_imports().splitlines(), []) assert_equal(output[0], "class alias:") - def test_generate_c_type_stub_variable_type_annotation(self) -> None: + def test_generate_class_stub_variable_type_annotation(self) -> None: # This class mimics the stubgen unit test 'testClassVariable' class TestClassVariableCls: x = 1 output: list[str] = [] - imports: list[str] = [] mod = ModuleType("module", "") # any module is fine - generate_c_type_stub( - mod, - "C", - TestClassVariableCls, - output, - imports=imports, - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), - ) - assert_equal(imports, []) + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_class_stub("C", TestClassVariableCls, output) + assert_equal(gen.get_imports().splitlines(), 
["from typing import ClassVar"]) assert_equal(output, ["class C:", " x: ClassVar[int] = ..."]) def test_generate_c_type_inheritance(self) -> None: @@ -864,35 +846,19 @@ class TestClass(KeyError): pass output: list[str] = [] - imports: list[str] = [] mod = ModuleType("module, ") - generate_c_type_stub( - mod, - "C", - TestClass, - output, - imports=imports, - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), - ) + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_class_stub("C", TestClass, output) assert_equal(output, ["class C(KeyError): ..."]) - assert_equal(imports, []) + assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_type_inheritance_same_module(self) -> None: output: list[str] = [] - imports: list[str] = [] mod = ModuleType(TestBaseClass.__module__, "") - generate_c_type_stub( - mod, - "C", - TestClass, - output, - imports=imports, - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), - ) + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_class_stub("C", TestClass, output) assert_equal(output, ["class C(TestBaseClass): ..."]) - assert_equal(imports, []) + assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_type_inheritance_other_module(self) -> None: import argparse @@ -901,38 +867,22 @@ class TestClass(argparse.Action): pass output: list[str] = [] - imports: list[str] = [] mod = ModuleType("module", "") - generate_c_type_stub( - mod, - "C", - TestClass, - output, - imports=imports, - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), - ) + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_class_stub("C", TestClass, output) assert_equal(output, ["class C(argparse.Action): ..."]) - assert_equal(imports, ["import argparse"]) + assert_equal(gen.get_imports().splitlines(), 
["import argparse"]) def test_generate_c_type_inheritance_builtin_type(self) -> None: class TestClass(type): pass output: list[str] = [] - imports: list[str] = [] mod = ModuleType("module", "") - generate_c_type_stub( - mod, - "C", - TestClass, - output, - imports=imports, - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), - ) + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_class_stub("C", TestClass, output) assert_equal(output, ["class C(type): ..."]) - assert_equal(imports, []) + assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_type_with_docstring(self) -> None: class TestClass: @@ -942,22 +892,16 @@ def test(self, arg0: str) -> None: """ output: list[str] = [] - imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") - generate_c_function_stub( - mod, + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( "test", TestClass.test, output=output, - imports=imports, - self_var="self", - cls=TestClass, - class_name="TestClass", - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), ) assert_equal(output, ["def test(self, arg0: int) -> Any: ..."]) - assert_equal(imports, []) + assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_type_with_docstring_no_self_arg(self) -> None: class TestClass: @@ -967,22 +911,16 @@ def test(self, arg0: str) -> None: """ output: list[str] = [] - imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") - generate_c_function_stub( - mod, + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( "test", TestClass.test, output=output, - imports=imports, - self_var="self", - cls=TestClass, - class_name="TestClass", - known_modules=[mod.__name__], - 
sig_generators=get_sig_generators(parse_options([])), + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), ) assert_equal(output, ["def test(self, arg0: int) -> Any: ..."]) - assert_equal(imports, []) + assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_type_classmethod(self) -> None: class TestClass: @@ -991,22 +929,16 @@ def test(cls, arg0: str) -> None: pass output: list[str] = [] - imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") - generate_c_function_stub( - mod, + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( "test", TestClass.test, output=output, - imports=imports, - self_var="cls", - cls=TestClass, - class_name="TestClass", - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), + class_info=ClassInfo(self_var="cls", cls=TestClass, name="TestClass"), ) - assert_equal(output, ["@classmethod", "def test(cls, *args, **kwargs) -> Any: ..."]) - assert_equal(imports, []) + assert_equal(output, ["@classmethod", "def test(cls, *args, **kwargs): ..."]) + assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_type_classmethod_with_overloads(self) -> None: class TestClass: @@ -1019,19 +951,13 @@ def test(self, arg0: str) -> None: pass output: list[str] = [] - imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") - generate_c_function_stub( - mod, + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( "test", TestClass.test, output=output, - imports=imports, - self_var="cls", - cls=TestClass, - class_name="TestClass", - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), + class_info=ClassInfo(self_var="cls", cls=TestClass, name="TestClass"), ) assert_equal( output, @@ -1044,7 +970,7 @@ def test(self, arg0: str) -> None: "def test(cls, arg0: int) -> Any: ...", ], ) - assert_equal(imports, 
["from typing import overload"]) + assert_equal(gen.get_imports().splitlines(), ["from typing import overload"]) def test_generate_c_type_with_docstring_empty_default(self) -> None: class TestClass: @@ -1054,22 +980,16 @@ def test(self, arg0: str = "") -> None: """ output: list[str] = [] - imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") - generate_c_function_stub( - mod, + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( "test", TestClass.test, output=output, - imports=imports, - self_var="self", - cls=TestClass, - class_name="TestClass", - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), ) assert_equal(output, ["def test(self, arg0: str = ...) -> Any: ..."]) - assert_equal(imports, []) + assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_function_other_module_arg(self) -> None: """Test that if argument references type from other module, module will be imported.""" @@ -1082,19 +1002,11 @@ def test(arg0: str) -> None: """ output: list[str] = [] - imports: list[str] = [] mod = ModuleType(self.__module__, "") - generate_c_function_stub( - mod, - "test", - test, - output=output, - imports=imports, - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), - ) + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub("test", test, output=output) assert_equal(output, ["def test(arg0: argparse.Action) -> Any: ..."]) - assert_equal(imports, ["import argparse"]) + assert_equal(gen.get_imports().splitlines(), ["import argparse"]) def test_generate_c_function_same_module(self) -> None: """Test that if annotation references type from same module but using full path, no module @@ -1109,19 +1021,11 @@ def test(arg0: str) -> None: """ output: list[str] = [] - imports: list[str] = [] 
mod = ModuleType("argparse", "") - generate_c_function_stub( - mod, - "test", - test, - output=output, - imports=imports, - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), - ) + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub("test", test, output=output) assert_equal(output, ["def test(arg0: Action) -> Action: ..."]) - assert_equal(imports, []) + assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_function_other_module(self) -> None: """Test that if annotation references type from other module, module will be imported.""" @@ -1132,19 +1036,11 @@ def test(arg0: str) -> None: """ output: list[str] = [] - imports: list[str] = [] mod = ModuleType(self.__module__, "") - generate_c_function_stub( - mod, - "test", - test, - output=output, - imports=imports, - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), - ) + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub("test", test, output=output) assert_equal(output, ["def test(arg0: argparse.Action) -> argparse.Action: ..."]) - assert_equal(set(imports), {"import argparse"}) + assert_equal(gen.get_imports().splitlines(), ["import argparse"]) def test_generate_c_function_same_module_nested(self) -> None: """Test that if annotation references type from same module but using full path, no module @@ -1159,19 +1055,11 @@ def test(arg0: str) -> None: """ output: list[str] = [] - imports: list[str] = [] mod = ModuleType("argparse", "") - generate_c_function_stub( - mod, - "test", - test, - output=output, - imports=imports, - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), - ) + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub("test", test, output=output) assert_equal(output, ["def test(arg0: list[Action]) -> 
list[Action]: ..."]) - assert_equal(imports, []) + assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_function_same_module_compound(self) -> None: """Test that if annotation references type from same module but using full path, no module @@ -1186,19 +1074,11 @@ def test(arg0: str) -> None: """ output: list[str] = [] - imports: list[str] = [] mod = ModuleType("argparse", "") - generate_c_function_stub( - mod, - "test", - test, - output=output, - imports=imports, - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), - ) - assert_equal(output, ["def test(arg0: Union[Action,None]) -> Tuple[Action,None]: ..."]) - assert_equal(imports, []) + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub("test", test, output=output) + assert_equal(output, ["def test(arg0: Union[Action, None]) -> Tuple[Action, None]: ..."]) + assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_function_other_module_nested(self) -> None: """Test that if annotation references type from other module, module will be imported, @@ -1210,19 +1090,13 @@ def test(arg0: str) -> None: """ output: list[str] = [] - imports: list[str] = [] mod = ModuleType(self.__module__, "") - generate_c_function_stub( - mod, - "test", - test, - output=output, - imports=imports, - known_modules=["foo", "foo.spangle", "bar"], - sig_generators=get_sig_generators(parse_options([])), + gen = InspectionStubGenerator( + mod.__name__, known_modules=["foo", "foo.spangle", "bar"], module=mod ) + gen.generate_function_stub("test", test, output=output) assert_equal(output, ["def test(arg0: foo.bar.Action) -> other.Thing: ..."]) - assert_equal(set(imports), {"import foo", "import other"}) + assert_equal(gen.get_imports().splitlines(), ["import foo", "import other"]) def test_generate_c_function_no_crash_for_non_str_docstring(self) -> None: def test(arg0: str) -> None: @@ -1231,19 +1105,11 @@ def test(arg0: str) 
-> None: test.__doc__ = property(lambda self: "test(arg0: str) -> None") # type: ignore[assignment] output: list[str] = [] - imports: list[str] = [] mod = ModuleType(self.__module__, "") - generate_c_function_stub( - mod, - "test", - test, - output=output, - imports=imports, - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), - ) - assert_equal(output, ["def test(*args, **kwargs) -> Any: ..."]) - assert_equal(imports, []) + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub("test", test, output=output) + assert_equal(output, ["def test(*args, **kwargs): ..."]) + assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_property_with_pybind11(self) -> None: """Signatures included by PyBind11 inside property.fget are read.""" @@ -1258,13 +1124,15 @@ def get_attribute(self) -> None: readwrite_properties: list[str] = [] readonly_properties: list[str] = [] - generate_c_property_stub( + mod = ModuleType("module", "") # any module is fine + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_property_stub( "attribute", + TestClass.__dict__["attribute"], TestClass.attribute, [], readwrite_properties, readonly_properties, - is_c_property_readonly(TestClass.attribute), ) assert_equal(readwrite_properties, []) assert_equal(readonly_properties, ["@property", "def attribute(self) -> str: ..."]) @@ -1284,15 +1152,17 @@ def attribute(self, value: int) -> None: readwrite_properties: list[str] = [] readonly_properties: list[str] = [] - generate_c_property_stub( + mod = ModuleType("module", "") # any module is fine + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_property_stub( "attribute", - type(TestClass.attribute), + TestClass.__dict__["attribute"], + TestClass.attribute, [], readwrite_properties, readonly_properties, - is_c_property_readonly(TestClass.attribute), ) - 
assert_equal(readwrite_properties, ["attribute: Any"]) + assert_equal(readwrite_properties, ["attribute: Incomplete"]) assert_equal(readonly_properties, []) def test_generate_c_type_with_single_arg_generic(self) -> None: @@ -1303,22 +1173,16 @@ def test(self, arg0: str) -> None: """ output: list[str] = [] - imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") - generate_c_function_stub( - mod, + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( "test", TestClass.test, output=output, - imports=imports, - self_var="self", - cls=TestClass, - class_name="TestClass", - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), ) assert_equal(output, ["def test(self, arg0: List[int]) -> Any: ..."]) - assert_equal(imports, []) + assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_type_with_double_arg_generic(self) -> None: class TestClass: @@ -1328,22 +1192,16 @@ def test(self, arg0: str) -> None: """ output: list[str] = [] - imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") - generate_c_function_stub( - mod, + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( "test", TestClass.test, output=output, - imports=imports, - self_var="self", - cls=TestClass, - class_name="TestClass", - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), ) - assert_equal(output, ["def test(self, arg0: Dict[str,int]) -> Any: ..."]) - assert_equal(imports, []) + assert_equal(output, ["def test(self, arg0: Dict[str, int]) -> Any: ..."]) + assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_type_with_nested_generic(self) -> None: class TestClass: @@ -1353,22 +1211,16 @@ def test(self, arg0: str) -> 
None: """ output: list[str] = [] - imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") - generate_c_function_stub( - mod, + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( "test", TestClass.test, output=output, - imports=imports, - self_var="self", - cls=TestClass, - class_name="TestClass", - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), ) - assert_equal(output, ["def test(self, arg0: Dict[str,List[int]]) -> Any: ..."]) - assert_equal(imports, []) + assert_equal(output, ["def test(self, arg0: Dict[str, List[int]]) -> Any: ..."]) + assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_type_with_generic_using_other_module_first(self) -> None: class TestClass: @@ -1378,22 +1230,16 @@ def test(self, arg0: str) -> None: """ output: list[str] = [] - imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") - generate_c_function_stub( - mod, + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( "test", TestClass.test, output=output, - imports=imports, - self_var="self", - cls=TestClass, - class_name="TestClass", - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), ) - assert_equal(output, ["def test(self, arg0: Dict[argparse.Action,int]) -> Any: ..."]) - assert_equal(imports, ["import argparse"]) + assert_equal(output, ["def test(self, arg0: Dict[argparse.Action, int]) -> Any: ..."]) + assert_equal(gen.get_imports().splitlines(), ["import argparse"]) def test_generate_c_type_with_generic_using_other_module_last(self) -> None: class TestClass: @@ -1403,22 +1249,16 @@ def test(self, arg0: str) -> None: """ output: list[str] = [] - imports: list[str] = [] mod = 
ModuleType(TestClass.__module__, "") - generate_c_function_stub( - mod, + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( "test", TestClass.test, output=output, - imports=imports, - self_var="self", - cls=TestClass, - class_name="TestClass", - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), ) - assert_equal(output, ["def test(self, arg0: Dict[str,argparse.Action]) -> Any: ..."]) - assert_equal(imports, ["import argparse"]) + assert_equal(output, ["def test(self, arg0: Dict[str, argparse.Action]) -> Any: ..."]) + assert_equal(gen.get_imports().splitlines(), ["import argparse"]) def test_generate_c_type_with_overload_pybind11(self) -> None: class TestClass: @@ -1433,19 +1273,13 @@ def __init__(self, arg0: str) -> None: """ output: list[str] = [] - imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") - generate_c_function_stub( - mod, + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( "__init__", TestClass.__init__, output=output, - imports=imports, - self_var="self", - cls=TestClass, - class_name="TestClass", - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), + class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), ) assert_equal( output, @@ -1458,7 +1292,7 @@ def __init__(self, arg0: str) -> None: "def __init__(self, *args, **kwargs) -> Any: ...", ], ) - assert_equal(set(imports), {"from typing import overload"}) + assert_equal(gen.get_imports().splitlines(), ["from typing import overload"]) def test_generate_c_type_with_overload_shiboken(self) -> None: class TestClass: @@ -1471,19 +1305,18 @@ def __init__(self, arg0: str) -> None: pass output: list[str] = [] - imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") - generate_c_function_stub( - 
mod, + gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) + gen.generate_function_stub( "__init__", TestClass.__init__, output=output, - imports=imports, - self_var="self", - cls=TestClass, - class_name="TestClass", - known_modules=[mod.__name__], - sig_generators=get_sig_generators(parse_options([])), + class_info=ClassInfo( + self_var="self", + cls=TestClass, + name="TestClass", + docstring=getattr(TestClass, "__doc__", None), + ), ) assert_equal( output, @@ -1494,7 +1327,7 @@ def __init__(self, arg0: str) -> None: "def __init__(self, arg0: str, arg1: str) -> None: ...", ], ) - assert_equal(set(imports), {"from typing import overload"}) + assert_equal(gen.get_imports().splitlines(), ["from typing import overload"]) class ArgSigSuite(unittest.TestCase): diff --git a/mypy/traverser.py b/mypy/traverser.py index 2fcc376cfb7c..d11dd395f978 100644 --- a/mypy/traverser.py +++ b/mypy/traverser.py @@ -2,7 +2,7 @@ from __future__ import annotations -from mypy_extensions import mypyc_attr +from mypy_extensions import mypyc_attr, trait from mypy.nodes import ( REVEAL_TYPE, @@ -94,6 +94,7 @@ from mypy.visitor import NodeVisitor +@trait @mypyc_attr(allow_interpreted_subclasses=True) class TraverserVisitor(NodeVisitor[None]): """A parse tree visitor that traverses the parse tree during visiting. diff --git a/setup.py b/setup.py index dcbdc96b3ccf..e3ebe9dd62ec 100644 --- a/setup.py +++ b/setup.py @@ -112,7 +112,6 @@ def run(self): "stubtest.py", "stubgenc.py", "stubdoc.py", - "stubutil.py", ) ) + ( # Don't want to grab this accidentally diff --git a/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/__init__.pyi b/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/__init__.pyi index e69de29bb2d1..0cb252f00259 100644 --- a/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/__init__.pyi +++ b/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/__init__.pyi @@ -0,0 +1 @@ +from . 
import basics as basics diff --git a/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi b/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi index ab5a4f4e78d2..6527f5733eaf 100644 --- a/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi +++ b/test-data/pybind11_mypy_demo/stubgen/pybind11_mypy_demo/basics.pyi @@ -1,7 +1,7 @@ -from typing import ClassVar +from typing import ClassVar, overload -from typing import overload PI: float +__version__: str class Point: class AngleUnit: @@ -11,12 +11,10 @@ class Point: radian: ClassVar[Point.AngleUnit] = ... def __init__(self, value: int) -> None: ... def __eq__(self, other: object) -> bool: ... - def __getstate__(self) -> int: ... def __hash__(self) -> int: ... def __index__(self) -> int: ... def __int__(self) -> int: ... def __ne__(self, other: object) -> bool: ... - def __setstate__(self, state: int) -> None: ... @property def name(self) -> str: ... @property @@ -30,12 +28,10 @@ class Point: pixel: ClassVar[Point.LengthUnit] = ... def __init__(self, value: int) -> None: ... def __eq__(self, other: object) -> bool: ... - def __getstate__(self) -> int: ... def __hash__(self) -> int: ... def __index__(self) -> int: ... def __int__(self) -> int: ... def __ne__(self, other: object) -> bool: ... - def __setstate__(self, state: int) -> None: ... @property def name(self) -> str: ... @property diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index 23dbf36a551b..d83d74306230 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -11,6 +11,11 @@ def f() -> None: ... [case testTwoFunctions] def f(a, b): + """ + this is a docstring + + more. + """ x = 1 def g(arg): pass @@ -37,11 +42,21 @@ def f(x=True, y=False): ... [out] def f(x: bool = ..., y: bool = ...) -> None: ... +[case testDefaultArgBool_inspect] +def f(x=True, y=False): ... +[out] +def f(x: bool = ..., y: bool = ...): ... + [case testDefaultArgStr] def f(x='foo'): ... 
[out] def f(x: str = ...) -> None: ... +[case testDefaultArgStr_inspect] +def f(x='foo'): ... +[out] +def f(x: str = ...): ... + [case testDefaultArgBytes] def f(x=b'foo'): ... [out] @@ -300,6 +315,7 @@ __all__ = [] __author__ = '' __version__ = '' [out] +__version__: str [case testBaseClass] class A: ... @@ -361,6 +377,24 @@ class A: def f(self, x) -> None: ... def h(self) -> None: ... +-- a read/write property is treated the same as an attribute +[case testProperty_inspect] +class A: + @property + def f(self): + return 1 + @f.setter + def f(self, x): ... + + def h(self): + self.f = 1 +[out] +from _typeshed import Incomplete + +class A: + f: Incomplete + def h(self): ... + [case testFunctoolsCachedProperty] import functools @@ -435,6 +469,15 @@ class A: @classmethod def f(cls) -> None: ... +[case testClassMethod_inspect] +class A: + @classmethod + def f(cls): ... +[out] +class A: + @classmethod + def f(cls): ... + [case testIfMainCheck] def a(): ... if __name__ == '__main__': @@ -472,6 +515,23 @@ class B: ... class C: def f(self) -> None: ... +[case testNoSpacesBetweenEmptyClasses_inspect] +class X: + def g(self): ... +class A: ... +class B: ... +class C: + def f(self): ... +[out] +class X: + def g(self): ... + +class A: ... +class B: ... + +class C: + def f(self): ... + [case testExceptionBaseClasses] class A(Exception): ... class B(ValueError): ... @@ -490,6 +550,17 @@ class A: class A: def __eq__(self): ... +[case testOmitSomeSpecialMethods_inspect] +class A: + def __str__(self): ... + def __repr__(self): ... + def __eq__(self): ... + def __getstate__(self): ... + def __setstate__(self, state): ... +[out] +class A: + def __eq__(self) -> bool: ... + -- Tests that will perform runtime imports of modules. -- Don't use `_import` suffix if there are unquoted forward references. @@ -507,6 +578,13 @@ def g(): ... [out] def f() -> None: ... +[case testOmitDefsNotInAll_inspect] +__all__ = [] + ['f'] +def f(): ... +def g(): ... +[out] +def f(): ... 
+ [case testVarDefsNotInAll_import] __all__ = [] + ['f', 'g'] def f(): ... @@ -517,6 +595,16 @@ def g(): ... def f() -> None: ... def g() -> None: ... +[case testVarDefsNotInAll_inspect] +__all__ = [] + ['f', 'g'] +def f(): ... +x = 1 +y = 1 +def g(): ... +[out] +def f(): ... +def g(): ... + [case testIncludeClassNotInAll_import] __all__ = [] + ['f'] def f(): ... @@ -526,6 +614,15 @@ def f() -> None: ... class A: ... +[case testIncludeClassNotInAll_inspect] +__all__ = [] + ['f'] +def f(): ... +class A: ... +[out] +def f(): ... + +class A: ... + [case testAllAndClass_import] __all__ = ['A'] class A: @@ -636,6 +733,23 @@ class C: # Names in __all__ with no definition: # g +[case testCommentForUndefinedName_inspect] +__all__ = ['f', 'x', 'C', 'g'] +def f(): ... +x = 1 +class C: + def g(self): ... +[out] +def f(): ... + +x: int + +class C: + def g(self): ... + +# Names in __all__ with no definition: +# g + [case testIgnoreSlots] class A: __slots__ = () @@ -649,6 +763,13 @@ class A: [out] class A: ... +[case testSkipPrivateProperty_inspect] +class A: + @property + def _foo(self): ... +[out] +class A: ... + [case testIncludePrivateProperty] # flags: --include-private class A: @@ -659,6 +780,16 @@ class A: @property def _foo(self) -> None: ... +[case testIncludePrivateProperty_inspect] +# flags: --include-private +class A: + @property + def _foo(self): ... +[out] +class A: + @property + def _foo(self): ... + [case testSkipPrivateStaticAndClassMethod] class A: @staticmethod @@ -668,6 +799,15 @@ class A: [out] class A: ... +[case testSkipPrivateStaticAndClassMethod_inspect] +class A: + @staticmethod + def _foo(): ... + @classmethod + def _bar(cls): ... +[out] +class A: ... + [case testIncludePrivateStaticAndClassMethod] # flags: --include-private class A: @@ -682,6 +822,20 @@ class A: @classmethod def _bar(cls) -> None: ... +[case testIncludePrivateStaticAndClassMethod_inspect] +# flags: --include-private +class A: + @staticmethod + def _foo(): ... 
+ @classmethod + def _bar(cls): ... +[out] +class A: + @staticmethod + def _foo(): ... + @classmethod + def _bar(cls): ... + [case testNamedtuple] import collections, typing, x X = collections.namedtuple('X', ['a', 'b']) @@ -1801,6 +1955,19 @@ class Outer: class Inner: ... A = Outer.Inner +-- needs improvement +[case testNestedClass_inspect] +class Outer: + class Inner: + pass + +A = Outer.Inner +[out] +class Outer: + class Inner: ... + +class A: ... + [case testFunctionAlias_semanal] from asyncio import coroutine @@ -2034,6 +2201,25 @@ class A: def f(x) -> None: ... def g(x, y: str): ... +class A: + def f(self, x) -> None: ... + +-- Same as above +[case testFunctionPartiallyAnnotated_inspect] +def f(x) -> None: + pass + +def g(x, y: str): + pass + +class A: + def f(self, x) -> None: + pass + +[out] +def f(x) -> None: ... +def g(x, y: str): ... + class A: def f(self, x) -> None: ... @@ -2054,6 +2240,24 @@ def f(x: Any): ... def g(x, y: Any) -> str: ... def h(x: Any) -> str: ... +-- Same as above +[case testExplicitAnyArg_inspect] +from typing import Any + +def f(x: Any): + pass +def g(x, y: Any) -> str: + pass +def h(x: Any) -> str: + pass + +[out] +from typing import Any + +def f(x: Any): ... +def g(x, y: Any) -> str: ... +def h(x: Any) -> str: ... + [case testExplicitReturnedAny] from typing import Any @@ -2385,6 +2589,28 @@ def g() -> None: ... +[case testTestFiles_inspect] +# modules: p p.x p.tests p.tests.test_foo + +[file p/__init__.py] +def f(): pass + +[file p/x.py] +def g(): pass + +[file p/tests/__init__.py] + +[file p/tests/test_foo.py] +def test_thing(): pass + +[out] +# p/__init__.pyi +def f(): ... +# p/x.pyi +def g(): ... + + + [case testVerboseFlag] # Just test that --verbose does not break anything in a basic test case. # flags: --verbose @@ -2686,6 +2912,8 @@ __uri__ = '' __version__ = '' [out] +from m import __version__ as __version__ + class A: ... 
[case testHideDunderModuleAttributesWithAll_import] @@ -2715,6 +2943,7 @@ __uri__ = '' __version__ = '' [out] +from m import __version__ as __version__ [case testAttrsClass_semanal] import attrs @@ -2949,7 +3178,6 @@ class A: @overload def f(self, x: Tuple[int, int]) -> int: ... - @overload def f(x: int, y: int) -> int: ... @overload @@ -2993,7 +3221,6 @@ class A: @overload def f(self, x: Tuple[int, int]) -> int: ... - @overload def f(x: int, y: int) -> int: ... @overload @@ -3068,7 +3295,6 @@ class A: @classmethod def g(cls, x: typing.Tuple[int, int]) -> int: ... - @typing.overload def f(x: int, y: int) -> int: ... @typing.overload @@ -3147,7 +3373,6 @@ class A: @classmethod def g(cls, x: t.Tuple[int, int]) -> int: ... - @t.overload def f(x: int, y: int) -> int: ... @t.overload @@ -3345,6 +3570,67 @@ class Some: def __float__(self) -> float: ... def __index__(self) -> int: ... +-- Same as above +[case testKnownMagicMethodsReturnTypes_inspect] +class Some: + def __len__(self): ... + def __length_hint__(self): ... + def __init__(self): ... + def __del__(self): ... + def __bool__(self): ... + def __bytes__(self): ... + def __format__(self, spec): ... + def __contains__(self, obj): ... + def __complex__(self): ... + def __int__(self): ... + def __float__(self): ... + def __index__(self): ... +[out] +class Some: + def __len__(self) -> int: ... + def __length_hint__(self) -> int: ... + def __init__(self) -> None: ... + def __del__(self) -> None: ... + def __bool__(self) -> bool: ... + def __bytes__(self) -> bytes: ... + def __format__(self, spec) -> str: ... + def __contains__(self, obj) -> bool: ... + def __complex__(self) -> complex: ... + def __int__(self) -> int: ... + def __float__(self) -> float: ... + def __index__(self) -> int: ... + + +[case testKnownMagicMethodsArgTypes] +class MismatchNames: + def __exit__(self, tp, val, tb): ... + +class MatchNames: + def __exit__(self, type, value, traceback): ... 
+ +[out] +class MismatchNames: + def __exit__(self, tp: type[BaseException] | None, val: BaseException | None, tb: types.TracebackType | None) -> None: ... + +class MatchNames: + def __exit__(self, type: type[BaseException] | None, value: BaseException | None, traceback: types.TracebackType | None) -> None: ... + +-- Same as above (but can generate import statements) +[case testKnownMagicMethodsArgTypes_inspect] +class MismatchNames: + def __exit__(self, tp, val, tb): ... + +class MatchNames: + def __exit__(self, type, value, traceback): ... + +[out] +import types + +class MismatchNames: + def __exit__(self, tp: type[BaseException] | None, val: BaseException | None, tb: types.TracebackType | None): ... + +class MatchNames: + def __exit__(self, type: type[BaseException] | None, value: BaseException | None, traceback: types.TracebackType | None): ... [case testTypeVarPEP604Bound] from typing import TypeVar @@ -3397,7 +3683,7 @@ from typing import TypedDict X = TypedDict('X', a=int, b=str) Y = TypedDict('X', a=int, b=str, total=False) [out] -from typing import TypedDict +from typing_extensions import TypedDict class X(TypedDict): a: int From 2bcec24635670bcff6efab3d21641f39f0f35857 Mon Sep 17 00:00:00 2001 From: Valentin Stanciu <250871+svalentin@users.noreply.github.com> Date: Mon, 16 Oct 2023 18:37:23 +0100 Subject: [PATCH 098/144] Stream dmypy output instead of dumping everything at the end (#16252) This does 2 things: 1. It changes the IPC code to work with multiple messages. 2. It changes the dmypy client/server communication so that it streams stdout/stderr instead of dumping everything at the end. For 1, we have to provide a way to separate out different messages. I chose to frame messages as bytes separated by whitespace character. That means we have to encode the message in a scheme that escapes whitespace. The `codecs.encode(, 'base64')` seems reasonable. It encodes more than needed but the application is not IPC IO limited so it should be fine. 
With this convention in place, all we have to do is read from the socket stream until we have a whitespace character. The framing logic can be easily changed. For 2, since we communicate with JSONs, it's easy to add a "finished" key that tells us it's the final response from dmypy. Anything else is just stdout/stderr output. Note: dmypy server also returns out/err which is the output of actual mypy type checking. Right now this change does not stream that output. We can stream that in a followup change. We just have to decide on how to differentiate the 4 text streams (stdout/stderr/out/err) that will now be interleaved. The WriteToConn class could use more love. I just put a bare minimum. --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- mypy/dmypy/client.py | 35 +++++++++++------------ mypy/dmypy_server.py | 20 ++++++-------- mypy/dmypy_util.py | 33 ++++++++++++++++++++-- mypy/ipc.py | 66 ++++++++++++++++++++++++++++++++++++-------- mypy/test/testipc.py | 52 ++++++++++++++++++++++++++++------ 5 files changed, 155 insertions(+), 51 deletions(-) diff --git a/mypy/dmypy/client.py b/mypy/dmypy/client.py index c3a2308d1b44..229740e44db0 100644 --- a/mypy/dmypy/client.py +++ b/mypy/dmypy/client.py @@ -17,7 +17,7 @@ from typing import Any, Callable, Mapping, NoReturn from mypy.dmypy_os import alive, kill -from mypy.dmypy_util import DEFAULT_STATUS_FILE, receive +from mypy.dmypy_util import DEFAULT_STATUS_FILE, receive, send from mypy.ipc import IPCClient, IPCException from mypy.util import check_python_version, get_terminal_width, should_force_color from mypy.version import __version__ @@ -659,28 +659,29 @@ def request( # so that it can format the type checking output accordingly. 
args["is_tty"] = sys.stdout.isatty() or should_force_color() args["terminal_width"] = get_terminal_width() - bdata = json.dumps(args).encode("utf8") _, name = get_status(status_file) try: with IPCClient(name, timeout) as client: - client.write(bdata) - response = receive(client) + send(client, args) + + final = False + while not final: + response = receive(client) + final = bool(response.pop("final", False)) + # Display debugging output written to stdout/stderr in the server process for convenience. + # This should not be confused with "out" and "err" fields in the response. + # Those fields hold the output of the "check" command, and are handled in check_output(). + stdout = response.pop("stdout", None) + if stdout: + sys.stdout.write(stdout) + stderr = response.pop("stderr", None) + if stderr: + sys.stderr.write(stderr) except (OSError, IPCException) as err: return {"error": str(err)} # TODO: Other errors, e.g. ValueError, UnicodeError - else: - # Display debugging output written to stdout/stderr in the server process for convenience. - # This should not be confused with "out" and "err" fields in the response. - # Those fields hold the output of the "check" command, and are handled in check_output(). 
- stdout = response.get("stdout") - if stdout: - sys.stdout.write(stdout) - stderr = response.get("stderr") - if stderr: - print("-" * 79) - print("stderr:") - sys.stdout.write(stderr) - return response + + return response def get_status(status_file: str) -> tuple[int, str]: diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index faa9a23fadfb..9cc0888fc208 100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -23,7 +23,7 @@ import mypy.build import mypy.errors import mypy.main -from mypy.dmypy_util import receive +from mypy.dmypy_util import WriteToConn, receive, send from mypy.find_sources import InvalidSourceList, create_source_list from mypy.fscache import FileSystemCache from mypy.fswatcher import FileData, FileSystemWatcher @@ -208,10 +208,12 @@ def _response_metadata(self) -> dict[str, str]: def serve(self) -> None: """Serve requests, synchronously (no thread or fork).""" + command = None server = IPCServer(CONNECTION_NAME, self.timeout) orig_stdout = sys.stdout orig_stderr = sys.stderr + try: with open(self.status_file, "w") as f: json.dump({"pid": os.getpid(), "connection_name": server.connection_name}, f) @@ -219,10 +221,8 @@ def serve(self) -> None: while True: with server: data = receive(server) - debug_stdout = io.StringIO() - debug_stderr = io.StringIO() - sys.stdout = debug_stdout - sys.stderr = debug_stderr + sys.stdout = WriteToConn(server, "stdout") # type: ignore[assignment] + sys.stderr = WriteToConn(server, "stderr") # type: ignore[assignment] resp: dict[str, Any] = {} if "command" not in data: resp = {"error": "No command found in request"} @@ -239,15 +239,13 @@ def serve(self) -> None: tb = traceback.format_exception(*sys.exc_info()) resp = {"error": "Daemon crashed!\n" + "".join(tb)} resp.update(self._response_metadata()) - resp["stdout"] = debug_stdout.getvalue() - resp["stderr"] = debug_stderr.getvalue() - server.write(json.dumps(resp).encode("utf8")) + resp["final"] = True + send(server, resp) raise - resp["stdout"] = 
debug_stdout.getvalue() - resp["stderr"] = debug_stderr.getvalue() + resp["final"] = True try: resp.update(self._response_metadata()) - server.write(json.dumps(resp).encode("utf8")) + send(server, resp) except OSError: pass # Maybe the client hung up if command == "stop": diff --git a/mypy/dmypy_util.py b/mypy/dmypy_util.py index 2aae41d998da..d95cba9f40b5 100644 --- a/mypy/dmypy_util.py +++ b/mypy/dmypy_util.py @@ -6,7 +6,7 @@ from __future__ import annotations import json -from typing import Any, Final +from typing import Any, Final, Iterable from mypy.ipc import IPCBase @@ -14,7 +14,7 @@ def receive(connection: IPCBase) -> Any: - """Receive JSON data from a connection until EOF. + """Receive single JSON data frame from a connection. Raise OSError if the data received is not valid JSON or if it is not a dict. @@ -23,9 +23,36 @@ def receive(connection: IPCBase) -> Any: if not bdata: raise OSError("No data received") try: - data = json.loads(bdata.decode("utf8")) + data = json.loads(bdata) except Exception as e: raise OSError("Data received is not valid JSON") from e if not isinstance(data, dict): raise OSError(f"Data received is not a dict ({type(data)})") return data + + +def send(connection: IPCBase, data: Any) -> None: + """Send data to a connection encoded and framed. + + The data must be JSON-serializable. We assume that a single send call is a + single frame to be sent on the connect. 
+ """ + connection.write(json.dumps(data)) + + +class WriteToConn: + """Helper class to write to a connection instead of standard output.""" + + def __init__(self, server: IPCBase, output_key: str = "stdout"): + self.server = server + self.output_key = output_key + + def write(self, output: str) -> int: + resp: dict[str, Any] = {} + resp[self.output_key] = output + send(self.server, resp) + return len(output) + + def writelines(self, lines: Iterable[str]) -> None: + for s in lines: + self.write(s) diff --git a/mypy/ipc.py b/mypy/ipc.py index d026f2429a0f..ab01f1b79e7d 100644 --- a/mypy/ipc.py +++ b/mypy/ipc.py @@ -7,6 +7,7 @@ from __future__ import annotations import base64 +import codecs import os import shutil import sys @@ -40,6 +41,10 @@ class IPCBase: This contains logic shared between the client and server, such as reading and writing. + We want to be able to send multiple "messages" over a single connection and + to be able to separate the messages. We do this by encoding the messages + in an alphabet that does not contain spaces, then adding a space for + separation. The last framed message is also followed by a space. 
""" connection: _IPCHandle @@ -47,12 +52,30 @@ class IPCBase: def __init__(self, name: str, timeout: float | None) -> None: self.name = name self.timeout = timeout + self.buffer = bytearray() - def read(self, size: int = 100000) -> bytes: - """Read bytes from an IPC connection until its empty.""" - bdata = bytearray() + def frame_from_buffer(self) -> bytearray | None: + """Return a full frame from the bytes we have in the buffer.""" + space_pos = self.buffer.find(b" ") + if space_pos == -1: + return None + # We have a full frame + bdata = self.buffer[:space_pos] + self.buffer = self.buffer[space_pos + 1 :] + return bdata + + def read(self, size: int = 100000) -> str: + """Read bytes from an IPC connection until we have a full frame.""" + bdata: bytearray | None = bytearray() if sys.platform == "win32": while True: + # Check if we already have a message in the buffer before + # receiving any more data from the socket. + bdata = self.frame_from_buffer() + if bdata is not None: + break + + # Receive more data into the buffer. ov, err = _winapi.ReadFile(self.connection, size, overlapped=True) try: if err == _winapi.ERROR_IO_PENDING: @@ -66,7 +89,10 @@ def read(self, size: int = 100000) -> bytes: _, err = ov.GetOverlappedResult(True) more = ov.getbuffer() if more: - bdata.extend(more) + self.buffer.extend(more) + bdata = self.frame_from_buffer() + if bdata is not None: + break if err == 0: # we are done! break @@ -77,17 +103,34 @@ def read(self, size: int = 100000) -> bytes: raise IPCException("ReadFile operation aborted.") else: while True: + # Check if we already have a message in the buffer before + # receiving any more data from the socket. + bdata = self.frame_from_buffer() + if bdata is not None: + break + + # Receive more data into the buffer. more = self.connection.recv(size) if not more: + # Connection closed break - bdata.extend(more) - return bytes(bdata) + self.buffer.extend(more) + + if not bdata: + # Socket was empty and we didn't get any frame. 
+ # This should only happen if the socket was closed. + return "" + return codecs.decode(bdata, "base64").decode("utf8") + + def write(self, data: str) -> None: + """Write to an IPC connection.""" + + # Frame the data by urlencoding it and separating by space. + encoded_data = codecs.encode(data.encode("utf8"), "base64") + b" " - def write(self, data: bytes) -> None: - """Write bytes to an IPC connection.""" if sys.platform == "win32": try: - ov, err = _winapi.WriteFile(self.connection, data, overlapped=True) + ov, err = _winapi.WriteFile(self.connection, encoded_data, overlapped=True) try: if err == _winapi.ERROR_IO_PENDING: timeout = int(self.timeout * 1000) if self.timeout else _winapi.INFINITE @@ -101,12 +144,11 @@ def write(self, data: bytes) -> None: raise bytes_written, err = ov.GetOverlappedResult(True) assert err == 0, err - assert bytes_written == len(data) + assert bytes_written == len(encoded_data) except OSError as e: raise IPCException(f"Failed to write with error: {e.winerror}") from e else: - self.connection.sendall(data) - self.connection.shutdown(socket.SHUT_WR) + self.connection.sendall(encoded_data) def close(self) -> None: if sys.platform == "win32": diff --git a/mypy/test/testipc.py b/mypy/test/testipc.py index 9034f514bb45..8ef656dc4579 100644 --- a/mypy/test/testipc.py +++ b/mypy/test/testipc.py @@ -15,14 +15,25 @@ def server(msg: str, q: Queue[str]) -> None: server = IPCServer(CONNECTION_NAME) q.put(server.connection_name) - data = b"" + data = "" while not data: with server: - server.write(msg.encode()) + server.write(msg) data = server.read() server.cleanup() +def server_multi_message_echo(q: Queue[str]) -> None: + server = IPCServer(CONNECTION_NAME) + q.put(server.connection_name) + data = "" + with server: + while data != "quit": + data = server.read() + server.write(data) + server.cleanup() + + class IPCTests(TestCase): def test_transaction_large(self) -> None: queue: Queue[str] = Queue() @@ -31,8 +42,8 @@ def 
test_transaction_large(self) -> None: p.start() connection_name = queue.get() with IPCClient(connection_name, timeout=1) as client: - assert client.read() == msg.encode() - client.write(b"test") + assert client.read() == msg + client.write("test") queue.close() queue.join_thread() p.join() @@ -44,12 +55,37 @@ def test_connect_twice(self) -> None: p.start() connection_name = queue.get() with IPCClient(connection_name, timeout=1) as client: - assert client.read() == msg.encode() - client.write(b"") # don't let the server hang up yet, we want to connect again. + assert client.read() == msg + client.write("") # don't let the server hang up yet, we want to connect again. with IPCClient(connection_name, timeout=1) as client: - assert client.read() == msg.encode() - client.write(b"test") + assert client.read() == msg + client.write("test") + queue.close() + queue.join_thread() + p.join() + assert p.exitcode == 0 + + def test_multiple_messages(self) -> None: + queue: Queue[str] = Queue() + p = Process(target=server_multi_message_echo, args=(queue,), daemon=True) + p.start() + connection_name = queue.get() + with IPCClient(connection_name, timeout=1) as client: + # "foo bar" with extra accents on letters. + # In UTF-8 encoding so we don't confuse editors opening this file. + fancy_text = b"f\xcc\xb6o\xcc\xb2\xf0\x9d\x91\x9c \xd0\xb2\xe2\xb7\xa1a\xcc\xb6r\xcc\x93\xcd\x98\xcd\x8c" + client.write(fancy_text.decode("utf-8")) + assert client.read() == fancy_text.decode("utf-8") + + client.write("Test with spaces") + client.write("Test write before reading previous") + time.sleep(0) # yield to the server to force reading of all messages by server. 
+ assert client.read() == "Test with spaces" + assert client.read() == "Test write before reading previous" + + client.write("quit") + assert client.read() == "quit" queue.close() queue.join_thread() p.join() From 85f40b5c8479cbca1d30f912fb95aa243b09c334 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 16 Oct 2023 21:18:55 +0100 Subject: [PATCH 099/144] Correctly handle variadic instances with empty arguments (#16238) Fixes https://github.com/python/mypy/issues/16199 It was surprisingly hard to fix, because all possible fixes strongly interfered with the code that makes "no-args" aliases possible: ```python l = list x: l[int] # OK, same as list[int] ``` So after all I re-organized (and actually simplified) that old code. --- mypy/checkexpr.py | 5 ++- mypy/expandtype.py | 2 +- mypy/messages.py | 6 ++- mypy/semanal.py | 28 +++++++++---- mypy/subtypes.py | 4 +- mypy/typeanal.py | 54 +++++++------------------ mypy/types.py | 2 + test-data/unit/check-flags.test | 19 +++++++++ test-data/unit/check-typevar-tuple.test | 49 +++++++++++++++++++++- 9 files changed, 116 insertions(+), 53 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index a1dd6d830758..a5c8c80e1580 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -4662,7 +4662,10 @@ class C(Generic[T, Unpack[Ts]]): ... info = t.type_object() # We reuse the logic from semanal phase to reduce code duplication. fake = Instance(info, args, line=ctx.line, column=ctx.column) - if not validate_instance(fake, self.chk.fail): + # This code can be only called either from checking a type application, or from + # checking a type alias (after the caller handles no_args aliases), so we know it + # was initially an IndexExpr, and we allow empty tuple type arguments. 
+ if not validate_instance(fake, self.chk.fail, empty_tuple_index=True): fix_instance( fake, self.chk.fail, self.chk.note, disallow_any=False, options=self.chk.options ) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 44716e6da013..cb09a1ee99f5 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -89,7 +89,7 @@ def expand_type_by_instance(typ: Type, instance: Instance) -> Type: def expand_type_by_instance(typ: Type, instance: Instance) -> Type: """Substitute type variables in type using values from an Instance. Type variables are considered to be bound by the class declaration.""" - if not instance.args: + if not instance.args and not instance.type.has_type_var_tuple_type: return typ else: variables: dict[TypeVarId, Type] = {} diff --git a/mypy/messages.py b/mypy/messages.py index 5d03bf1babb9..dc5056f616ea 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -2505,8 +2505,10 @@ def format_literal_value(typ: LiteralType) -> str: else: base_str = itype.type.name if not itype.args: - # No type arguments, just return the type name - return base_str + if not itype.type.has_type_var_tuple_type: + # No type arguments, just return the type name + return base_str + return base_str + "[()]" elif itype.type.fullname == "builtins.tuple": item_type_str = format(itype.args[0]) return f"{'tuple' if options.use_lowercase_names() else 'Tuple'}[{item_type_str}, ...]" diff --git a/mypy/semanal.py b/mypy/semanal.py index a476b62b31ec..1111b1df50e9 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -231,10 +231,11 @@ check_for_explicit_any, detect_diverging_alias, find_self_type, - fix_instance_types, + fix_instance, has_any_from_unimported_type, no_subscript_builtin_alias, type_constructors, + validate_instance, ) from mypy.typeops import function_type, get_type_vars, try_getting_str_literals_from_type from mypy.types import ( @@ -722,7 +723,9 @@ def create_alias(self, tree: MypyFile, target_name: str, alias: str, name: str) target = 
self.named_type_or_none(target_name, []) assert target is not None # Transform List to List[Any], etc. - fix_instance_types(target, self.fail, self.note, self.options) + fix_instance( + target, self.fail, self.note, disallow_any=False, options=self.options + ) alias_node = TypeAlias( target, alias, @@ -3455,7 +3458,7 @@ def analyze_simple_literal_type(self, rvalue: Expression, is_final: bool) -> Typ def analyze_alias( self, name: str, rvalue: Expression, allow_placeholder: bool = False - ) -> tuple[Type | None, list[TypeVarLikeType], set[str], list[str]]: + ) -> tuple[Type | None, list[TypeVarLikeType], set[str], list[str], bool]: """Check if 'rvalue' is a valid type allowed for aliasing (e.g. not a type variable). If yes, return the corresponding type, a list of @@ -3474,7 +3477,7 @@ def analyze_alias( self.fail( "Invalid type alias: expression is not a valid type", rvalue, code=codes.VALID_TYPE ) - return None, [], set(), [] + return None, [], set(), [], False found_type_vars = typ.accept(TypeVarLikeQuery(self, self.tvar_scope)) tvar_defs: list[TypeVarLikeType] = [] @@ -3508,7 +3511,8 @@ def analyze_alias( new_tvar_defs.append(td) qualified_tvars = [node.fullname for _name, node in found_type_vars] - return analyzed, new_tvar_defs, depends_on, qualified_tvars + empty_tuple_index = typ.empty_tuple_index if isinstance(typ, UnboundType) else False + return analyzed, new_tvar_defs, depends_on, qualified_tvars, empty_tuple_index def is_pep_613(self, s: AssignmentStmt) -> bool: if s.unanalyzed_type is not None and isinstance(s.unanalyzed_type, UnboundType): @@ -3591,9 +3595,10 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool: alias_tvars: list[TypeVarLikeType] = [] depends_on: set[str] = set() qualified_tvars: list[str] = [] + empty_tuple_index = False else: tag = self.track_incomplete_refs() - res, alias_tvars, depends_on, qualified_tvars = self.analyze_alias( + res, alias_tvars, depends_on, qualified_tvars, empty_tuple_index = self.analyze_alias( 
lvalue.name, rvalue, allow_placeholder=True ) if not res: @@ -3626,8 +3631,15 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool: # Note: with the new (lazy) type alias representation we only need to set no_args to True # if the expected number of arguments is non-zero, so that aliases like A = List work. # However, eagerly expanding aliases like Text = str is a nice performance optimization. - no_args = isinstance(res, Instance) and not res.args # type: ignore[misc] - fix_instance_types(res, self.fail, self.note, self.options) + no_args = ( + isinstance(res, ProperType) + and isinstance(res, Instance) + and not res.args + and not empty_tuple_index + ) + if isinstance(res, ProperType) and isinstance(res, Instance): + if not validate_instance(res, self.fail, empty_tuple_index): + fix_instance(res, self.fail, self.note, disallow_any=False, options=self.options) # Aliases defined within functions can't be accessed outside # the function, since the symbol table will no longer # exist. Work around by expanding them eagerly when used. 
diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 822c4b0ebf32..638553883dd8 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -544,7 +544,7 @@ def visit_instance(self, left: Instance) -> bool: right_args = ( right_prefix + (TupleType(list(right_middle), fallback),) + right_suffix ) - if not self.proper_subtype: + if not self.proper_subtype and t.args: for arg in map(get_proper_type, t.args): if isinstance(arg, UnpackType): unpacked = get_proper_type(arg.type) @@ -557,6 +557,8 @@ def visit_instance(self, left: Instance) -> bool: break else: return True + if len(left_args) != len(right_args): + return False type_params = zip(left_args, right_args, right.type.defn.type_vars) else: type_params = zip(t.args, right.args, right.type.defn.type_vars) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 385c5d35d67f..4743126c3d56 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -62,6 +62,7 @@ ParamSpecType, PartialType, PlaceholderType, + ProperType, RawExpressionType, RequiredType, SyntheticTypeVisitor, @@ -89,7 +90,6 @@ has_type_vars, ) from mypy.types_utils import is_bad_type_type_item -from mypy.typetraverser import TypeTraverserVisitor from mypy.typevars import fill_typevars T = TypeVar("T") @@ -425,9 +425,10 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) # The only case where instantiate_type_alias() can return an incorrect instance is # when it is top-level instance, so no need to recurse. 
if ( - isinstance(res, Instance) # type: ignore[misc] - and not self.defining_alias - and not validate_instance(res, self.fail) + isinstance(res, ProperType) + and isinstance(res, Instance) + and not (self.defining_alias and self.nesting_level == 0) + and not validate_instance(res, self.fail, t.empty_tuple_index) ): fix_instance( res, @@ -442,7 +443,7 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) res = get_proper_type(res) return res elif isinstance(node, TypeInfo): - return self.analyze_type_with_type_info(node, t.args, t) + return self.analyze_type_with_type_info(node, t.args, t, t.empty_tuple_index) elif node.fullname in TYPE_ALIAS_NAMES: return AnyType(TypeOfAny.special_form) # Concatenate is an operator, no need for a proper type @@ -700,7 +701,7 @@ def get_omitted_any(self, typ: Type, fullname: str | None = None) -> AnyType: return get_omitted_any(disallow_any, self.fail, self.note, typ, self.options, fullname) def analyze_type_with_type_info( - self, info: TypeInfo, args: Sequence[Type], ctx: Context + self, info: TypeInfo, args: Sequence[Type], ctx: Context, empty_tuple_index: bool ) -> Type: """Bind unbound type when were able to find target TypeInfo. @@ -735,7 +736,9 @@ def analyze_type_with_type_info( # Check type argument count. 
instance.args = tuple(flatten_nested_tuples(instance.args)) - if not self.defining_alias and not validate_instance(instance, self.fail): + if not (self.defining_alias and self.nesting_level == 0) and not validate_instance( + instance, self.fail, empty_tuple_index + ): fix_instance( instance, self.fail, @@ -1203,7 +1206,7 @@ def visit_placeholder_type(self, t: PlaceholderType) -> Type: else: # TODO: Handle non-TypeInfo assert isinstance(n.node, TypeInfo) - return self.analyze_type_with_type_info(n.node, t.args, t) + return self.analyze_type_with_type_info(n.node, t.args, t, False) def analyze_callable_args_for_paramspec( self, callable_args: Type, ret_type: Type, fallback: Instance @@ -2256,7 +2259,7 @@ def make_optional_type(t: Type) -> Type: return UnionType([t, NoneType()], t.line, t.column) -def validate_instance(t: Instance, fail: MsgCallback) -> bool: +def validate_instance(t: Instance, fail: MsgCallback, empty_tuple_index: bool) -> bool: """Check if this is a well-formed instance with respect to argument count/positions.""" # TODO: combine logic with instantiate_type_alias(). if any(unknown_unpack(a) for a in t.args): @@ -2279,8 +2282,9 @@ def validate_instance(t: Instance, fail: MsgCallback) -> bool: ) return False elif not t.args: - # The Any arguments should be set by the caller. - return False + if not (empty_tuple_index and len(t.type.type_vars) == 1): + # The Any arguments should be set by the caller. + return False else: # We also need to check if we are not performing a type variable tuple split. unpack = find_unpack_in_list(t.args) @@ -2313,34 +2317,6 @@ def validate_instance(t: Instance, fail: MsgCallback) -> bool: return True -def fix_instance_types(t: Type, fail: MsgCallback, note: MsgCallback, options: Options) -> None: - """Recursively fix all instance types (type argument count) in a given type. - - For example 'Union[Dict, List[str, int]]' will be transformed into - 'Union[Dict[Any, Any], List[Any]]' in place. 
- """ - t.accept(InstanceFixer(fail, note, options)) - - -class InstanceFixer(TypeTraverserVisitor): - def __init__(self, fail: MsgCallback, note: MsgCallback, options: Options) -> None: - self.fail = fail - self.note = note - self.options = options - - def visit_instance(self, typ: Instance) -> None: - super().visit_instance(typ) - if not validate_instance(typ, self.fail): - fix_instance( - typ, - self.fail, - self.note, - disallow_any=False, - options=self.options, - use_generic_error=True, - ) - - def find_self_type(typ: Type, lookup: Callable[[str], SymbolTableNode | None]) -> bool: return typ.accept(HasSelfType(lookup)) diff --git a/mypy/types.py b/mypy/types.py index 09ba68aae88a..ea81609fc605 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -3163,6 +3163,8 @@ def visit_instance(self, t: Instance) -> str: s += f"[{self.list_str(t.args)}, ...]" else: s += f"[{self.list_str(t.args)}]" + elif t.type.has_type_var_tuple_type and len(t.type.type_vars) == 1: + s += "[()]" if self.id_mapper: s += f"<{self.id_mapper.id(t.type)}>" return s diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index 06b7cab8391b..546d02a07ad0 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -2277,3 +2277,22 @@ list(2) # E: No overload variant of "list" matches argument type "int" [call-o # N: def [T] __init__(self) -> List[T] \ # N: def [T] __init__(self, x: Iterable[T]) -> List[T] [builtins fixtures/list.pyi] + +[case testNestedGenericInAliasDisallow] +# flags: --disallow-any-generics +from typing import TypeVar, Generic, List, Union + +class C(Generic[T]): ... + +A = Union[C, List] # E: Missing type parameters for generic type "C" \ + # E: Missing type parameters for generic type "List" +[builtins fixtures/list.pyi] + +[case testNestedGenericInAliasAllow] +# flags: --allow-any-generics +from typing import TypeVar, Generic, List, Union + +class C(Generic[T]): ... 
+ +A = Union[C, List] # OK +[builtins fixtures/list.pyi] diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index 22a30432d098..4a281fbf0b49 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -118,7 +118,10 @@ variadic_single: Variadic[int] reveal_type(variadic_single) # N: Revealed type is "__main__.Variadic[builtins.int]" empty: Variadic[()] -reveal_type(empty) # N: Revealed type is "__main__.Variadic[Unpack[builtins.tuple[Any, ...]]]" +reveal_type(empty) # N: Revealed type is "__main__.Variadic[()]" + +omitted: Variadic +reveal_type(omitted) # N: Revealed type is "__main__.Variadic[Unpack[builtins.tuple[Any, ...]]]" bad: Variadic[Unpack[Tuple[int, ...]], str, Unpack[Tuple[bool, ...]]] # E: More than one Unpack in a type is not allowed reveal_type(bad) # N: Revealed type is "__main__.Variadic[Unpack[builtins.tuple[builtins.int, ...]], builtins.str]" @@ -1846,6 +1849,50 @@ def foo3(func: Callable[[int, Unpack[Args2]], T], *args: Unpack[Args2]) -> T: return submit(func, 1, *args) [builtins fixtures/tuple.pyi] +[case testTypeVarTupleEmptySpecialCase] +from typing import Any, Callable, Generic +from typing_extensions import Unpack, TypeVarTuple + +Ts = TypeVarTuple("Ts") +class MyClass(Generic[Unpack[Ts]]): + func: Callable[[Unpack[Ts]], object] + + def __init__(self, func: Callable[[Unpack[Ts]], object]) -> None: + self.func = func + +explicit: MyClass[()] +reveal_type(explicit) # N: Revealed type is "__main__.MyClass[()]" +reveal_type(explicit.func) # N: Revealed type is "def () -> builtins.object" + +a: Any +explicit_2 = MyClass[()](a) +reveal_type(explicit_2) # N: Revealed type is "__main__.MyClass[()]" +reveal_type(explicit_2.func) # N: Revealed type is "def () -> builtins.object" + +Alias = MyClass[()] +explicit_3: Alias +reveal_type(explicit_3) # N: Revealed type is "__main__.MyClass[()]" +reveal_type(explicit_3.func) # N: Revealed type is "def () -> builtins.object" 
+ +explicit_4 = Alias(a) +reveal_type(explicit_4) # N: Revealed type is "__main__.MyClass[()]" +reveal_type(explicit_4.func) # N: Revealed type is "def () -> builtins.object" + +def no_args() -> None: ... +implicit = MyClass(no_args) +reveal_type(implicit) # N: Revealed type is "__main__.MyClass[()]" +reveal_type(implicit.func) # N: Revealed type is "def () -> builtins.object" + +def one_arg(__a: int) -> None: ... +x = MyClass(one_arg) +x = explicit # E: Incompatible types in assignment (expression has type "MyClass[()]", variable has type "MyClass[int]") + +# Consistently handle special case for no argument aliases +Direct = MyClass +y = Direct(one_arg) +reveal_type(y) # N: Revealed type is "__main__.MyClass[builtins.int]" +[builtins fixtures/tuple.pyi] + [case testTypeVarTupleRuntimeTypeApplication] from typing import Generic, TypeVar, Tuple from typing_extensions import Unpack, TypeVarTuple From f5a3e233c99077317c4cf6fee7745686d67fd21b Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Tue, 17 Oct 2023 13:23:05 +0300 Subject: [PATCH 100/144] Bump test deps: `ruff` and `pre-commit-hooks` (#16273) Release post: https://astral.sh/blog/ruff-v0.1.0 --- .pre-commit-config.yaml | 4 ++-- test-requirements.txt | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e92d498fa3cc..bd2a09b7a8cf 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ exclude: '^(mypyc/external/)|(mypy/typeshed/)' # Exclude all vendored code from lints repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 # must match test-requirements.txt + rev: v4.5.0 # must match test-requirements.txt hooks: - id: trailing-whitespace - id: end-of-file-fixer @@ -10,7 +10,7 @@ repos: hooks: - id: black - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.292 # must match test-requirements.txt + rev: v0.1.0 # must match test-requirements.txt hooks: - id: ruff args: 
[--exit-non-zero-on-fix] diff --git a/test-requirements.txt b/test-requirements.txt index bdaad16fa88e..a1fa98917872 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -6,11 +6,11 @@ filelock>=3.3.0 # lxml 4.9.3 switched to manylinux_2_28, the wheel builder still uses manylinux2014 lxml>=4.9.1,<4.9.3; (python_version<'3.11' or sys_platform!='win32') and python_version<'3.12' pre-commit -pre-commit-hooks==4.4.0 +pre-commit-hooks==4.5.0 psutil>=4.0 pytest>=7.4.0 pytest-xdist>=1.34.0 pytest-cov>=2.10.0 -ruff==0.0.292 # must match version in .pre-commit-config.yaml +ruff==0.1.0 # must match version in .pre-commit-config.yaml setuptools>=65.5.1 tomli>=1.1.0 # needed even on py311+ so the self check passes with --python-version 3.7 From 4a9e6e60884c0bab89eb2ec6e947373c871f8aee Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 17 Oct 2023 15:46:34 +0100 Subject: [PATCH 101/144] Attempt to fix daemon crash related to ABCs (#16275) I couldn't reproduce the crash with a small example, but this seems to fix this crash in a large codebase: ``` Traceback (most recent call last): File "/Users/jukka/src/mypy/mypy/dmypy_server.py", line 234, in serve resp = self.run_command(command, data) File "/Users/jukka/src/mypy/mypy/dmypy_server.py", line 281, in run_command ret = method(self, **data) File "/Users/jukka/src/mypy/mypy/dmypy_server.py", line 359, in cmd_check return self.check(sources, export_types, is_tty, terminal_width) File "/Users/jukka/src/mypy/mypy/dmypy_server.py", line 413, in check res = self.initialize_fine_grained(sources, is_tty, terminal_width) File "/Users/jukka/src/mypy/mypy/dmypy_server.py", line 498, in initialize_fine_grained messages = self.fine_grained_manager.update(changed, removed) File "/Users/jukka/src/mypy/mypy/server/update.py", line 267, in update result = self.update_one( File "/Users/jukka/src/mypy/mypy/server/update.py", line 369, in update_one result = self.update_module(next_id, next_path, next_id in removed_set, 
followed) File "/Users/jukka/src/mypy/mypy/server/update.py", line 431, in update_module result = update_module_isolated( File "/Users/jukka/src/mypy/mypy/server/update.py", line 667, in update_module_isolated state.type_check_first_pass() File "/Users/jukka/src/mypy/mypy/build.py", line 2306, in type_check_first_pass self.type_checker().check_first_pass() File "/Users/jukka/src/mypy/mypy/checker.py", line 475, in check_first_pass self.accept(d) File "/Users/jukka/src/mypy/mypy/checker.py", line 587, in accept report_internal_error(err, self.errors.file, stmt.line, self.errors, self.options) File "/Users/jukka/src/mypy/mypy/errors.py", line 1261, in report_internal_error raise err File "/Users/jukka/src/mypy/mypy/checker.py", line 585, in accept stmt.accept(self) File "/Users/jukka/src/mypy/mypy/nodes.py", line 900, in accept return visitor.visit_decorator(self) File "/Users/jukka/src/mypy/mypy/checker.py", line 4773, in visit_decorator self.visit_decorator_inner(e) File "/Users/jukka/src/mypy/mypy/checker.py", line 4778, in visit_decorator_inner self.check_func_item(e.func, name=e.func.name, allow_empty=allow_empty) File "/Users/jukka/src/mypy/mypy/checker.py", line 1071, in check_func_item self.check_func_def(defn, typ, name, allow_empty) File "/Users/jukka/src/mypy/mypy/checker.py", line 1281, in check_func_def self.accept(item.body) File "/Users/jukka/src/mypy/mypy/checker.py", line 587, in accept report_internal_error(err, self.errors.file, stmt.line, self.errors, self.options) File "/Users/jukka/src/mypy/mypy/errors.py", line 1261, in report_internal_error raise err File "/Users/jukka/src/mypy/mypy/checker.py", line 585, in accept stmt.accept(self) File "/Users/jukka/src/mypy/mypy/nodes.py", line 1226, in accept return visitor.visit_block(self) File "/Users/jukka/src/mypy/mypy/checker.py", line 2754, in visit_block self.accept(s) File "/Users/jukka/src/mypy/mypy/checker.py", line 587, in accept report_internal_error(err, self.errors.file, stmt.line, 
self.errors, self.options) File "/Users/jukka/src/mypy/mypy/errors.py", line 1261, in report_internal_error raise err File "/Users/jukka/src/mypy/mypy/checker.py", line 585, in accept stmt.accept(self) File "/Users/jukka/src/mypy/mypy/nodes.py", line 1313, in accept return visitor.visit_assignment_stmt(self) File "/Users/jukka/src/mypy/mypy/checker.py", line 2802, in visit_assignment_stmt self.check_assignment(s.lvalues[-1], s.rvalue, s.type is None, s.new_syntax) File "/Users/jukka/src/mypy/mypy/checker.py", line 3009, in check_assignment rvalue_type = self.expr_checker.accept(rvalue, type_context=type_context) File "/Users/jukka/src/mypy/mypy/checkexpr.py", line 5372, in accept report_internal_error( File "/Users/jukka/src/mypy/mypy/errors.py", line 1261, in report_internal_error raise err File "/Users/jukka/src/mypy/mypy/checkexpr.py", line 5370, in accept typ = node.accept(self) File "/Users/jukka/src/mypy/mypy/nodes.py", line 1907, in accept return visitor.visit_call_expr(self) File "/Users/jukka/src/mypy/mypy/checkexpr.py", line 452, in visit_call_expr return self.visit_call_expr_inner(e, allow_none_return=allow_none_return) File "/Users/jukka/src/mypy/mypy/checkexpr.py", line 581, in visit_call_expr_inner ret_type = self.check_call_expr_with_callee_type( File "/Users/jukka/src/mypy/mypy/checkexpr.py", line 1420, in check_call_expr_with_callee_type ret_type, callee_type = self.check_call( File "/Users/jukka/src/mypy/mypy/checkexpr.py", line 1514, in check_call return self.check_callable_call( File "/Users/jukka/src/mypy/mypy/checkexpr.py", line 1638, in check_callable_call self.msg.cannot_instantiate_abstract_class( File "/Users/jukka/src/mypy/mypy/messages.py", line 1479, in cannot_instantiate_abstract_class attrs = format_string_list([f'"{a}"' for a in abstract_attributes]) File "/Users/jukka/src/mypy/mypy/messages.py", line 2948, in format_string_list assert lst AssertionError ``` I suspect that we first set `is_abstract` to true, and later the class was 
no longer abstract and `abstract_attributes` got cleared, but `is_abstract` was stuck at true. --- mypy/semanal_classprop.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mypy/semanal_classprop.py b/mypy/semanal_classprop.py index dfd4e5b6f122..b5f1b2181761 100644 --- a/mypy/semanal_classprop.py +++ b/mypy/semanal_classprop.py @@ -46,6 +46,8 @@ def calculate_class_abstract_status(typ: TypeInfo, is_stub_file: bool, errors: E abstract attribute. Also compute a list of abstract attributes. Report error is required ABCMeta metaclass is missing. """ + typ.is_abstract = False + typ.abstract_attributes = [] if typ.typeddict_type: return # TypedDict can't be abstract concrete: set[str] = set() @@ -56,7 +58,6 @@ def calculate_class_abstract_status(typ: TypeInfo, is_stub_file: bool, errors: E # Special case: NewTypes are considered as always non-abstract, so they can be used as: # Config = NewType('Config', Mapping[str, str]) # default = Config({'cannot': 'modify'}) # OK - typ.abstract_attributes = [] return for base in typ.mro: for name, symnode in base.names.items(): From f3bdf5caaf6ccbba6c5df21b483fb9b716f13851 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 18 Oct 2023 04:40:46 +0100 Subject: [PATCH 102/144] Support fancy new syntax for variadic types (#16242) This is the last significant thing I am aware of that is needed for PEP 646 support. After this and other currently open PRs are merged, I will make an additional pass grepping for usual suspects and verifying we didn't miss anything. Then we can flip the switch and announce this as supported. 
--- mypy/exprtotype.py | 5 ++- mypy/fastparse.py | 8 +--- mypy/messages.py | 2 + mypy/options.py | 3 ++ mypy/semanal.py | 56 ++++++++++++++----------- mypy/typeanal.py | 5 ++- test-data/unit/check-python311.test | 65 +++++++++++++++++++++++++++++ test-data/unit/check-python312.test | 2 - 8 files changed, 111 insertions(+), 35 deletions(-) diff --git a/mypy/exprtotype.py b/mypy/exprtotype.py index 5f0ef79acbd7..7a50429b81d1 100644 --- a/mypy/exprtotype.py +++ b/mypy/exprtotype.py @@ -103,7 +103,10 @@ def expr_to_unanalyzed_type( return expr_to_unanalyzed_type(args[0], options, allow_new_syntax, expr) else: base.args = tuple( - expr_to_unanalyzed_type(arg, options, allow_new_syntax, expr) for arg in args + expr_to_unanalyzed_type( + arg, options, allow_new_syntax, expr, allow_unpack=True + ) + for arg in args ) if not base.args: base.empty_tuple_index = True diff --git a/mypy/fastparse.py b/mypy/fastparse.py index fe158d468ce8..95d99db84a15 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -1762,7 +1762,6 @@ def __init__( self.override_column = override_column self.node_stack: list[AST] = [] self.is_evaluated = is_evaluated - self.allow_unpack = False def convert_column(self, column: int) -> int: """Apply column override if defined; otherwise return column. @@ -2039,19 +2038,14 @@ def visit_Attribute(self, n: Attribute) -> Type: else: return self.invalid_type(n) - # Used for Callable[[X *Ys, Z], R] + # Used for Callable[[X *Ys, Z], R] etc. def visit_Starred(self, n: ast3.Starred) -> Type: return UnpackType(self.visit(n.value), from_star_syntax=True) # List(expr* elts, expr_context ctx) def visit_List(self, n: ast3.List) -> Type: assert isinstance(n.ctx, ast3.Load) - old_allow_unpack = self.allow_unpack - # We specifically only allow starred expressions in a list to avoid - # confusing errors for top-level unpacks (e.g. in base classes). 
- self.allow_unpack = True result = self.translate_argument_list(n.elts) - self.allow_unpack = old_allow_unpack return result diff --git a/mypy/messages.py b/mypy/messages.py index dc5056f616ea..19aafedd5586 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -2516,6 +2516,8 @@ def format_literal_value(typ: LiteralType) -> str: # There are type arguments. Convert the arguments to strings. return f"{base_str}[{format_list(itype.args)}]" elif isinstance(typ, UnpackType): + if options.use_star_unpack(): + return f"*{format(typ.type)}" return f"Unpack[{format(typ.type)}]" elif isinstance(typ, TypeVarType): # This is similar to non-generic instance types. diff --git a/mypy/options.py b/mypy/options.py index 007ae0a78aa1..603ba79935ee 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -385,6 +385,9 @@ def use_or_syntax(self) -> bool: return not self.force_union_syntax return False + def use_star_unpack(self) -> bool: + return self.python_version >= (3, 11) + # To avoid breaking plugin compatibility, keep providing new_semantic_analyzer @property def new_semantic_analyzer(self) -> bool: diff --git a/mypy/semanal.py b/mypy/semanal.py index 1111b1df50e9..9c2452252208 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -1992,38 +1992,42 @@ def analyze_class_typevar_declaration(self, base: Type) -> tuple[TypeVarLikeList return None def analyze_unbound_tvar(self, t: Type) -> tuple[str, TypeVarLikeExpr] | None: - if not isinstance(t, UnboundType): - return None - unbound = t - sym = self.lookup_qualified(unbound.name, unbound) + if isinstance(t, UnpackType) and isinstance(t.type, UnboundType): + return self.analyze_unbound_tvar_impl(t.type, allow_tvt=True) + if isinstance(t, UnboundType): + sym = self.lookup_qualified(t.name, t) + if sym and sym.fullname in ("typing.Unpack", "typing_extensions.Unpack"): + inner_t = t.args[0] + if isinstance(inner_t, UnboundType): + return self.analyze_unbound_tvar_impl(inner_t, allow_tvt=True) + return None + return 
self.analyze_unbound_tvar_impl(t) + return None + + def analyze_unbound_tvar_impl( + self, t: UnboundType, allow_tvt: bool = False + ) -> tuple[str, TypeVarLikeExpr] | None: + sym = self.lookup_qualified(t.name, t) if sym and isinstance(sym.node, PlaceholderNode): self.record_incomplete_ref() - if sym and isinstance(sym.node, ParamSpecExpr): + if not allow_tvt and sym and isinstance(sym.node, ParamSpecExpr): if sym.fullname and not self.tvar_scope.allow_binding(sym.fullname): # It's bound by our type variable scope return None - return unbound.name, sym.node - if sym and sym.fullname in ("typing.Unpack", "typing_extensions.Unpack"): - inner_t = unbound.args[0] - if not isinstance(inner_t, UnboundType): + return t.name, sym.node + if allow_tvt and sym and isinstance(sym.node, TypeVarTupleExpr): + if sym.fullname and not self.tvar_scope.allow_binding(sym.fullname): + # It's bound by our type variable scope return None - inner_unbound = inner_t - inner_sym = self.lookup_qualified(inner_unbound.name, inner_unbound) - if inner_sym and isinstance(inner_sym.node, PlaceholderNode): - self.record_incomplete_ref() - if inner_sym and isinstance(inner_sym.node, TypeVarTupleExpr): - if inner_sym.fullname and not self.tvar_scope.allow_binding(inner_sym.fullname): - # It's bound by our type variable scope - return None - return inner_unbound.name, inner_sym.node - if sym is None or not isinstance(sym.node, TypeVarExpr): + return t.name, sym.node + if sym is None or not isinstance(sym.node, TypeVarExpr) or allow_tvt: return None elif sym.fullname and not self.tvar_scope.allow_binding(sym.fullname): # It's bound by our type variable scope return None else: assert isinstance(sym.node, TypeVarExpr) - return unbound.name, sym.node + return t.name, sym.node def get_all_bases_tvars( self, base_type_exprs: list[Expression], removed: list[int] @@ -5333,7 +5337,9 @@ def analyze_type_application_args(self, expr: IndexExpr) -> list[Type] | None: has_param_spec = False num_args = -1 elif 
isinstance(base, RefExpr) and isinstance(base.node, TypeInfo): - allow_unpack = base.node.has_type_var_tuple_type + allow_unpack = ( + base.node.has_type_var_tuple_type or base.node.fullname == "builtins.tuple" + ) has_param_spec = base.node.has_param_spec_type num_args = len(base.node.type_vars) else: @@ -5343,7 +5349,7 @@ def analyze_type_application_args(self, expr: IndexExpr) -> list[Type] | None: for item in items: try: - typearg = self.expr_to_unanalyzed_type(item) + typearg = self.expr_to_unanalyzed_type(item, allow_unpack=True) except TypeTranslationError: self.fail("Type expected within [...]", expr) return None @@ -6608,8 +6614,10 @@ def type_analyzer( tpan.global_scope = not self.type and not self.function_stack return tpan - def expr_to_unanalyzed_type(self, node: Expression) -> ProperType: - return expr_to_unanalyzed_type(node, self.options, self.is_stub_file) + def expr_to_unanalyzed_type(self, node: Expression, allow_unpack: bool = False) -> ProperType: + return expr_to_unanalyzed_type( + node, self.options, self.is_stub_file, allow_unpack=allow_unpack + ) def anal_type( self, diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 4743126c3d56..b16d0ac066b4 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -964,7 +964,10 @@ def visit_unpack_type(self, t: UnpackType) -> Type: if not self.allow_unpack: self.fail(message_registry.INVALID_UNPACK_POSITION, t.type, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) - return UnpackType(self.anal_type(t.type), from_star_syntax=t.from_star_syntax) + self.allow_type_var_tuple = True + result = UnpackType(self.anal_type(t.type), from_star_syntax=t.from_star_syntax) + self.allow_type_var_tuple = False + return result def visit_parameters(self, t: Parameters) -> Type: raise NotImplementedError("ParamSpec literals cannot have unbound TypeVars") diff --git a/test-data/unit/check-python311.test b/test-data/unit/check-python311.test index 5870c7e17bcc..37dc3ca0f5b4 100644 --- 
a/test-data/unit/check-python311.test +++ b/test-data/unit/check-python311.test @@ -77,3 +77,68 @@ async def coro() -> Generator[List[Any], None, None]: reveal_type(coro) # N: Revealed type is "def () -> typing.Coroutine[Any, Any, typing.Generator[builtins.list[Any], None, None]]" [builtins fixtures/async_await.pyi] [typing fixtures/typing-async.pyi] + +[case testTypeVarTupleNewSyntaxAnnotations] +Ints = tuple[int, int, int] +x: tuple[str, *Ints] +reveal_type(x) # N: Revealed type is "Tuple[builtins.str, builtins.int, builtins.int, builtins.int]" +y: tuple[int, *tuple[int, ...]] +reveal_type(y) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.int, ...]]]" +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleNewSyntaxGenerics] +from typing import Generic, TypeVar, TypeVarTuple + +T = TypeVar("T") +Ts = TypeVarTuple("Ts") +class C(Generic[T, *Ts]): + attr: tuple[int, *Ts, str] + + def test(self) -> None: + reveal_type(self.attr) # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`2], builtins.str]" + self.attr = ci # E: Incompatible types in assignment (expression has type "C[*Tuple[int, ...]]", variable has type "Tuple[int, *Ts, str]") + def meth(self, *args: *Ts) -> T: ... 
+ +ci: C[*tuple[int, ...]] +reveal_type(ci) # N: Revealed type is "__main__.C[Unpack[builtins.tuple[builtins.int, ...]]]" +reveal_type(ci.meth) # N: Revealed type is "def (*args: builtins.int) -> builtins.int" +c3: C[str, str, str] +reveal_type(c3) # N: Revealed type is "__main__.C[builtins.str, builtins.str, builtins.str]" + +A = C[int, *Ts] +B = tuple[str, *tuple[str, str], str] +z: A[*B] +reveal_type(z) # N: Revealed type is "__main__.C[builtins.int, builtins.str, builtins.str, builtins.str, builtins.str]" +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleNewSyntaxCallables] +from typing import Generic, overload, TypeVar + +T1 = TypeVar("T1") +T2 = TypeVar("T2") +class MyClass(Generic[T1, T2]): + @overload + def __init__(self: MyClass[None, None]) -> None: ... + + @overload + def __init__(self: MyClass[T1, None], *types: *tuple[type[T1]]) -> None: ... + + @overload + def __init__(self: MyClass[T1, T2], *types: *tuple[type[T1], type[T2]]) -> None: ... + + def __init__(self: MyClass[T1, T2], *types: *tuple[type, ...]) -> None: + pass + +myclass = MyClass() +reveal_type(myclass) # N: Revealed type is "__main__.MyClass[None, None]" +myclass1 = MyClass(float) +reveal_type(myclass1) # N: Revealed type is "__main__.MyClass[builtins.float, None]" +myclass2 = MyClass(float, float) +reveal_type(myclass2) # N: Revealed type is "__main__.MyClass[builtins.float, builtins.float]" +myclass3 = MyClass(float, float, float) # E: No overload variant of "MyClass" matches argument types "Type[float]", "Type[float]", "Type[float]" \ + # N: Possible overload variants: \ + # N: def [T1, T2] __init__(self) -> MyClass[None, None] \ + # N: def [T1, T2] __init__(self, Type[T1], /) -> MyClass[T1, None] \ + # N: def [T1, T2] __init__(Type[T1], Type[T2], /) -> MyClass[T1, T2] +reveal_type(myclass3) # N: Revealed type is "Any" +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-python312.test b/test-data/unit/check-python312.test index 91aca7794071..cb89eb34880c 100644 --- 
a/test-data/unit/check-python312.test +++ b/test-data/unit/check-python312.test @@ -41,8 +41,6 @@ type Alias2[**P] = Callable[P, int] # E: PEP 695 type aliases are not yet suppo # E: Value of type "int" is not indexable \ # E: Name "P" is not defined type Alias3[*Ts] = tuple[*Ts] # E: PEP 695 type aliases are not yet supported \ - # E: Type expected within [...] \ - # E: The type "Type[Tuple[Any, ...]]" is not generic and not indexable \ # E: Name "Ts" is not defined class Cls1[T: int]: ... # E: PEP 695 generics are not yet supported From ffe89a21058eaa6eb1c1796d9ab87aece965e2d9 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Tue, 17 Oct 2023 20:51:22 -0700 Subject: [PATCH 103/144] Add a changelog (#16280) I pre-populated it with blog post entries since mypy 1.0. There might be some markdown or backslashes that are borked, feel free to push to this PR if you notice anything. --- CHANGELOG.md | 1254 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 1254 insertions(+) create mode 100644 CHANGELOG.md diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 000000000000..d8237795112b --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,1254 @@ +# Mypy Release Notes + +## Unreleased + +... + +#### Other Notable Changes and Fixes +... + +#### Acknowledgements +... + +## Mypy 1.6 + +[Tuesday, 10 October 2023](https://mypy-lang.blogspot.com/2023/10/mypy-16-released.html) + +We’ve just uploaded mypy 1.6 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows: + + python3 -m pip install -U mypy + +You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io). 
+ +#### Introduce Error Subcodes for Import Errors + +Mypy now uses the error code import-untyped if an import targets an installed library that doesn’t support static type checking, and no stub files are available. Other invalid imports produce the import-not-found error code. They both are subcodes of the import error code, which was previously used for both kinds of import-related errors. + +Use \--disable-error-code=import-untyped to only ignore import errors about installed libraries without stubs. This way mypy will still report errors about typos in import statements, for example. + +If you use \--warn-unused-ignore or \--strict, mypy will complain if you use \# type: ignore\[import\] to ignore an import error. You are expected to use one of the more specific error codes instead. Otherwise, ignoring the import error code continues to silence both errors. + +This feature was contributed by Shantanu (PR [15840](https://github.com/python/mypy/pull/15840), PR [14740](https://github.com/python/mypy/pull/14740)). + +#### Remove Support for Targeting Python 3.6 and Earlier + +Running mypy with \--python-version 3.6, for example, is no longer supported. Python 3.6 hasn’t been properly supported by mypy for some time now, and this makes it explicit. This was contributed by Nikita Sobolev (PR [15668](https://github.com/python/mypy/pull/15668)). + +#### Selective Filtering of \--disallow-untyped-calls Targets + +Using \--disallow-untyped-calls could be annoying when using libraries with missing type information, as mypy would generate many errors about code that uses the library. Now you can use \--untyped-calls-exclude=acme, for example, to disable these errors about calls targeting functions defined in the acme package. Refer to the [documentation](https://mypy.readthedocs.io/en/latest/command_line.html#cmdoption-mypy-untyped-calls-exclude) for more information. 
+ +This feature was contributed by Ivan Levkivskyi (PR [15845](https://github.com/python/mypy/pull/15845)). + +#### Improved Type Inference between Callable Types + +Mypy now does a better job inferring type variables inside arguments of callable types. For example, this code fragment now type checks correctly: + +```python +def f(c: Callable[[T, S], None]) -> Callable[[str, T, S], None]: ... +def g(*x: int) -> None: ... + +reveal_type(f(g)) # Callable[[str, int, int], None] +``` + +This was contributed by Ivan Levkivskyi (PR [15910](https://github.com/python/mypy/pull/15910)). + +#### Don’t Consider None and TypeVar to Overlap in Overloads + +Mypy now doesn’t consider an overload item with an argument type None to overlap with a type variable: + +```python +@overload +def f(x: None) -> None: .. +@overload +def f(x: T) -> Foo[T]: ... +... +``` + +Previously mypy would generate an error about the definition of f above. This is slightly unsafe if the upper bound of T is object, since the value of the type variable could be None. We relaxed the rules a little, since this solves a common issue. + +This feature was contributed by Ivan Levkivskyi (PR [15846](https://github.com/python/mypy/pull/15846)). 
+ +#### Improvements to \--new-type-inference + +The experimental new type inference algorithm (polymorphic inference) introduced as an opt-in feature in mypy 1.5 has several improvements: + +* Improve transitive closure computation during constraint solving (Ivan Levkivskyi, PR [15754](https://github.com/python/mypy/pull/15754)) +* Add support for upper bounds and values with \--new-type-inference (Ivan Levkivskyi, PR [15813](https://github.com/python/mypy/pull/15813)) +* Basic support for variadic types with \--new-type-inference (Ivan Levkivskyi, PR [15879](https://github.com/python/mypy/pull/15879)) +* Polymorphic inference: support for parameter specifications and lambdas (Ivan Levkivskyi, PR [15837](https://github.com/python/mypy/pull/15837)) +* Invalidate cache when adding \--new-type-inference (Marc Mueller, PR [16059](https://github.com/python/mypy/pull/16059)) + +**Note:** We are planning to enable \--new-type-inference by default in mypy 1.7. Please try this out and let us know if you encounter any issues. + +#### ParamSpec Improvements + +* Support self-types containing ParamSpec (Ivan Levkivskyi, PR [15903](https://github.com/python/mypy/pull/15903)) +* Allow “…” in Concatenate, and clean up ParamSpec literals (Ivan Levkivskyi, PR [15905](https://github.com/python/mypy/pull/15905)) +* Fix ParamSpec inference for callback protocols (Ivan Levkivskyi, PR [15986](https://github.com/python/mypy/pull/15986)) +* Infer ParamSpec constraint from arguments (Ivan Levkivskyi, PR [15896](https://github.com/python/mypy/pull/15896)) +* Fix crash on invalid type variable with ParamSpec (Ivan Levkivskyi, PR [15953](https://github.com/python/mypy/pull/15953)) +* Fix subtyping between ParamSpecs (Ivan Levkivskyi, PR [15892](https://github.com/python/mypy/pull/15892)) + +#### Stubgen Improvements + +* Add option to include docstrings with stubgen (chylek, PR [13284](https://github.com/python/mypy/pull/13284)) +* Add required ... 
initializer to NamedTuple fields with default values (Nikita Sobolev, PR [15680](https://github.com/python/mypy/pull/15680)) + +#### Stubtest Improvements + +* Fix \_\_mypy-replace false positives (Alex Waygood, PR [15689](https://github.com/python/mypy/pull/15689)) +* Fix edge case for bytes enum subclasses (Alex Waygood, PR [15943](https://github.com/python/mypy/pull/15943)) +* Generate error if typeshed is missing modules from the stdlib (Alex Waygood, PR [15729](https://github.com/python/mypy/pull/15729)) +* Fixes to new check for missing stdlib modules (Alex Waygood, PR [15960](https://github.com/python/mypy/pull/15960)) +* Fix stubtest enum.Flag edge case (Alex Waygood, PR [15933](https://github.com/python/mypy/pull/15933)) + +#### Documentation Improvements + +* Do not advertise to create your own assert\_never helper (Nikita Sobolev, PR [15947](https://github.com/python/mypy/pull/15947)) +* Fix all the missing references found within the docs (Albert Tugushev, PR [15875](https://github.com/python/mypy/pull/15875)) +* Document await-not-async error code (Shantanu, PR [15858](https://github.com/python/mypy/pull/15858)) +* Improve documentation of disabling error codes (Shantanu, PR [15841](https://github.com/python/mypy/pull/15841)) + +#### Other Notable Changes and Fixes + +* Make unsupported PEP 695 features (introduced in Python 3.12) give a reasonable error message (Shantanu, PR [16013](https://github.com/python/mypy/pull/16013)) +* Remove the \--py2 command-line argument (Marc Mueller, PR [15670](https://github.com/python/mypy/pull/15670)) +* Change empty tuple from tuple\[\] to tuple\[()\] in error messages (Nikita Sobolev, PR [15783](https://github.com/python/mypy/pull/15783)) +* Fix assert\_type failures when some nodes are deferred (Nikita Sobolev, PR [15920](https://github.com/python/mypy/pull/15920)) +* Generate error on unbound TypeVar with values (Nikita Sobolev, PR [15732](https://github.com/python/mypy/pull/15732)) +* Fix over-eager 
types-google-cloud-ndb suggestion (Shantanu, PR [15347](https://github.com/python/mypy/pull/15347)) +* Fix type narrowing of \== None and in (None,) conditions (Marti Raudsepp, PR [15760](https://github.com/python/mypy/pull/15760)) +* Fix inference for attrs.fields (Shantanu, PR [15688](https://github.com/python/mypy/pull/15688)) +* Make “await in non-async function” a non-blocking error and give it an error code (Gregory Santosa, PR [15384](https://github.com/python/mypy/pull/15384)) +* Add basic support for decorated overloads (Ivan Levkivskyi, PR [15898](https://github.com/python/mypy/pull/15898)) +* Fix TypeVar regression with self types (Ivan Levkivskyi, PR [15945](https://github.com/python/mypy/pull/15945)) +* Add \_\_match\_args\_\_ to dataclasses with no fields (Ali Hamdan, PR [15749](https://github.com/python/mypy/pull/15749)) +* Include stdout and stderr in dmypy verbose output (Valentin Stanciu, PR [15881](https://github.com/python/mypy/pull/15881)) +* Improve match narrowing and reachability analysis (Shantanu, PR [15882](https://github.com/python/mypy/pull/15882)) +* Support \_\_bool\_\_ with Literal in \--warn-unreachable (Jannic Warken, PR [15645](https://github.com/python/mypy/pull/15645)) +* Fix inheriting from generic @frozen attrs class (Ilya Priven, PR [15700](https://github.com/python/mypy/pull/15700)) +* Correctly narrow types for tuple\[type\[X\], ...\] (Nikita Sobolev, PR [15691](https://github.com/python/mypy/pull/15691)) +* Don't flag intentionally empty generators unreachable (Ilya Priven, PR [15722](https://github.com/python/mypy/pull/15722)) +* Add tox.ini to mypy sdist (Marcel Telka, PR [15853](https://github.com/python/mypy/pull/15853)) +* Fix mypyc regression with pretty (Shantanu, PR [16124](https://github.com/python/mypy/pull/16124)) + +#### Typeshed Updates + +Typeshed is now modular and distributed as separate PyPI packages for everything except the standard library stubs. 
Please see [git log](https://github.com/python/typeshed/commits/main?after=6a8d653a671925b0a3af61729ff8cf3f90c9c662+0&branch=main&path=stdlib) for full list of typeshed changes. + +#### Acknowledgements + +Thanks to Max Murin, who did most of the release manager work for this release (I just did the final steps). + +Thanks to all mypy contributors who contributed to this release: + +* Albert Tugushev +* Alex Waygood +* Ali Hamdan +* chylek +* EXPLOSION +* Gregory Santosa +* Ilya Priven +* Ivan Levkivskyi +* Jannic Warken +* KotlinIsland +* Marc Mueller +* Marcel Johannesmann +* Marcel Telka +* Mark Byrne +* Marti Raudsepp +* Max Murin +* Nikita Sobolev +* Shantanu +* Valentin Stanciu + +Posted by Jukka Lehtosalo + + +## Mypy 1.5 + +[Thursday, 10 August 2023](https://mypy-lang.blogspot.com/2023/08/mypy-15-released.html) + +We’ve just uploaded mypy 1.5 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type checker for Python. This release includes new features, deprecations and bug fixes. You can install it as follows: + + python3 -m pip install -U mypy + +You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io). + +#### Drop Support for Python 3.7 + +Mypy no longer supports running with Python 3.7, which has reached end-of-life. This was contributed by Shantanu (PR [15566](https://github.com/python/mypy/pull/15566)). + +#### Optional Check to Require Explicit @override + +If you enable the explicit-override error code, mypy will generate an error if a method override doesn’t use the @typing.override decorator (as discussed in [PEP 698](https://peps.python.org/pep-0698/#strict-enforcement-per-project)). This way mypy will detect accidentally introduced overrides. 
Example: + +```python +# mypy: enable-error-code="explicit-override" + +from typing_extensions import override + +class C: + def foo(self) -> None: pass + def bar(self) -> None: pass + +class D(C): + # Error: Method "foo" is not using @override but is + # overriding a method + def foo(self) -> None: + ... + + @override + def bar(self) -> None: # OK + ... +``` + +You can enable the error code via \--enable-error-code=explicit-override on the mypy command line or enable\_error\_code = explicit-override in the mypy config file. + +The override decorator will be available in typing in Python 3.12, but you can also use the backport from a recent version of `typing_extensions` on all supported Python versions. + +This feature was contributed by Marc Mueller (PR [15512](https://github.com/python/mypy/pull/15512)). + +#### More Flexible TypedDict Creation and Update + +Mypy was previously overly strict when type checking TypedDict creation and update operations. Though these checks were often technically correct, they sometimes triggered for apparently valid code. These checks have now been relaxed by default. You can enable stricter checking by using the new \--extra-checks flag. + +Construction using the `**` syntax is now more flexible: + +```python +from typing import TypedDict + +class A(TypedDict): + foo: int + bar: int + +class B(TypedDict): + foo: int + +a: A = {"foo": 1, "bar": 2} +b: B = {"foo": 3} +a2: A = { **a, **b} # OK (previously an error) +``` + +You can also call update() with a TypedDict argument that contains a subset of the keys in the updated TypedDict: +```python +a.update(b) # OK (previously an error) +``` + +This feature was contributed by Ivan Levkivskyi (PR [15425](https://github.com/python/mypy/pull/15425)). + +#### Deprecated Flag: \--strict-concatenate + +The behavior of \--strict-concatenate is now included in the new \--extra-checks flag, and the old flag is deprecated. 
+ +#### Optionally Show Links to Error Code Documentation + +If you use \--show-error-code-links, mypy will add documentation links to (many) reported errors. The links are not shown for error messages that are sufficiently obvious, and they are shown once per error code only. + +Example output: +``` +a.py:1: error: Need type annotation for "foo" (hint: "foo: List[<type>] = ...") [var-annotated] +a.py:1: note: See https://mypy.rtfd.io/en/stable/_refs.html#code-var-annotated for more info +``` +This was contributed by Ivan Levkivskyi (PR [15449](https://github.com/python/mypy/pull/15449)). + +#### Consistently Avoid Type Checking Unreachable Code + +If a module top level has unreachable code, mypy won’t type check the unreachable statements. This is consistent with how functions behave. The behavior of \--warn-unreachable is also more consistent now. + +This was contributed by Ilya Priven (PR [15386](https://github.com/python/mypy/pull/15386)). + +#### Experimental Improved Type Inference for Generic Functions + +You can use \--new-type-inference to opt into an experimental new type inference algorithm. It fixes issues when calling a generic function with an argument that is also a generic function, in particular. This current implementation is still incomplete, but we encourage trying it out and reporting bugs if you encounter regressions. We are planning to enable the new algorithm by default in a future mypy release. + +This feature was contributed by Ivan Levkivskyi (PR [15287](https://github.com/python/mypy/pull/15287)). + +#### Partial Support for Python 3.12 + +Mypy and mypyc now support running on recent Python 3.12 development versions. Not all new Python 3.12 features are supported, and we don’t ship compiled wheels for Python 3.12 yet. 
+ +* Fix ast warnings for Python 3.12 (Nikita Sobolev, PR [15558](https://github.com/python/mypy/pull/15558)) +* mypyc: Fix multiple inheritance with a protocol on Python 3.12 (Jukka Lehtosalo, PR [15572](https://github.com/python/mypy/pull/15572)) +* mypyc: Fix self-compilation on Python 3.12 (Jukka Lehtosalo, PR [15582](https://github.com/python/mypy/pull/15582)) +* mypyc: Fix 3.12 issue with pickling of instances with \_\_dict\_\_ (Jukka Lehtosalo, PR [15574](https://github.com/python/mypy/pull/15574)) +* mypyc: Fix i16 on Python 3.12 (Jukka Lehtosalo, PR [15510](https://github.com/python/mypy/pull/15510)) +* mypyc: Fix int operations on Python 3.12 (Jukka Lehtosalo, PR [15470](https://github.com/python/mypy/pull/15470)) +* mypyc: Fix generators on Python 3.12 (Jukka Lehtosalo, PR [15472](https://github.com/python/mypy/pull/15472)) +* mypyc: Fix classes with \_\_dict\_\_ on 3.12 (Jukka Lehtosalo, PR [15471](https://github.com/python/mypy/pull/15471)) +* mypyc: Fix coroutines on Python 3.12 (Jukka Lehtosalo, PR [15469](https://github.com/python/mypy/pull/15469)) +* mypyc: Don't use \_PyErr\_ChainExceptions on 3.12, since it's deprecated (Jukka Lehtosalo, PR [15468](https://github.com/python/mypy/pull/15468)) +* mypyc: Add Python 3.12 feature macro (Jukka Lehtosalo, PR [15465](https://github.com/python/mypy/pull/15465)) + +#### Improvements to Dataclasses + +* Improve signature of dataclasses.replace (Ilya Priven, PR [14849](https://github.com/python/mypy/pull/14849)) +* Fix dataclass/protocol crash on joining types (Ilya Priven, PR [15629](https://github.com/python/mypy/pull/15629)) +* Fix strict optional handling in dataclasses (Ivan Levkivskyi, PR [15571](https://github.com/python/mypy/pull/15571)) +* Support optional types for custom dataclass descriptors (Marc Mueller, PR [15628](https://github.com/python/mypy/pull/15628)) +* Add `__slots__` attribute to dataclasses (Nikita Sobolev, PR [15649](https://github.com/python/mypy/pull/15649)) +* Support better 
\_\_post\_init\_\_ method signature for dataclasses (Nikita Sobolev, PR [15503](https://github.com/python/mypy/pull/15503)) + +#### Mypyc Improvements + +* Support unsigned 8-bit native integer type: mypy\_extensions.u8 (Jukka Lehtosalo, PR [15564](https://github.com/python/mypy/pull/15564)) +* Support signed 16-bit native integer type: mypy\_extensions.i16 (Jukka Lehtosalo, PR [15464](https://github.com/python/mypy/pull/15464)) +* Define mypy\_extensions.i16 in stubs (Jukka Lehtosalo, PR [15562](https://github.com/python/mypy/pull/15562)) +* Document more unsupported features and update supported features (Richard Si, PR [15524](https://github.com/python/mypy/pull/15524)) +* Fix final NamedTuple classes (Richard Si, PR [15513](https://github.com/python/mypy/pull/15513)) +* Use C99 compound literals for undefined tuple values (Jukka Lehtosalo, PR [15453](https://github.com/python/mypy/pull/15453)) +* Don't explicitly assign NULL values in setup functions (Logan Hunt, PR [15379](https://github.com/python/mypy/pull/15379)) + +#### Stubgen Improvements + +* Teach stubgen to work with complex and unary expressions (Nikita Sobolev, PR [15661](https://github.com/python/mypy/pull/15661)) +* Support ParamSpec and TypeVarTuple (Ali Hamdan, PR [15626](https://github.com/python/mypy/pull/15626)) +* Fix crash on non-str docstring (Ali Hamdan, PR [15623](https://github.com/python/mypy/pull/15623)) + +#### Documentation Updates + +* Add documentation for additional error codes (Ivan Levkivskyi, PR [15539](https://github.com/python/mypy/pull/15539)) +* Improve documentation of type narrowing (Ilya Priven, PR [15652](https://github.com/python/mypy/pull/15652)) +* Small improvements to protocol documentation (Shantanu, PR [15460](https://github.com/python/mypy/pull/15460)) +* Remove confusing instance variable example in cheat sheet (Adel Atallah, PR [15441](https://github.com/python/mypy/pull/15441)) + +#### Other Notable Fixes and Improvements + +* Constant fold additional unary 
and binary expressions (Richard Si, PR [15202](https://github.com/python/mypy/pull/15202)) +* Exclude the same special attributes from Protocol as CPython (Kyle Benesch, PR [15490](https://github.com/python/mypy/pull/15490)) +* Change the default value of the slots argument of attrs.define to True, to match runtime behavior (Ilya Priven, PR [15642](https://github.com/python/mypy/pull/15642)) +* Fix type of class attribute if attribute is defined in both class and metaclass (Alex Waygood, PR [14988](https://github.com/python/mypy/pull/14988)) +* Handle type the same as typing.Type in the first argument of classmethods (Erik Kemperman, PR [15297](https://github.com/python/mypy/pull/15297)) +* Fix \--find-occurrences flag (Shantanu, PR [15528](https://github.com/python/mypy/pull/15528)) +* Fix error location for class patterns (Nikita Sobolev, PR [15506](https://github.com/python/mypy/pull/15506)) +* Fix re-added file with errors in mypy daemon (Ivan Levkivskyi, PR [15440](https://github.com/python/mypy/pull/15440)) +* Fix dmypy run on Windows (Ivan Levkivskyi, PR [15429](https://github.com/python/mypy/pull/15429)) +* Fix abstract and non-abstract variant error for property deleter (Shantanu, PR [15395](https://github.com/python/mypy/pull/15395)) +* Remove special casing for "cannot" in error messages (Ilya Priven, PR [15428](https://github.com/python/mypy/pull/15428)) +* Add runtime `__slots__` attribute to attrs classes (Nikita Sobolev, PR [15651](https://github.com/python/mypy/pull/15651)) +* Add get\_expression\_type to CheckerPluginInterface (Ilya Priven, PR [15369](https://github.com/python/mypy/pull/15369)) +* Remove parameters that no longer exist from NamedTuple.\_make() (Alex Waygood, PR [15578](https://github.com/python/mypy/pull/15578)) +* Allow using typing.Self in `__all__` with an explicit @staticmethod decorator (Erik Kemperman, PR [15353](https://github.com/python/mypy/pull/15353)) +* Fix self types in subclass methods without Self annotation (Ivan 
Levkivskyi, PR [15541](https://github.com/python/mypy/pull/15541)) +* Check for abstract class objects in tuples (Nikita Sobolev, PR [15366](https://github.com/python/mypy/pull/15366)) + +#### Typeshed Updates + +Typeshed is now modular and distributed as separate PyPI packages for everything except the standard library stubs. Please see [git log](https://github.com/python/typeshed/commits/main?after=fc7d4722eaa54803926cee5730e1f784979c0531+0&branch=main&path=stdlib) for full list of typeshed changes. + +#### Acknowledgements + +Thanks to all mypy contributors who contributed to this release: + +* Adel Atallah +* Alex Waygood +* Ali Hamdan +* Erik Kemperman +* Federico Padua +* Ilya Priven +* Ivan Levkivskyi +* Jelle Zijlstra +* Jared Hance +* Jukka Lehtosalo +* Kyle Benesch +* Logan Hunt +* Marc Mueller +* Nikita Sobolev +* Richard Si +* Shantanu +* Stavros Ntentos +* Valentin Stanciu + +Posted by Valentin Stanciu + + +## Mypy 1.4 + +[Tuesday, 20 June 2023](https://mypy-lang.blogspot.com/2023/06/mypy-140-released.html) + +We’ve just uploaded mypy 1.4 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows: + + python3 -m pip install -U mypy + +You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io). + +#### The Override Decorator + +Mypy can now ensure that when renaming a method, overrides are also renamed. You can explicitly mark a method as overriding a base class method by using the @typing.override decorator ([PEP 698](https://peps.python.org/pep-0698/)). If the method is then renamed in the base class while the method override is not, mypy will generate an error. The decorator will be available in typing in Python 3.12, but you can also use the backport from a recent version of `typing_extensions` on all supported Python versions. 
+ +This feature was contributed by Thomas M Kehrenberg (PR [14609](https://github.com/python/mypy/pull/14609)). + +#### Propagating Type Narrowing to Nested Functions + +Previously, type narrowing was not propagated to nested functions because it would not be sound if the narrowed variable changed between the definition of the nested function and the call site. Mypy will now propagate the narrowed type if the variable is not assigned to after the definition of the nested function: + +```python +def outer(x: str | None = None) -> None: + if x is None: + x = calculate_default() + reveal_type(x) # "str" (narrowed) + + def nested() -> None: + reveal_type(x) # Now "str" (used to be "str | None") + + nested() +``` + +This may generate some new errors because asserts that were previously necessary may become tautological or no-ops. + +This was contributed by Jukka Lehtosalo (PR [15133](https://github.com/python/mypy/pull/15133)). + +#### Narrowing Enum Values Using “==” + +Mypy now allows narrowing enum types using the \== operator. Previously this was only supported when using the is operator. This makes exhaustiveness checking with enum types more usable, as the requirement to use the is operator was not very intuitive. In this example mypy can detect that the developer forgot to handle the value MyEnum.C in the example function + +```python +from enum import Enum + +class MyEnum(Enum): + A = 0 + B = 1 + C = 2 + +def example(e: MyEnum) -> str: # Error: Missing return statement + if e == MyEnum.A: + return 'x' + elif e == MyEnum.B: + return 'y' +``` + +Adding an extra elif case resolves the error: + +```python +... +def example(e: MyEnum) -> str: # No error -- all values covered + if e == MyEnum.A: + return 'x' + elif e == MyEnum.B: + return 'y' + elif e == MyEnum.C: + return 'z' +``` + +This change can cause false positives in test cases that have assert statements like assert o.x == SomeEnum.X when using \--strict-equality. 
Example: + +```python +# mypy: strict-equality + +from enum import Enum + +class MyEnum(Enum): + A = 0 + B = 1 + +class C: + x: MyEnum + ... + +def test_something() -> None: + c = C(...) + assert c.x == MyEnum.A + c.do_something_that_changes_x() + assert c.x == MyEnum.B # Error: Non-overlapping equality check +``` + +These errors can be ignored using \# type: ignore\[comparison-overlap\], or you can perform the assertion using a temporary variable as a workaround: + +```python +... +def test_something() -> None: + ... + x = c.x + assert x == MyEnum.A # Does not narrow c.x + c.do_something_that_changes_x() + x = c.x + assert x == MyEnum.B # OK +``` + +This feature was contributed by Shantanu (PR [11521](https://github.com/python/mypy/pull/11521)). + +#### Performance Improvements + +* Speed up simplification of large union types and also fix a recursive tuple crash (Shantanu, PR [15128](https://github.com/python/mypy/pull/15128)) +* Speed up union subtyping (Shantanu, PR [15104](https://github.com/python/mypy/pull/15104)) +* Don't type check most function bodies when type checking third-party library code, or generally when ignoring errors (Jukka Lehtosalo, PR [14150](https://github.com/python/mypy/pull/14150)) + +#### Improvements to Plugins + +* attrs.evolve: Support generics and unions (Ilya Konstantinov, PR [15050](https://github.com/python/mypy/pull/15050)) +* Fix ctypes plugin (Alex Waygood) + +#### Fixes to Crashes + +* Fix a crash when function-scope recursive alias appears as upper bound (Ivan Levkivskyi, PR [15159](https://github.com/python/mypy/pull/15159)) +* Fix crash on follow\_imports\_for\_stubs (Ivan Levkivskyi, PR [15407](https://github.com/python/mypy/pull/15407)) +* Fix stubtest crash in explicit init subclass (Shantanu, PR [15399](https://github.com/python/mypy/pull/15399)) +* Fix crash when indexing TypedDict with empty key (Shantanu, PR [15392](https://github.com/python/mypy/pull/15392)) +* Fix crash on NamedTuple as attribute (Ivan 
Levkivskyi, PR [15404](https://github.com/python/mypy/pull/15404)) +* Correctly track loop depth for nested functions/classes (Ivan Levkivskyi, PR [15403](https://github.com/python/mypy/pull/15403)) +* Fix crash on joins with recursive tuples (Ivan Levkivskyi, PR [15402](https://github.com/python/mypy/pull/15402)) +* Fix crash with custom ErrorCode subclasses (Marc Mueller, PR [15327](https://github.com/python/mypy/pull/15327)) +* Fix crash in dataclass protocol with self attribute assignment (Ivan Levkivskyi, PR [15157](https://github.com/python/mypy/pull/15157)) +* Fix crash on lambda in generic context with generic method in body (Ivan Levkivskyi, PR [15155](https://github.com/python/mypy/pull/15155)) +* Fix recursive type alias crash in make\_simplified\_union (Ivan Levkivskyi, PR [15216](https://github.com/python/mypy/pull/15216)) + +#### Improvements to Error Messages + +* Use lower-case built-in collection types such as list\[…\] instead of List\[…\] in errors when targeting Python 3.9+ (Max Murin, PR [15070](https://github.com/python/mypy/pull/15070)) +* Use X | Y union syntax in error messages when targeting Python 3.10+ (Omar Silva, PR [15102](https://github.com/python/mypy/pull/15102)) +* Use type instead of Type in errors when targeting Python 3.9+ (Rohit Sanjay, PR [15139](https://github.com/python/mypy/pull/15139)) +* Do not show unused-ignore errors in unreachable code, and make it a real error code (Ivan Levkivskyi, PR [15164](https://github.com/python/mypy/pull/15164)) +* Don’t limit the number of errors shown by default (Rohit Sanjay, PR [15138](https://github.com/python/mypy/pull/15138)) +* Improve message for truthy functions (madt2709, PR [15193](https://github.com/python/mypy/pull/15193)) +* Output distinct types when type names are ambiguous (teresa0605, PR [15184](https://github.com/python/mypy/pull/15184)) +* Update message about invalid exception type in try (AJ Rasmussen, PR [15131](https://github.com/python/mypy/pull/15131)) +* Add 
explanation if argument type is incompatible because of an unsupported numbers type (Jukka Lehtosalo, PR [15137](https://github.com/python/mypy/pull/15137)) +* Add more detail to 'signature incompatible with supertype' messages for non-callables (Ilya Priven, PR [15263](https://github.com/python/mypy/pull/15263)) + +#### Documentation Updates + +* Add \--local-partial-types note to dmypy docs (Alan Du, PR [15259](https://github.com/python/mypy/pull/15259)) +* Update getting started docs for mypyc for Windows (Valentin Stanciu, PR [15233](https://github.com/python/mypy/pull/15233)) +* Clarify usage of callables regarding type object in docs (Viicos, PR [15079](https://github.com/python/mypy/pull/15079)) +* Clarify difference between disallow\_untyped\_defs and disallow\_incomplete\_defs (Ilya Priven, PR [15247](https://github.com/python/mypy/pull/15247)) +* Use attrs and @attrs.define in documentation and tests (Ilya Priven, PR [15152](https://github.com/python/mypy/pull/15152)) + +#### Mypyc Improvements + +* Fix unexpected TypeError for certain variables with an inferred optional type (Richard Si, PR [15206](https://github.com/python/mypy/pull/15206)) +* Inline math literals (Logan Hunt, PR [15324](https://github.com/python/mypy/pull/15324)) +* Support unpacking mappings in dict display (Richard Si, PR [15203](https://github.com/python/mypy/pull/15203)) + +#### Changes to Stubgen + +* Do not remove Generic from base classes (Ali Hamdan, PR [15316](https://github.com/python/mypy/pull/15316)) +* Support yield from statements (Ali Hamdan, PR [15271](https://github.com/python/mypy/pull/15271)) +* Fix missing total from TypedDict class (Ali Hamdan, PR [15208](https://github.com/python/mypy/pull/15208)) +* Fix call-based namedtuple omitted from class bases (Ali Hamdan, PR [14680](https://github.com/python/mypy/pull/14680)) +* Support TypedDict alternative syntax (Ali Hamdan, PR [14682](https://github.com/python/mypy/pull/14682)) +* Make stubgen respect MYPY\_CACHE\_DIR 
(Henrik Bäärnhielm, PR [14722](https://github.com/python/mypy/pull/14722)) +* Fixes and simplifications (Ali Hamdan, PR [15232](https://github.com/python/mypy/pull/15232)) + +#### Other Notable Fixes and Improvements + +* Fix nested async functions when using TypeVar value restriction (Jukka Lehtosalo, PR [14705](https://github.com/python/mypy/pull/14705)) +* Always allow returning Any from lambda (Ivan Levkivskyi, PR [15413](https://github.com/python/mypy/pull/15413)) +* Add foundation for TypeVar defaults (PEP 696) (Marc Mueller, PR [14872](https://github.com/python/mypy/pull/14872)) +* Update semantic analyzer for TypeVar defaults (PEP 696) (Marc Mueller, PR [14873](https://github.com/python/mypy/pull/14873)) +* Make dict expression inference more consistent (Ivan Levkivskyi, PR [15174](https://github.com/python/mypy/pull/15174)) +* Do not block on duplicate base classes (Nikita Sobolev, PR [15367](https://github.com/python/mypy/pull/15367)) +* Generate an error when both staticmethod and classmethod decorators are used (Juhi Chandalia, PR [15118](https://github.com/python/mypy/pull/15118)) +* Fix assert\_type behaviour with literals (Carl Karsten, PR [15123](https://github.com/python/mypy/pull/15123)) +* Fix match subject ignoring redefinitions (Vincent Vanlaer, PR [15306](https://github.com/python/mypy/pull/15306)) +* Support `__all__`.remove (Shantanu, PR [15279](https://github.com/python/mypy/pull/15279)) + +#### Typeshed Updates + +Typeshed is now modular and distributed as separate PyPI packages for everything except the standard library stubs. Please see [git log](https://github.com/python/typeshed/commits/main?after=877e06ad1cfd9fd9967c0b0340a86d0c23ea89ce+0&branch=main&path=stdlib) for full list of typeshed changes. 
+ +#### Acknowledgements + +Thanks to all mypy contributors who contributed to this release: + +* Adrian Garcia Badaracco +* AJ Rasmussen +* Alan Du +* Alex Waygood +* Ali Hamdan +* Carl Karsten +* dosisod +* Ethan Smith +* Gregory Santosa +* Heather White +* Henrik Bäärnhielm +* Ilya Konstantinov +* Ilya Priven +* Ivan Levkivskyi +* Juhi Chandalia +* Jukka Lehtosalo +* Logan Hunt +* madt2709 +* Marc Mueller +* Max Murin +* Nikita Sobolev +* Omar Silva +* Özgür +* Richard Si +* Rohit Sanjay +* Shantanu +* teresa0605 +* Thomas M Kehrenberg +* Tin Tvrtković +* Tushar Sadhwani +* Valentin Stanciu +* Viicos +* Vincent Vanlaer +* Wesley Collin Wright +* William Santosa +* yaegassy + +I’d also like to thank my employer, Dropbox, for supporting mypy development. + +Posted by Jared Hance + + +## Mypy 1.3 + +[Wednesday, 10 May 2023](https://mypy-lang.blogspot.com/2023/05/mypy-13-released.html) + + We’ve just uploaded mypy 1.3 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows: + + python3 -m pip install -U mypy + +You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io). 
+ +#### Performance Improvements + +* Improve performance of union subtyping (Shantanu, PR [15104](https://github.com/python/mypy/pull/15104)) +* Add negative subtype caches (Ivan Levkivskyi, PR [14884](https://github.com/python/mypy/pull/14884)) + +#### Stub Tooling Improvements + +* Stubtest: Check that the stub is abstract if the runtime is, even when the stub is an overloaded method (Alex Waygood, PR [14955](https://github.com/python/mypy/pull/14955)) +* Stubtest: Verify stub methods or properties are decorated with @final if they are decorated with @final at runtime (Alex Waygood, PR [14951](https://github.com/python/mypy/pull/14951)) +* Stubtest: Fix stubtest false positives with TypedDicts at runtime (Alex Waygood, PR [14984](https://github.com/python/mypy/pull/14984)) +* Stubgen: Support @functools.cached\_property (Nikita Sobolev, PR [14981](https://github.com/python/mypy/pull/14981)) +* Improvements to stubgenc (Chad Dombrova, PR [14564](https://github.com/python/mypy/pull/14564)) + +#### Improvements to attrs + +* Add support for converters with TypeVars on generic attrs classes (Chad Dombrova, PR [14908](https://github.com/python/mypy/pull/14908)) +* Fix attrs.evolve on bound TypeVar (Ilya Konstantinov, PR [15022](https://github.com/python/mypy/pull/15022)) + +#### Documentation Updates + +* Improve async documentation (Shantanu, PR [14973](https://github.com/python/mypy/pull/14973)) +* Improvements to cheat sheet (Shantanu, PR [14972](https://github.com/python/mypy/pull/14972)) +* Add documentation for bytes formatting error code (Shantanu, PR [14971](https://github.com/python/mypy/pull/14971)) +* Convert insecure links to use HTTPS (Marti Raudsepp, PR [14974](https://github.com/python/mypy/pull/14974)) +* Also mention overloads in async iterator documentation (Shantanu, PR [14998](https://github.com/python/mypy/pull/14998)) +* stubtest: Improve allowlist documentation (Shantanu, PR [15008](https://github.com/python/mypy/pull/15008)) +* Clarify "Using 
types... but not at runtime" (Jon Shea, PR [15029](https://github.com/python/mypy/pull/15029)) +* Fix alignment of cheat sheet example (Ondřej Cvacho, PR [15039](https://github.com/python/mypy/pull/15039)) +* Fix error for callback protocol matching against callable type object (Shantanu, PR [15042](https://github.com/python/mypy/pull/15042)) + +#### Error Reporting Improvements + +* Improve bytes formatting error (Shantanu, PR [14959](https://github.com/python/mypy/pull/14959)) + +#### Mypyc Improvements + +* Fix unions of bools and ints (Tomer Chachamu, PR [15066](https://github.com/python/mypy/pull/15066)) + +#### Other Fixes and Improvements + +* Fix narrowing union types that include Self with isinstance (Christoph Tyralla, PR [14923](https://github.com/python/mypy/pull/14923)) +* Allow objects matching SupportsKeysAndGetItem to be unpacked (Bryan Forbes, PR [14990](https://github.com/python/mypy/pull/14990)) +* Check type guard validity for staticmethods (EXPLOSION, PR [14953](https://github.com/python/mypy/pull/14953)) +* Fix sys.platform when cross-compiling with emscripten (Ethan Smith, PR [14888](https://github.com/python/mypy/pull/14888)) + +#### Typeshed Updates + +Typeshed is now modular and distributed as separate PyPI packages for everything except the standard library stubs. Please see [git log](https://github.com/python/typeshed/commits/main?after=b0ed50e9392a23e52445b630a808153e0e256976+0&branch=main&path=stdlib) for full list of typeshed changes. + +#### Acknowledgements + +Thanks to all mypy contributors who contributed to this release: + +* Alex Waygood +* Amin Alaee +* Bryan Forbes +* Chad Dombrova +* Charlie Denton +* Christoph Tyralla +* dosisod +* Ethan Smith +* EXPLOSION +* Ilya Konstantinov +* Ivan Levkivskyi +* Jon Shea +* Jukka Lehtosalo +* KotlinIsland +* Marti Raudsepp +* Nikita Sobolev +* Ondřej Cvacho +* Shantanu +* sobolevn +* Tomer Chachamu +* Yaroslav Halchenko + +Posted by Wesley Collin Wright. 
+ + +## Mypy 1.2 + +[Thursday, 6 April 2023](https://mypy-lang.blogspot.com/2023/04/mypy-12-released.html) + +We’ve just uploaded mypy 1.2 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows: + + python3 -m pip install -U mypy + +You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io). + +#### Improvements to Dataclass Transforms + +* Support implicit default for "init" parameter in field specifiers (Wesley Collin Wright and Jukka Lehtosalo, PR [15010](https://github.com/python/mypy/pull/15010)) +* Support descriptors in dataclass transform (Jukka Lehtosalo, PR [15006](https://github.com/python/mypy/pull/15006)) +* Fix frozen\_default in incremental mode (Wesley Collin Wright) +* Fix frozen behavior for base classes with direct metaclasses (Wesley Collin Wright, PR [14878](https://github.com/python/mypy/pull/14878)) + +#### Mypyc: Native Floats + +Mypyc now uses a native, unboxed representation for values of type float. Previously these were heap-allocated Python objects. Native floats are faster and use less memory. Code that uses floating-point operations heavily can be several times faster when using native floats. + +Various float operations and math functions also now have optimized implementations. Refer to the [documentation](https://mypyc.readthedocs.io/en/latest/float_operations.html) for a full list. + +This can change the behavior of existing code that uses subclasses of float. 
When assigning an instance of a subclass of float to a variable with the float type, it gets implicitly converted to a float instance when compiled: + +```python +from lib import MyFloat # MyFloat ia a subclass of "float" + +def example() -> None: + x = MyFloat(1.5) + y: float = x # Implicit conversion from MyFloat to float + print(type(y)) # float, not MyFloat +``` + +Previously, implicit conversions were applied to int subclasses but not float subclasses. + +Also, int values can no longer be assigned to a variable with type float in compiled code, since these types now have incompatible representations. An explicit conversion is required: + +```python +def example(n: int) -> None: + a: float = 1 # Error: cannot assign "int" to "float" + b: float = 1.0 # OK + c: float = n # Error + d: float = float(n) # OK +``` + +This restriction only applies to assignments, since they could otherwise narrow down the type of a variable from float to int. int values can still be implicitly converted to float when passed as arguments to functions that expect float values. + +Note that mypyc still doesn’t support arrays of unboxed float values. Using list\[float\] involves heap-allocated float objects, since list can only store boxed values. Support for efficient floating point arrays is one of the next major planned mypyc features. + +Related changes: + +* Use a native unboxed representation for floats (Jukka Lehtosalo, PR [14880](https://github.com/python/mypy/pull/14880)) +* Document native floats and integers (Jukka Lehtosalo, PR [14927](https://github.com/python/mypy/pull/14927)) +* Fixes to float to int conversion (Jukka Lehtosalo, PR [14936](https://github.com/python/mypy/pull/14936)) + +#### Mypyc: Native Integers + +Mypyc now supports signed 32-bit and 64-bit integer types in addition to the arbitrary-precision int type. You can use the types mypy\_extensions.i32 and mypy\_extensions.i64 to speed up code that uses integer operations heavily. 
+ +Simple example: +```python +from mypy_extensions import i64 + +def inc(x: i64) -> i64: + return x + 1 +``` + +Refer to the [documentation](https://mypyc.readthedocs.io/en/latest/using_type_annotations.html#native-integer-types) for more information. This feature was contributed by Jukka Lehtosalo. + +#### Other Mypyc Fixes and Improvements + +* Support iterating over a TypedDict (Richard Si, PR [14747](https://github.com/python/mypy/pull/14747)) +* Faster coercions between different tuple types (Jukka Lehtosalo, PR [14899](https://github.com/python/mypy/pull/14899)) +* Faster calls via type aliases (Jukka Lehtosalo, PR [14784](https://github.com/python/mypy/pull/14784)) +* Faster classmethod calls via cls (Jukka Lehtosalo, PR [14789](https://github.com/python/mypy/pull/14789)) + +#### Fixes to Crashes + +* Fix crash on class-level import in protocol definition (Ivan Levkivskyi, PR [14926](https://github.com/python/mypy/pull/14926)) +* Fix crash on single item union of alias (Ivan Levkivskyi, PR [14876](https://github.com/python/mypy/pull/14876)) +* Fix crash on ParamSpec in incremental mode (Ivan Levkivskyi, PR [14885](https://github.com/python/mypy/pull/14885)) + +#### Documentation Updates + +* Update adopting \--strict documentation for 1.0 (Shantanu, PR [14865](https://github.com/python/mypy/pull/14865)) +* Some minor documentation tweaks (Jukka Lehtosalo, PR [14847](https://github.com/python/mypy/pull/14847)) +* Improve documentation of top level mypy: disable-error-code comment (Nikita Sobolev, PR [14810](https://github.com/python/mypy/pull/14810)) + +#### Error Reporting Improvements + +* Add error code to `typing_extensions` suggestion (Shantanu, PR [14881](https://github.com/python/mypy/pull/14881)) +* Add a separate error code for top-level await (Nikita Sobolev, PR [14801](https://github.com/python/mypy/pull/14801)) +* Don’t suggest two obsolete stub packages (Jelle Zijlstra, PR [14842](https://github.com/python/mypy/pull/14842)) +* Add suggestions 
for pandas-stubs and lxml-stubs (Shantanu, PR [14737](https://github.com/python/mypy/pull/14737)) + +#### Other Fixes and Improvements + +* Multiple inheritance considers callable objects as subtypes of functions (Christoph Tyralla, PR [14855](https://github.com/python/mypy/pull/14855)) +* stubtest: Respect @final runtime decorator and enforce it in stubs (Nikita Sobolev, PR [14922](https://github.com/python/mypy/pull/14922)) +* Fix false positives related to type\[\] (sterliakov, PR [14756](https://github.com/python/mypy/pull/14756)) +* Fix duplication of ParamSpec prefixes and properly substitute ParamSpecs (EXPLOSION, PR [14677](https://github.com/python/mypy/pull/14677)) +* Fix line number if `__iter__` is incorrectly reported as missing (Jukka Lehtosalo, PR [14893](https://github.com/python/mypy/pull/14893)) +* Fix incompatible overrides of overloaded generics with self types (Shantanu, PR [14882](https://github.com/python/mypy/pull/14882)) +* Allow SupportsIndex in slice expressions (Shantanu, PR [14738](https://github.com/python/mypy/pull/14738)) +* Support if statements in bodies of dataclasses and classes that use dataclass\_transform (Jacek Chałupka, PR [14854](https://github.com/python/mypy/pull/14854)) +* Allow iterable class objects to be unpacked (including enums) (Alex Waygood, PR [14827](https://github.com/python/mypy/pull/14827)) +* Fix narrowing for walrus expressions used in match statements (Shantanu, PR [14844](https://github.com/python/mypy/pull/14844)) +* Add signature for attr.evolve (Ilya Konstantinov, PR [14526](https://github.com/python/mypy/pull/14526)) +* Fix Any inference when unpacking iterators that don't directly inherit from typing.Iterator (Alex Waygood, PR [14821](https://github.com/python/mypy/pull/14821)) +* Fix unpack with overloaded `__iter__` method (Nikita Sobolev, PR [14817](https://github.com/python/mypy/pull/14817)) +* Reduce size of JSON data in mypy cache (dosisod, PR [14808](https://github.com/python/mypy/pull/14808)) 
+* Improve “used before definition” checks when a local definition has the same name as a global definition (Stas Ilinskiy, PR [14517](https://github.com/python/mypy/pull/14517)) +* Honor NoReturn as \_\_setitem\_\_ return type to mark unreachable code (sterliakov, PR [12572](https://github.com/python/mypy/pull/12572)) + +#### Typeshed Updates + +Typeshed is now modular and distributed as separate PyPI packages for everything except the standard library stubs. Please see [git log](https://github.com/python/typeshed/commits/main?after=a544b75320e97424d2d927605316383c755cdac0+0&branch=main&path=stdlib) for full list of typeshed changes. + +#### Acknowledgements + +Thanks to all mypy contributors who contributed to this release: + +* Alex Waygood +* Avasam +* Christoph Tyralla +* dosisod +* EXPLOSION +* Ilya Konstantinov +* Ivan Levkivskyi +* Jacek Chałupka +* Jelle Zijlstra +* Jukka Lehtosalo +* Marc Mueller +* Max Murin +* Nikita Sobolev +* Richard Si +* Shantanu +* Stas Ilinskiy +* sterliakov +* Wesley Collin Wright + +Posted by Jukka Lehtosalo + + +## Mypy 1.1.1 + +[Monday, 6 March 2023](https://mypy-lang.blogspot.com/2023/03/mypy-111-released.html) + + We’ve just uploaded mypy 1.1.1 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows: + + python3 -m pip install -U mypy + +You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io). + +#### Support for `dataclass_transform`` + +This release adds full support for the dataclass\_transform decorator defined in [PEP 681](https://peps.python.org/pep-0681/#decorator-function-example). This allows decorators, base classes, and metaclasses that generate a \_\_init\_\_ method or other methods based on the properties of that class (similar to dataclasses) to have those methods recognized by mypy. 
+ +This was contributed by Wesley Collin Wright. + +#### Dedicated Error Code for Method Assignments + +Mypy can’t safely check all assignments to methods (a form of monkey patching), so mypy generates an error by default. To make it easier to ignore this error, mypy now uses the new error code method-assign for this. By disabling this error code in a file or globally, mypy will no longer complain about assignments to methods if the signatures are compatible. + +Mypy also supports the old error code assignment for these assignments to prevent a backward compatibility break. More generally, we can use this mechanism in the future if we wish to split or rename another existing error code without causing backward compatibility issues. + +This was contributed by Ivan Levkivskyi (PR [14570](https://github.com/python/mypy/pull/14570)). + +#### Fixes to Crashes + +* Fix a crash on walrus in comprehension at class scope (Ivan Levkivskyi, PR [14556](https://github.com/python/mypy/pull/14556)) +* Fix crash related to value-constrained TypeVar (Shantanu, PR [14642](https://github.com/python/mypy/pull/14642)) + +#### Fixes to Cache Corruption + +* Fix generic TypedDict/NamedTuple caching (Ivan Levkivskyi, PR [14675](https://github.com/python/mypy/pull/14675)) + +#### Mypyc Fixes and Improvements + +* Raise "non-trait base must be first..." 
error less frequently (Richard Si, PR [14468](https://github.com/python/mypy/pull/14468)) +* Generate faster code for bool comparisons and arithmetic (Jukka Lehtosalo, PR [14489](https://github.com/python/mypy/pull/14489)) +* Optimize \_\_(a)enter\_\_/\_\_(a)exit\_\_ for native classes (Jared Hance, PR [14530](https://github.com/python/mypy/pull/14530)) +* Detect if attribute definition conflicts with base class/trait (Jukka Lehtosalo, PR [14535](https://github.com/python/mypy/pull/14535)) +* Support \_\_(r)divmod\_\_ dunders (Richard Si, PR [14613](https://github.com/python/mypy/pull/14613)) +* Support \_\_pow\_\_, \_\_rpow\_\_, and \_\_ipow\_\_ dunders (Richard Si, PR [14616](https://github.com/python/mypy/pull/14616)) +* Fix crash on star unpacking to underscore (Ivan Levkivskyi, PR [14624](https://github.com/python/mypy/pull/14624)) +* Fix iterating over a union of dicts (Richard Si, PR [14713](https://github.com/python/mypy/pull/14713)) + +#### Fixes to Detecting Undefined Names (used-before-def) + +* Correctly handle walrus operator (Stas Ilinskiy, PR [14646](https://github.com/python/mypy/pull/14646)) +* Handle walrus declaration in match subject correctly (Stas Ilinskiy, PR [14665](https://github.com/python/mypy/pull/14665)) + +#### Stubgen Improvements + +Stubgen is a tool for automatically generating draft stubs for libraries. + +* Allow aliases below the top level (Chad Dombrova, PR [14388](https://github.com/python/mypy/pull/14388)) +* Fix crash with PEP 604 union in type variable bound (Shantanu, PR [14557](https://github.com/python/mypy/pull/14557)) +* Preserve PEP 604 unions in generated .pyi files (hamdanal, PR [14601](https://github.com/python/mypy/pull/14601)) + +#### Stubtest Improvements + +Stubtest is a tool for testing that stubs conform to the implementations. 
+ +* Update message format so that it’s easier to go to error location (Avasam, PR [14437](https://github.com/python/mypy/pull/14437)) +* Handle name-mangling edge cases better (Alex Waygood, PR [14596](https://github.com/python/mypy/pull/14596)) + +#### Changes to Error Reporting and Messages + +* Add new TypedDict error code typeddict-unknown-key (JoaquimEsteves, PR [14225](https://github.com/python/mypy/pull/14225)) +* Give arguments a more reasonable location in error messages (Max Murin, PR [14562](https://github.com/python/mypy/pull/14562)) +* In error messages, quote just the module's name (Ilya Konstantinov, PR [14567](https://github.com/python/mypy/pull/14567)) +* Improve misleading message about Enum() (Rodrigo Silva, PR [14590](https://github.com/python/mypy/pull/14590)) +* Suggest importing from `typing_extensions` if definition is not in typing (Shantanu, PR [14591](https://github.com/python/mypy/pull/14591)) +* Consistently use type-abstract error code (Ivan Levkivskyi, PR [14619](https://github.com/python/mypy/pull/14619)) +* Consistently use literal-required error code for TypedDicts (Ivan Levkivskyi, PR [14621](https://github.com/python/mypy/pull/14621)) +* Adjust inconsistent dataclasses plugin error messages (Wesley Collin Wright, PR [14637](https://github.com/python/mypy/pull/14637)) +* Consolidate literal bool argument error messages (Wesley Collin Wright, PR [14693](https://github.com/python/mypy/pull/14693)) + +#### Other Fixes and Improvements + +* Check that type guards accept a positional argument (EXPLOSION, PR [14238](https://github.com/python/mypy/pull/14238)) +* Fix bug with in operator used with a union of Container and Iterable (Max Murin, PR [14384](https://github.com/python/mypy/pull/14384)) +* Support protocol inference for type\[T\] via metaclass (Ivan Levkivskyi, PR [14554](https://github.com/python/mypy/pull/14554)) +* Allow overlapping comparisons between bytes-like types (Shantanu, PR 
[14658](https://github.com/python/mypy/pull/14658)) +* Fix mypy daemon documentation link in README (Ivan Levkivskyi, PR [14644](https://github.com/python/mypy/pull/14644)) + +#### Typeshed Updates + +Typeshed is now modular and distributed as separate PyPI packages for everything except the standard library stubs. Please see [git log](https://github.com/python/typeshed/commits/main?after=5ebf892d0710a6e87925b8d138dfa597e7bb11cc+0&branch=main&path=stdlib) for full list of typeshed changes. + +#### Acknowledgements + +Thanks to all mypy contributors who contributed to this release: + +* Alex Waygood +* Avasam +* Chad Dombrova +* dosisod +* EXPLOSION +* hamdanal +* Ilya Konstantinov +* Ivan Levkivskyi +* Jared Hance +* JoaquimEsteves +* Jukka Lehtosalo +* Marc Mueller +* Max Murin +* Michael Lee +* Michael R. Crusoe +* Richard Si +* Rodrigo Silva +* Shantanu +* Stas Ilinskiy +* Wesley Collin Wright +* Yilei "Dolee" Yang +* Yurii Karabas + +We’d also like to thank our employer, Dropbox, for funding the mypy core team. + +Posted by Max Murin + + +## Mypy 1.0 + +[Monday, 6 February 2023](https://mypy-lang.blogspot.com/2023/02/mypy-10-released.html) + +We’ve just uploaded mypy 1.0 to the Python Package Index ([PyPI](https://pypi.org/project/mypy/)). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows: + + python3 -m pip install -U mypy + +You can read the full documentation for this release on [Read the Docs](http://mypy.readthedocs.io). + +#### New Release Versioning Scheme + +Now that mypy reached 1.0, we’ll switch to a new versioning scheme. Mypy version numbers will be of form x.y.z. + +Rules: + +* The major release number (x) is incremented if a feature release includes a significant backward incompatible change that affects a significant fraction of users. +* The minor release number (y) is incremented on each feature release. 
Minor releases include updated stdlib stubs from typeshed. +* The point release number (z) is incremented when there are fixes only. + +Mypy doesn't use SemVer, since most minor releases have at least minor backward incompatible changes in typeshed, at the very least. Also, many type checking features find new legitimate issues in code. These are not considered backward incompatible changes, unless the number of new errors is very high. + +Any significant backward incompatible change must be announced in the blog post for the previous feature release, before making the change. The previous release must also provide a flag to explicitly enable or disable the new behavior (whenever practical), so that users will be able to prepare for the changes and report issues. We should keep the feature flag for at least a few releases after we've switched the default. + +See [”Release Process” in the mypy wiki](https://github.com/python/mypy/wiki/Release-Process) for more details and for the most up-to-date version of the versioning scheme. + +#### Performance Improvements + +Mypy 1.0 is up to 40% faster than mypy 0.991 when type checking the Dropbox internal codebase. We also set up a daily job to measure the performance of the most recent development version of mypy to make it easier to track changes in performance. 
+ +Many optimizations contributed to this improvement: + +* Improve performance for errors on class with many attributes (Shantanu, PR [14379](https://github.com/python/mypy/pull/14379)) +* Speed up make\_simplified\_union (Jukka Lehtosalo, PR [14370](https://github.com/python/mypy/pull/14370)) +* Micro-optimize get\_proper\_type(s) (Jukka Lehtosalo, PR [14369](https://github.com/python/mypy/pull/14369)) +* Micro-optimize flatten\_nested\_unions (Jukka Lehtosalo, PR [14368](https://github.com/python/mypy/pull/14368)) +* Some semantic analyzer micro-optimizations (Jukka Lehtosalo, PR [14367](https://github.com/python/mypy/pull/14367)) +* A few miscellaneous micro-optimizations (Jukka Lehtosalo, PR [14366](https://github.com/python/mypy/pull/14366)) +* Optimization: Avoid a few uses of contextmanagers in semantic analyzer (Jukka Lehtosalo, PR [14360](https://github.com/python/mypy/pull/14360)) +* Optimization: Enable always defined attributes in Type subclasses (Jukka Lehtosalo, PR [14356](https://github.com/python/mypy/pull/14356)) +* Optimization: Remove expensive context manager in type analyzer (Jukka Lehtosalo, PR [14357](https://github.com/python/mypy/pull/14357)) +* subtypes: fast path for Union/Union subtype check (Hugues, PR [14277](https://github.com/python/mypy/pull/14277)) +* Micro-optimization: avoid Bogus\[int\] types that cause needless boxing (Jukka Lehtosalo, PR [14354](https://github.com/python/mypy/pull/14354)) +* Avoid slow error message logic if errors not shown to user (Jukka Lehtosalo, PR [14336](https://github.com/python/mypy/pull/14336)) +* Speed up the implementation of hasattr() checks (Jukka Lehtosalo, PR [14333](https://github.com/python/mypy/pull/14333)) +* Avoid the use of a context manager in hot code path (Jukka Lehtosalo, PR [14331](https://github.com/python/mypy/pull/14331)) +* Change various type queries into faster bool type queries (Jukka Lehtosalo, PR [14330](https://github.com/python/mypy/pull/14330)) +* Speed up recursive type 
check (Jukka Lehtosalo, PR [14326](https://github.com/python/mypy/pull/14326)) +* Optimize subtype checking by avoiding a nested function (Jukka Lehtosalo, PR [14325](https://github.com/python/mypy/pull/14325)) +* Optimize type parameter checks in subtype checking (Jukka Lehtosalo, PR [14324](https://github.com/python/mypy/pull/14324)) +* Speed up freshening type variables (Jukka Lehtosalo, PR [14323](https://github.com/python/mypy/pull/14323)) +* Optimize implementation of TypedDict types for \*\*kwds (Jukka Lehtosalo, PR [14316](https://github.com/python/mypy/pull/14316)) + +#### Warn About Variables Used Before Definition + +Mypy will now generate an error if you use a variable before it’s defined. This feature is enabled by default. By default mypy reports an error when it infers that a variable is always undefined. +```python +y = x # E: Name "x" is used before definition [used-before-def] +x = 0 +``` +This feature was contributed by Stas Ilinskiy. + +#### Detect Possibly Undefined Variables (Experimental) + +A new experimental possibly-undefined error code is now available that will detect variables that may be undefined: +```python + if b: + x = 0 + print(x) # Error: Name "x" may be undefined [possibly-undefined] +``` +The error code is disabled be default, since it can generate false positives. + +This feature was contributed by Stas Ilinskiy. + +#### Support the “Self” Type + +There is now a simpler syntax for declaring [generic self types](https://mypy.readthedocs.io/en/stable/generics.html#generic-methods-and-generic-self) introduced in [PEP 673](https://peps.python.org/pep-0673/): the Self type. You no longer have to define a type variable to use “self types”, and you can use them with attributes. 
Example from mypy documentation: +```python +from typing import Self + +class Friend: + other: Self | None = None + + @classmethod + def make_pair(cls) -> tuple[Self, Self]: + a, b = cls(), cls() + a.other = b + b.other = a + return a, b + +class SuperFriend(Friend): + pass + +# a and b have the inferred type "SuperFriend", not "Friend" +a, b = SuperFriend.make_pair() +``` +The feature was introduced in Python 3.11. In earlier Python versions a backport of Self is available in `typing_extensions`. + +This was contributed by Ivan Levkivskyi (PR [14041](https://github.com/python/mypy/pull/14041)). + +#### Support ParamSpec in Type Aliases + +ParamSpec and Concatenate can now be used in type aliases. Example: +```python +from typing import ParamSpec, Callable + +P = ParamSpec("P") +A = Callable[P, None] + +def f(c: A[int, str]) -> None: + c(1, "x") +``` +This feature was contributed by Ivan Levkivskyi (PR [14159](https://github.com/python/mypy/pull/14159)). + +#### ParamSpec and Generic Self Types No Longer Experimental + +Support for ParamSpec ([PEP 612](https://www.python.org/dev/peps/pep-0612/)) and generic self types are no longer considered experimental. 
+ +#### Miscellaneous New Features + +* Minimal, partial implementation of dataclass\_transform ([PEP 681](https://peps.python.org/pep-0681/)) (Wesley Collin Wright, PR [14523](https://github.com/python/mypy/pull/14523)) +* Add basic support for `typing_extensions`.TypeVar (Marc Mueller, PR [14313](https://github.com/python/mypy/pull/14313)) +* Add \--debug-serialize option (Marc Mueller, PR [14155](https://github.com/python/mypy/pull/14155)) +* Constant fold initializers of final variables (Jukka Lehtosalo, PR [14283](https://github.com/python/mypy/pull/14283)) +* Enable Final instance attributes for attrs (Tin Tvrtković, PR [14232](https://github.com/python/mypy/pull/14232)) +* Allow function arguments as base classes (Ivan Levkivskyi, PR [14135](https://github.com/python/mypy/pull/14135)) +* Allow super() with mixin protocols (Ivan Levkivskyi, PR [14082](https://github.com/python/mypy/pull/14082)) +* Add type inference for dict.keys membership (Matthew Hughes, PR [13372](https://github.com/python/mypy/pull/13372)) +* Generate error for class attribute access if attribute is defined with `__slots__` (Harrison McCarty, PR [14125](https://github.com/python/mypy/pull/14125)) +* Support additional attributes in callback protocols (Ivan Levkivskyi, PR [14084](https://github.com/python/mypy/pull/14084)) + +#### Fixes to Crashes + +* Fix crash on prefixed ParamSpec with forward reference (Ivan Levkivskyi, PR [14569](https://github.com/python/mypy/pull/14569)) +* Fix internal crash when resolving the same partial type twice (Shantanu, PR [14552](https://github.com/python/mypy/pull/14552)) +* Fix crash in daemon mode on new import cycle (Ivan Levkivskyi, PR [14508](https://github.com/python/mypy/pull/14508)) +* Fix crash in mypy daemon (Ivan Levkivskyi, PR [14497](https://github.com/python/mypy/pull/14497)) +* Fix crash on Any metaclass in incremental mode (Ivan Levkivskyi, PR [14495](https://github.com/python/mypy/pull/14495)) +* Fix crash in await inside comprehension 
outside function (Ivan Levkivskyi, PR [14486](https://github.com/python/mypy/pull/14486)) +* Fix crash in Self type on forward reference in upper bound (Ivan Levkivskyi, PR [14206](https://github.com/python/mypy/pull/14206)) +* Fix a crash when incorrect super() is used outside a method (Ivan Levkivskyi, PR [14208](https://github.com/python/mypy/pull/14208)) +* Fix crash on overriding with frozen attrs (Ivan Levkivskyi, PR [14186](https://github.com/python/mypy/pull/14186)) +* Fix incremental mode crash on generic function appearing in nested position (Ivan Levkivskyi, PR [14148](https://github.com/python/mypy/pull/14148)) +* Fix daemon crash on malformed NamedTuple (Ivan Levkivskyi, PR [14119](https://github.com/python/mypy/pull/14119)) +* Fix crash during ParamSpec inference (Ivan Levkivskyi, PR [14118](https://github.com/python/mypy/pull/14118)) +* Fix crash on nested generic callable (Ivan Levkivskyi, PR [14093](https://github.com/python/mypy/pull/14093)) +* Fix crashes with unpacking SyntaxError (Shantanu, PR [11499](https://github.com/python/mypy/pull/11499)) +* Fix crash on partial type inference within a lambda (Ivan Levkivskyi, PR [14087](https://github.com/python/mypy/pull/14087)) +* Fix crash with enums (Michael Lee, PR [14021](https://github.com/python/mypy/pull/14021)) +* Fix crash with malformed TypedDicts and disllow-any-expr (Michael Lee, PR [13963](https://github.com/python/mypy/pull/13963)) + +#### Error Reporting Improvements + +* More helpful error for missing self (Shantanu, PR [14386](https://github.com/python/mypy/pull/14386)) +* Add error-code truthy-iterable (Marc Mueller, PR [13762](https://github.com/python/mypy/pull/13762)) +* Fix pluralization in error messages (KotlinIsland, PR [14411](https://github.com/python/mypy/pull/14411)) + +#### Mypyc: Support Match Statement + +Mypyc can now compile Python 3.10 match statements. + +This was contributed by dosisod (PR [13953](https://github.com/python/mypy/pull/13953)). 
+ +#### Other Mypyc Fixes and Improvements + +* Optimize int(x)/float(x)/complex(x) on instances of native classes (Richard Si, PR [14450](https://github.com/python/mypy/pull/14450)) +* Always emit warnings (Richard Si, PR [14451](https://github.com/python/mypy/pull/14451)) +* Faster bool and integer conversions (Jukka Lehtosalo, PR [14422](https://github.com/python/mypy/pull/14422)) +* Support attributes that override properties (Jukka Lehtosalo, PR [14377](https://github.com/python/mypy/pull/14377)) +* Precompute set literals for "in" operations and iteration (Richard Si, PR [14409](https://github.com/python/mypy/pull/14409)) +* Don't load targets with forward references while setting up non-extension class `__all__` (Richard Si, PR [14401](https://github.com/python/mypy/pull/14401)) +* Compile away NewType type calls (Richard Si, PR [14398](https://github.com/python/mypy/pull/14398)) +* Improve error message for multiple inheritance (Joshua Bronson, PR [14344](https://github.com/python/mypy/pull/14344)) +* Simplify union types (Jukka Lehtosalo, PR [14363](https://github.com/python/mypy/pull/14363)) +* Fixes to union simplification (Jukka Lehtosalo, PR [14364](https://github.com/python/mypy/pull/14364)) +* Fix for typeshed changes to Collection (Shantanu, PR [13994](https://github.com/python/mypy/pull/13994)) +* Allow use of enum.Enum (Shantanu, PR [13995](https://github.com/python/mypy/pull/13995)) +* Fix compiling on Arch Linux (dosisod, PR [13978](https://github.com/python/mypy/pull/13978)) + +#### Documentation Improvements + +* Various documentation and error message tweaks (Jukka Lehtosalo, PR [14574](https://github.com/python/mypy/pull/14574)) +* Improve Generics documentation (Shantanu, PR [14587](https://github.com/python/mypy/pull/14587)) +* Improve protocols documentation (Shantanu, PR [14577](https://github.com/python/mypy/pull/14577)) +* Improve dynamic typing documentation (Shantanu, PR [14576](https://github.com/python/mypy/pull/14576)) +* Improve 
the Common Issues page (Shantanu, PR [14581](https://github.com/python/mypy/pull/14581)) +* Add a top-level TypedDict page (Shantanu, PR [14584](https://github.com/python/mypy/pull/14584)) +* More improvements to getting started documentation (Shantanu, PR [14572](https://github.com/python/mypy/pull/14572)) +* Move truthy-function documentation from “optional checks” to “enabled by default” (Anders Kaseorg, PR [14380](https://github.com/python/mypy/pull/14380)) +* Avoid use of implicit optional in decorator factory documentation (Tom Schraitle, PR [14156](https://github.com/python/mypy/pull/14156)) +* Clarify documentation surrounding install-types (Shantanu, PR [14003](https://github.com/python/mypy/pull/14003)) +* Improve searchability for module level type ignore errors (Shantanu, PR [14342](https://github.com/python/mypy/pull/14342)) +* Advertise mypy daemon in README (Ivan Levkivskyi, PR [14248](https://github.com/python/mypy/pull/14248)) +* Add link to error codes in README (Ivan Levkivskyi, PR [14249](https://github.com/python/mypy/pull/14249)) +* Document that report generation disables cache (Ilya Konstantinov, PR [14402](https://github.com/python/mypy/pull/14402)) +* Stop saying mypy is beta software (Ivan Levkivskyi, PR [14251](https://github.com/python/mypy/pull/14251)) +* Flycheck-mypy is deprecated, since its functionality was merged to Flycheck (Ivan Levkivskyi, PR [14247](https://github.com/python/mypy/pull/14247)) +* Update code example in "Declaring decorators" (ChristianWitzler, PR [14131](https://github.com/python/mypy/pull/14131)) + +#### Stubtest Improvements + +Stubtest is a tool for testing that stubs conform to the implementations. 
+ +* Improve error message for `__all__`\-related errors (Alex Waygood, PR [14362](https://github.com/python/mypy/pull/14362)) +* Improve heuristics for determining whether global-namespace names are imported (Alex Waygood, PR [14270](https://github.com/python/mypy/pull/14270)) +* Catch BaseException on module imports (Shantanu, PR [14284](https://github.com/python/mypy/pull/14284)) +* Associate exported symbol error with `__all__` object\_path (Nikita Sobolev, PR [14217](https://github.com/python/mypy/pull/14217)) +* Add \_\_warningregistry\_\_ to the list of ignored module dunders (Nikita Sobolev, PR [14218](https://github.com/python/mypy/pull/14218)) +* If a default is present in the stub, check that it is correct (Jelle Zijlstra, PR [14085](https://github.com/python/mypy/pull/14085)) + +#### Stubgen Improvements + +Stubgen is a tool for automatically generating draft stubs for libraries. + +* Treat dlls as C modules (Shantanu, PR [14503](https://github.com/python/mypy/pull/14503)) + +#### Other Notable Fixes and Improvements + +* Update stub suggestions based on recent typeshed changes (Alex Waygood, PR [14265](https://github.com/python/mypy/pull/14265)) +* Fix attrs protocol check with cache (Marc Mueller, PR [14558](https://github.com/python/mypy/pull/14558)) +* Fix strict equality check if operand item type has custom \_\_eq\_\_ (Jukka Lehtosalo, PR [14513](https://github.com/python/mypy/pull/14513)) +* Don't consider object always truthy (Jukka Lehtosalo, PR [14510](https://github.com/python/mypy/pull/14510)) +* Properly support union of TypedDicts as dict literal context (Ivan Levkivskyi, PR [14505](https://github.com/python/mypy/pull/14505)) +* Properly expand type in generic class with Self and TypeVar with values (Ivan Levkivskyi, PR [14491](https://github.com/python/mypy/pull/14491)) +* Fix recursive TypedDicts/NamedTuples defined with call syntax (Ivan Levkivskyi, PR [14488](https://github.com/python/mypy/pull/14488)) +* Fix type inference issue when 
a class inherits from Any (Shantanu, PR [14404](https://github.com/python/mypy/pull/14404)) +* Fix false positive on generic base class with six (Ivan Levkivskyi, PR [14478](https://github.com/python/mypy/pull/14478)) +* Don't read scripts without extensions as modules in namespace mode (Tim Geypens, PR [14335](https://github.com/python/mypy/pull/14335)) +* Fix inference for constrained type variables within unions (Christoph Tyralla, PR [14396](https://github.com/python/mypy/pull/14396)) +* Fix Unpack imported from typing (Marc Mueller, PR [14378](https://github.com/python/mypy/pull/14378)) +* Allow trailing commas in ini configuration of multiline values (Nikita Sobolev, PR [14240](https://github.com/python/mypy/pull/14240)) +* Fix false negatives involving Unions and generators or coroutines (Shantanu, PR [14224](https://github.com/python/mypy/pull/14224)) +* Fix ParamSpec constraint for types as callable (Vincent Vanlaer, PR [14153](https://github.com/python/mypy/pull/14153)) +* Fix type aliases with fixed-length tuples (Jukka Lehtosalo, PR [14184](https://github.com/python/mypy/pull/14184)) +* Fix issues with type aliases and new style unions (Jukka Lehtosalo, PR [14181](https://github.com/python/mypy/pull/14181)) +* Simplify unions less aggressively (Ivan Levkivskyi, PR [14178](https://github.com/python/mypy/pull/14178)) +* Simplify callable overlap logic (Ivan Levkivskyi, PR [14174](https://github.com/python/mypy/pull/14174)) +* Try empty context when assigning to union typed variables (Ivan Levkivskyi, PR [14151](https://github.com/python/mypy/pull/14151)) +* Improvements to recursive types (Ivan Levkivskyi, PR [14147](https://github.com/python/mypy/pull/14147)) +* Make non-numeric non-empty FORCE\_COLOR truthy (Shantanu, PR [14140](https://github.com/python/mypy/pull/14140)) +* Fix to recursive type aliases (Ivan Levkivskyi, PR [14136](https://github.com/python/mypy/pull/14136)) +* Correctly handle Enum name on Python 3.11 (Ivan Levkivskyi, PR 
[14133](https://github.com/python/mypy/pull/14133)) +* Fix class objects falling back to metaclass for callback protocol (Ivan Levkivskyi, PR [14121](https://github.com/python/mypy/pull/14121)) +* Correctly support self types in callable ClassVar (Ivan Levkivskyi, PR [14115](https://github.com/python/mypy/pull/14115)) +* Fix type variable clash in nested positions and in attributes (Ivan Levkivskyi, PR [14095](https://github.com/python/mypy/pull/14095)) +* Allow class variable as implementation for read only attribute (Ivan Levkivskyi, PR [14081](https://github.com/python/mypy/pull/14081)) +* Prevent warnings from causing dmypy to fail (Andrzej Bartosiński, PR [14102](https://github.com/python/mypy/pull/14102)) +* Correctly process nested definitions in mypy daemon (Ivan Levkivskyi, PR [14104](https://github.com/python/mypy/pull/14104)) +* Don't consider a branch unreachable if there is a possible promotion (Ivan Levkivskyi, PR [14077](https://github.com/python/mypy/pull/14077)) +* Fix incompatible overrides of overloaded methods in concrete subclasses (Shantanu, PR [14017](https://github.com/python/mypy/pull/14017)) +* Fix new style union syntax in type aliases (Jukka Lehtosalo, PR [14008](https://github.com/python/mypy/pull/14008)) +* Fix and optimise overload compatibility checking (Shantanu, PR [14018](https://github.com/python/mypy/pull/14018)) +* Improve handling of redefinitions through imports (Shantanu, PR [13969](https://github.com/python/mypy/pull/13969)) +* Preserve (some) implicitly exported types (Shantanu, PR [13967](https://github.com/python/mypy/pull/13967)) + +#### Typeshed Updates + +Typeshed is now modular and distributed as separate PyPI packages for everything except the standard library stubs. Please see [git log](https://github.com/python/typeshed/commits/main?after=ea0ae2155e8a04c9837903c3aff8dd5ad5f36ebc+0&branch=main&path=stdlib) for full list of typeshed changes. 
+ +#### Acknowledgements + +Thanks to all mypy contributors who contributed to this release: + +* Alessio Izzo +* Alex Waygood +* Anders Kaseorg +* Andrzej Bartosiński +* Avasam +* ChristianWitzler +* Christoph Tyralla +* dosisod +* Harrison McCarty +* Hugo van Kemenade +* Hugues +* Ilya Konstantinov +* Ivan Levkivskyi +* Jelle Zijlstra +* jhance +* johnthagen +* Jonathan Daniel +* Joshua Bronson +* Jukka Lehtosalo +* KotlinIsland +* Lakshay Bisht +* Lefteris Karapetsas +* Marc Mueller +* Matthew Hughes +* Michael Lee +* Nick Drozd +* Nikita Sobolev +* Richard Si +* Shantanu +* Stas Ilinskiy +* Tim Geypens +* Tin Tvrtković +* Tom Schraitle +* Valentin Stanciu +* Vincent Vanlaer + +We’d also like to thank our employer, Dropbox, for funding the mypy core team. + +Posted by Stas Ilinskiy + +## Previous releases + +For information about previous releases, refer to the posts at https://mypy-lang.blogspot.com/ From 838a1d4be1f3cad230d028b0e9cb8e1fb7a4fa5b Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Wed, 18 Oct 2023 11:33:10 +0300 Subject: [PATCH 104/144] Add `unimported-reveal` error code (#16271) Note: `reveal_type(1) # type: ignore` is problematic, because it silences the output. So, I've added some docs to advertise not doing so. 
Closes https://github.com/python/mypy/issues/16270 --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- docs/source/error_code_list2.rst | 44 ++++++++++++++++++ mypy/checkexpr.py | 26 +++++++++++ mypy/errorcodes.py | 6 +++ mypy/nodes.py | 11 +++-- mypy/semanal.py | 13 +++++- mypy/types.py | 7 +-- test-data/unit/check-errorcodes.test | 62 +++++++++++++++++++++++++ test-data/unit/fixtures/typing-full.pyi | 3 ++ 8 files changed, 163 insertions(+), 9 deletions(-) diff --git a/docs/source/error_code_list2.rst b/docs/source/error_code_list2.rst index 30fad0793771..cc5c9b0a1bc6 100644 --- a/docs/source/error_code_list2.rst +++ b/docs/source/error_code_list2.rst @@ -481,3 +481,47 @@ Example: @override def g(self, y: int) -> None: pass + + +.. _code-unimported-reveal: + +Check that ``reveal_type`` is imported from typing or typing_extensions [unimported-reveal] +------------------------------------------------------------------------------------------- + +Mypy used to have ``reveal_type`` as a special builtin +that only existed during type-checking. +At runtime it fails with the expected ``NameError``, +which can cause real problems in production, hidden from mypy. + +But, in Python 3.11 ``reveal_type`` +`was added to typing.py <https://docs.python.org/3/library/typing.html#typing.reveal_type>`_. +``typing_extensions`` ported this helper to all supported Python versions. + +Now users can actually import ``reveal_type`` to make the runtime code safe. + +.. note:: + + Starting with Python 3.11, the ``reveal_type`` function can be imported from ``typing``. + To use it with older Python versions, import it from ``typing_extensions`` instead. + +.. code-block:: python + + # Use "mypy --enable-error-code unimported-reveal" + + x = 1 + reveal_type(x) # Note: Revealed type is "builtins.int" \ + # Error: Name "reveal_type" is not defined + +Correct usage: + +.. 
code-block:: python + + # Use "mypy --enable-error-code unimported-reveal" + from typing import reveal_type # or `typing_extensions` + + x = 1 + # This won't raise an error: + reveal_type(x) # Note: Revealed type is "builtins.int" + +When this code is enabled, using ``reveal_locals`` is always an error, +because there's no way one can import it. diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index a5c8c80e1580..1d5233170a10 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -36,6 +36,7 @@ ARG_STAR2, IMPLICITLY_ABSTRACT, LITERAL_TYPE, + REVEAL_LOCALS, REVEAL_TYPE, ArgKind, AssertTypeExpr, @@ -4498,6 +4499,7 @@ def visit_reveal_expr(self, expr: RevealExpr) -> Type: self.msg.note( "'reveal_type' always outputs 'Any' in unchecked functions", expr.expr ) + self.check_reveal_imported(expr) return revealed_type else: # REVEAL_LOCALS @@ -4512,8 +4514,32 @@ def visit_reveal_expr(self, expr: RevealExpr) -> Type: ) self.msg.reveal_locals(names_to_types, expr) + self.check_reveal_imported(expr) return NoneType() + def check_reveal_imported(self, expr: RevealExpr) -> None: + if codes.UNIMPORTED_REVEAL not in self.chk.options.enabled_error_codes: + return + + name = "" + if expr.kind == REVEAL_LOCALS: + name = "reveal_locals" + elif expr.kind == REVEAL_TYPE and not expr.is_imported: + name = "reveal_type" + else: + return + + self.chk.fail(f'Name "{name}" is not defined', expr, code=codes.UNIMPORTED_REVEAL) + if name == "reveal_type": + module = ( + "typing" if self.chk.options.python_version >= (3, 11) else "typing_extensions" + ) + hint = ( + 'Did you forget to import it from "{module}"?' + ' (Suggestion: "from {module} import {name}")' + ).format(module=module, name=name) + self.chk.note(hint, expr, code=codes.UNIMPORTED_REVEAL) + def visit_type_application(self, tapp: TypeApplication) -> Type: """Type check a type application (expr[type, ...]). 
diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index cd9978c2f31c..98600679da53 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -249,6 +249,12 @@ def __hash__(self) -> int: "General", default_enabled=False, ) +UNIMPORTED_REVEAL: Final = ErrorCode( + "unimported-reveal", + "Require explicit import from typing or typing_extensions for reveal_type", + "General", + default_enabled=False, +) # Syntax errors are often blocking. diff --git a/mypy/nodes.py b/mypy/nodes.py index 6556cd910b46..0e5c078d0227 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -2135,21 +2135,26 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class RevealExpr(Expression): """Reveal type expression reveal_type(expr) or reveal_locals() expression.""" - __slots__ = ("expr", "kind", "local_nodes") + __slots__ = ("expr", "kind", "local_nodes", "is_imported") - __match_args__ = ("expr", "kind", "local_nodes") + __match_args__ = ("expr", "kind", "local_nodes", "is_imported") expr: Expression | None kind: int local_nodes: list[Var] | None def __init__( - self, kind: int, expr: Expression | None = None, local_nodes: list[Var] | None = None + self, + kind: int, + expr: Expression | None = None, + local_nodes: list[Var] | None = None, + is_imported: bool = False, ) -> None: super().__init__() self.expr = expr self.kind = kind self.local_nodes = local_nodes + self.is_imported = is_imported def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_reveal_expr(self) diff --git a/mypy/semanal.py b/mypy/semanal.py index 9c2452252208..179ee7c70bfb 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -243,6 +243,7 @@ DATACLASS_TRANSFORM_NAMES, FINAL_DECORATOR_NAMES, FINAL_TYPE_NAMES, + IMPORTED_REVEAL_TYPE_NAMES, NEVER_NAMES, OVERLOAD_NAMES, OVERRIDE_DECORATOR_NAMES, @@ -5056,7 +5057,17 @@ def visit_call_expr(self, expr: CallExpr) -> None: elif refers_to_fullname(expr.callee, REVEAL_TYPE_NAMES): if not self.check_fixed_args(expr, 1, "reveal_type"): return - expr.analyzed 
= RevealExpr(kind=REVEAL_TYPE, expr=expr.args[0]) + reveal_imported = False + reveal_type_node = self.lookup("reveal_type", expr, suppress_errors=True) + if ( + reveal_type_node + and isinstance(reveal_type_node.node, FuncBase) + and reveal_type_node.fullname in IMPORTED_REVEAL_TYPE_NAMES + ): + reveal_imported = True + expr.analyzed = RevealExpr( + kind=REVEAL_TYPE, expr=expr.args[0], is_imported=reveal_imported + ) expr.analyzed.line = expr.line expr.analyzed.column = expr.column expr.analyzed.accept(self) diff --git a/mypy/types.py b/mypy/types.py index ea81609fc605..d0c19a08e60a 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -128,11 +128,8 @@ "typing.Reversible", ) -REVEAL_TYPE_NAMES: Final = ( - "builtins.reveal_type", - "typing.reveal_type", - "typing_extensions.reveal_type", -) +IMPORTED_REVEAL_TYPE_NAMES: Final = ("typing.reveal_type", "typing_extensions.reveal_type") +REVEAL_TYPE_NAMES: Final = ("builtins.reveal_type", *IMPORTED_REVEAL_TYPE_NAMES) ASSERT_TYPE_NAMES: Final = ("typing.assert_type", "typing_extensions.assert_type") diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index ac7c8b4c9f9d..2282f21bcfa6 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -1086,3 +1086,65 @@ def unsafe_func(x: object) -> Union[int, str]: else: return "some string" [builtins fixtures/isinstancelist.pyi] + + +### +# unimported-reveal +### + +[case testUnimportedRevealType] +# flags: --enable-error-code=unimported-reveal +x = 1 +reveal_type(x) +[out] +main:3: error: Name "reveal_type" is not defined [unimported-reveal] +main:3: note: Did you forget to import it from "typing_extensions"? 
(Suggestion: "from typing_extensions import reveal_type") +main:3: note: Revealed type is "builtins.int" +[builtins fixtures/isinstancelist.pyi] + +[case testUnimportedRevealTypePy311] +# flags: --enable-error-code=unimported-reveal --python-version=3.11 +x = 1 +reveal_type(x) +[out] +main:3: error: Name "reveal_type" is not defined [unimported-reveal] +main:3: note: Did you forget to import it from "typing"? (Suggestion: "from typing import reveal_type") +main:3: note: Revealed type is "builtins.int" +[builtins fixtures/isinstancelist.pyi] + +[case testUnimportedRevealTypeInUncheckedFunc] +# flags: --enable-error-code=unimported-reveal +def unchecked(): + x = 1 + reveal_type(x) +[out] +main:4: error: Name "reveal_type" is not defined [unimported-reveal] +main:4: note: Did you forget to import it from "typing_extensions"? (Suggestion: "from typing_extensions import reveal_type") +main:4: note: Revealed type is "Any" +main:4: note: 'reveal_type' always outputs 'Any' in unchecked functions +[builtins fixtures/isinstancelist.pyi] + +[case testUnimportedRevealTypeImportedTypingExtensions] +# flags: --enable-error-code=unimported-reveal +from typing_extensions import reveal_type +x = 1 +reveal_type(x) # N: Revealed type is "builtins.int" +[builtins fixtures/isinstancelist.pyi] + +[case testUnimportedRevealTypeImportedTyping311] +# flags: --enable-error-code=unimported-reveal --python-version=3.11 +from typing import reveal_type +x = 1 +reveal_type(x) # N: Revealed type is "builtins.int" +[builtins fixtures/isinstancelist.pyi] +[typing fixtures/typing-full.pyi] + +[case testUnimportedRevealLocals] +# flags: --enable-error-code=unimported-reveal +x = 1 +reveal_locals() +[out] +main:3: note: Revealed local types are: +main:3: note: x: builtins.int +main:3: error: Name "reveal_locals" is not defined [unimported-reveal] +[builtins fixtures/isinstancelist.pyi] diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi index 
417ae6baf491..e9f0aa199bb4 100644 --- a/test-data/unit/fixtures/typing-full.pyi +++ b/test-data/unit/fixtures/typing-full.pyi @@ -192,3 +192,6 @@ def dataclass_transform( **kwargs: Any, ) -> Callable[[T], T]: ... def override(__arg: T) -> T: ... + +# Was added in 3.11 +def reveal_type(__obj: T) -> T: ... From e1f6d6b4547f118787a68bf503f5c86a2801a2bf Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 19 Oct 2023 21:22:40 +0100 Subject: [PATCH 105/144] [mypyc] Avoid cyclic reference in nested functions (#16268) Mypyc used to always put nested functions into the environment object, which results in cyclic references, since the function object contains a reference to the environment. Now we only do this if the body of a nested function refers to a nested function (e.g. due to a recursive call). This means that in the majority of cases we can avoid the cyclic reference. This speeds up self check by an impressive 7%. I'm not sure exactly why the impact is so big, but spending less time in the cyclic garbage collector is probably a big part. --- mypyc/irbuild/builder.py | 5 + mypyc/irbuild/context.py | 2 + mypyc/irbuild/env_class.py | 2 +- mypyc/irbuild/function.py | 43 +++- mypyc/test-data/irbuild-basic.test | 219 ++++++++-------- mypyc/test-data/irbuild-nested.test | 380 ++++++++++++---------------- 6 files changed, 305 insertions(+), 346 deletions(-) diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py index 8c68f91bf633..0757415f6753 100644 --- a/mypyc/irbuild/builder.py +++ b/mypyc/irbuild/builder.py @@ -502,6 +502,11 @@ def non_function_scope(self) -> bool: # Currently the stack always has at least two items: dummy and top-level. 
return len(self.fn_infos) <= 2 + def top_level_fn_info(self) -> FuncInfo | None: + if self.non_function_scope(): + return None + return self.fn_infos[2] + def init_final_static( self, lvalue: Lvalue, diff --git a/mypyc/irbuild/context.py b/mypyc/irbuild/context.py index 676afb507504..a740f0b821d9 100644 --- a/mypyc/irbuild/context.py +++ b/mypyc/irbuild/context.py @@ -22,6 +22,7 @@ def __init__( contains_nested: bool = False, is_decorated: bool = False, in_non_ext: bool = False, + add_nested_funcs_to_env: bool = False, ) -> None: self.fitem = fitem self.name = name @@ -47,6 +48,7 @@ def __init__( self.contains_nested = contains_nested self.is_decorated = is_decorated self.in_non_ext = in_non_ext + self.add_nested_funcs_to_env = add_nested_funcs_to_env # TODO: add field for ret_type: RType = none_rprimitive diff --git a/mypyc/irbuild/env_class.py b/mypyc/irbuild/env_class.py index ded8072deb63..aa223fe20176 100644 --- a/mypyc/irbuild/env_class.py +++ b/mypyc/irbuild/env_class.py @@ -107,7 +107,7 @@ def load_env_registers(builder: IRBuilder) -> None: load_outer_envs(builder, fn_info.callable_class) # If this is a FuncDef, then make sure to load the FuncDef into its own environment # class so that the function can be called recursively. 
- if isinstance(fitem, FuncDef): + if isinstance(fitem, FuncDef) and fn_info.add_nested_funcs_to_env: setup_func_for_recursive_call(builder, fitem, fn_info.callable_class) diff --git a/mypyc/irbuild/function.py b/mypyc/irbuild/function.py index 822350ea829b..ebf7fa9a54de 100644 --- a/mypyc/irbuild/function.py +++ b/mypyc/irbuild/function.py @@ -19,6 +19,7 @@ ArgKind, ClassDef, Decorator, + FuncBase, FuncDef, FuncItem, LambdaExpr, @@ -222,6 +223,7 @@ def c() -> None: is_decorated = fitem in builder.fdefs_to_decorators is_singledispatch = fitem in builder.singledispatch_impls in_non_ext = False + add_nested_funcs_to_env = has_nested_func_self_reference(builder, fitem) class_name = None if cdef: ir = builder.mapper.type_to_ir[cdef.info] @@ -234,14 +236,15 @@ def c() -> None: func_name = name builder.enter( FuncInfo( - fitem, - func_name, - class_name, - gen_func_ns(builder), - is_nested, - contains_nested, - is_decorated, - in_non_ext, + fitem=fitem, + name=func_name, + class_name=class_name, + namespace=gen_func_ns(builder), + is_nested=is_nested, + contains_nested=contains_nested, + is_decorated=is_decorated, + in_non_ext=in_non_ext, + add_nested_funcs_to_env=add_nested_funcs_to_env, ) ) @@ -267,7 +270,13 @@ def c() -> None: builder.enter(fn_info) setup_env_for_generator_class(builder) load_outer_envs(builder, builder.fn_info.generator_class) - if builder.fn_info.is_nested and isinstance(fitem, FuncDef): + top_level = builder.top_level_fn_info() + if ( + builder.fn_info.is_nested + and isinstance(fitem, FuncDef) + and top_level + and top_level.add_nested_funcs_to_env + ): setup_func_for_recursive_call(builder, fitem, builder.fn_info.generator_class) create_switch_for_generator_class(builder) add_raise_exception_blocks_to_generator_class(builder, fitem.line) @@ -344,6 +353,20 @@ def c() -> None: return func_ir, func_reg +def has_nested_func_self_reference(builder: IRBuilder, fitem: FuncItem) -> bool: + """Does a nested function contain a self-reference in its body? 
+ + If a nested function only has references in the surrounding function, + we don't need to add it to the environment. + """ + if any(isinstance(sym, FuncBase) for sym in builder.free_variables.get(fitem, set())): + return True + return any( + has_nested_func_self_reference(builder, nested) + for nested in builder.encapsulating_funcs.get(fitem, []) + ) + + def gen_func_ir( builder: IRBuilder, args: list[Register], @@ -768,7 +791,7 @@ def get_func_target(builder: IRBuilder, fdef: FuncDef) -> AssignmentTarget: # Get the target associated with the previously defined FuncDef. return builder.lookup(fdef.original_def) - if builder.fn_info.is_generator or builder.fn_info.contains_nested: + if builder.fn_info.is_generator or builder.fn_info.add_nested_funcs_to_env: return builder.lookup(fdef) return builder.add_local_reg(fdef, object_rprimitive) diff --git a/mypyc/test-data/irbuild-basic.test b/mypyc/test-data/irbuild-basic.test index 33fc8cfaa83b..bf608abb87ad 100644 --- a/mypyc/test-data/irbuild-basic.test +++ b/mypyc/test-data/irbuild-basic.test @@ -2694,47 +2694,43 @@ L2: def g_a_obj.__call__(__mypyc_self__): __mypyc_self__ :: __main__.g_a_obj r0 :: __main__.a_env - r1, g :: object - r2 :: str - r3 :: object - r4 :: str - r5, r6, r7, r8 :: object - r9 :: str - r10 :: object - r11 :: str - r12, r13 :: object + r1 :: str + r2 :: object + r3 :: str + r4, r5, r6, r7 :: object + r8 :: str + r9 :: object + r10 :: str + r11, r12 :: object L0: r0 = __mypyc_self__.__mypyc_env__ - r1 = r0.g - g = r1 - r2 = 'Entering' - r3 = builtins :: module - r4 = 'print' - r5 = CPyObject_GetAttr(r3, r4) - r6 = PyObject_CallFunctionObjArgs(r5, r2, 0) - r7 = r0.f - r8 = PyObject_CallFunctionObjArgs(r7, 0) - r9 = 'Exited' - r10 = builtins :: module - r11 = 'print' - r12 = CPyObject_GetAttr(r10, r11) - r13 = PyObject_CallFunctionObjArgs(r12, r9, 0) + r1 = 'Entering' + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = PyObject_CallFunctionObjArgs(r4, r1, 0) + r6 = 
r0.f + r7 = PyObject_CallFunctionObjArgs(r6, 0) + r8 = 'Exited' + r9 = builtins :: module + r10 = 'print' + r11 = CPyObject_GetAttr(r9, r10) + r12 = PyObject_CallFunctionObjArgs(r11, r8, 0) return 1 def a(f): f :: object r0 :: __main__.a_env r1 :: bool r2 :: __main__.g_a_obj - r3, r4 :: bool - r5 :: object + r3 :: bool + g :: object L0: r0 = a_env() r0.f = f; r1 = is_error r2 = g_a_obj() r2.__mypyc_env__ = r0; r3 = is_error - r0.g = r2; r4 = is_error - r5 = r0.g - return r5 + g = r2 + return g def g_b_obj.__get__(__mypyc_self__, instance, owner): __mypyc_self__, instance, owner, r0 :: object r1 :: bit @@ -2751,47 +2747,43 @@ L2: def g_b_obj.__call__(__mypyc_self__): __mypyc_self__ :: __main__.g_b_obj r0 :: __main__.b_env - r1, g :: object - r2 :: str - r3 :: object - r4 :: str - r5, r6, r7, r8 :: object - r9 :: str - r10 :: object - r11 :: str - r12, r13 :: object + r1 :: str + r2 :: object + r3 :: str + r4, r5, r6, r7 :: object + r8 :: str + r9 :: object + r10 :: str + r11, r12 :: object L0: r0 = __mypyc_self__.__mypyc_env__ - r1 = r0.g - g = r1 - r2 = '---' - r3 = builtins :: module - r4 = 'print' - r5 = CPyObject_GetAttr(r3, r4) - r6 = PyObject_CallFunctionObjArgs(r5, r2, 0) - r7 = r0.f - r8 = PyObject_CallFunctionObjArgs(r7, 0) - r9 = '---' - r10 = builtins :: module - r11 = 'print' - r12 = CPyObject_GetAttr(r10, r11) - r13 = PyObject_CallFunctionObjArgs(r12, r9, 0) + r1 = '---' + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = PyObject_CallFunctionObjArgs(r4, r1, 0) + r6 = r0.f + r7 = PyObject_CallFunctionObjArgs(r6, 0) + r8 = '---' + r9 = builtins :: module + r10 = 'print' + r11 = CPyObject_GetAttr(r9, r10) + r12 = PyObject_CallFunctionObjArgs(r11, r8, 0) return 1 def b(f): f :: object r0 :: __main__.b_env r1 :: bool r2 :: __main__.g_b_obj - r3, r4 :: bool - r5 :: object + r3 :: bool + g :: object L0: r0 = b_env() r0.f = f; r1 = is_error r2 = g_b_obj() r2.__mypyc_env__ = r0; r3 = is_error - r0.g = r2; r4 = is_error - r5 = r0.g - 
return r5 + g = r2 + return g def d_c_obj.__get__(__mypyc_self__, instance, owner): __mypyc_self__, instance, owner, r0 :: object r1 :: bit @@ -2808,20 +2800,17 @@ L2: def d_c_obj.__call__(__mypyc_self__): __mypyc_self__ :: __main__.d_c_obj r0 :: __main__.c_env - r1, d :: object - r2 :: str - r3 :: object - r4 :: str - r5, r6 :: object + r1 :: str + r2 :: object + r3 :: str + r4, r5 :: object L0: r0 = __mypyc_self__.__mypyc_env__ - r1 = r0.d - d = r1 - r2 = 'd' - r3 = builtins :: module - r4 = 'print' - r5 = CPyObject_GetAttr(r3, r4) - r6 = PyObject_CallFunctionObjArgs(r5, r2, 0) + r1 = 'd' + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = PyObject_CallFunctionObjArgs(r4, r1, 0) return 1 def c(): r0 :: __main__.c_env @@ -2832,16 +2821,15 @@ def c(): r5, r6 :: object r7 :: dict r8 :: str - r9, r10 :: object - r11 :: bool - r12 :: dict - r13 :: str - r14 :: i32 - r15 :: bit - r16 :: str - r17 :: object - r18 :: str - r19, r20, r21, r22 :: object + r9, r10, d :: object + r11 :: dict + r12 :: str + r13 :: i32 + r14 :: bit + r15 :: str + r16 :: object + r17 :: str + r18, r19, r20 :: object L0: r0 = c_env() r1 = d_c_obj() @@ -2854,18 +2842,17 @@ L0: r8 = 'a' r9 = CPyDict_GetItem(r7, r8) r10 = PyObject_CallFunctionObjArgs(r9, r6, 0) - r0.d = r10; r11 = is_error - r12 = __main__.globals :: static - r13 = 'd' - r14 = CPyDict_SetItem(r12, r13, r10) - r15 = r14 >= 0 :: signed - r16 = 'c' - r17 = builtins :: module - r18 = 'print' - r19 = CPyObject_GetAttr(r17, r18) - r20 = PyObject_CallFunctionObjArgs(r19, r16, 0) - r21 = r0.d - r22 = PyObject_CallFunctionObjArgs(r21, 0) + d = r10 + r11 = __main__.globals :: static + r12 = 'd' + r13 = CPyDict_SetItem(r11, r12, r10) + r14 = r13 >= 0 :: signed + r15 = 'c' + r16 = builtins :: module + r17 = 'print' + r18 = CPyObject_GetAttr(r16, r17) + r19 = PyObject_CallFunctionObjArgs(r18, r15, 0) + r20 = PyObject_CallFunctionObjArgs(d, 0) return 1 def __top_level__(): r0, r1 :: object @@ -2947,47 +2934,43 @@ L2: 
def g_a_obj.__call__(__mypyc_self__): __mypyc_self__ :: __main__.g_a_obj r0 :: __main__.a_env - r1, g :: object - r2 :: str - r3 :: object - r4 :: str - r5, r6, r7, r8 :: object - r9 :: str - r10 :: object - r11 :: str - r12, r13 :: object + r1 :: str + r2 :: object + r3 :: str + r4, r5, r6, r7 :: object + r8 :: str + r9 :: object + r10 :: str + r11, r12 :: object L0: r0 = __mypyc_self__.__mypyc_env__ - r1 = r0.g - g = r1 - r2 = 'Entering' - r3 = builtins :: module - r4 = 'print' - r5 = CPyObject_GetAttr(r3, r4) - r6 = PyObject_CallFunctionObjArgs(r5, r2, 0) - r7 = r0.f - r8 = PyObject_CallFunctionObjArgs(r7, 0) - r9 = 'Exited' - r10 = builtins :: module - r11 = 'print' - r12 = CPyObject_GetAttr(r10, r11) - r13 = PyObject_CallFunctionObjArgs(r12, r9, 0) + r1 = 'Entering' + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = PyObject_CallFunctionObjArgs(r4, r1, 0) + r6 = r0.f + r7 = PyObject_CallFunctionObjArgs(r6, 0) + r8 = 'Exited' + r9 = builtins :: module + r10 = 'print' + r11 = CPyObject_GetAttr(r9, r10) + r12 = PyObject_CallFunctionObjArgs(r11, r8, 0) return 1 def a(f): f :: object r0 :: __main__.a_env r1 :: bool r2 :: __main__.g_a_obj - r3, r4 :: bool - r5 :: object + r3 :: bool + g :: object L0: r0 = a_env() r0.f = f; r1 = is_error r2 = g_a_obj() r2.__mypyc_env__ = r0; r3 = is_error - r0.g = r2; r4 = is_error - r5 = r0.g - return r5 + g = r2 + return g def __top_level__(): r0, r1 :: object r2 :: bit diff --git a/mypyc/test-data/irbuild-nested.test b/mypyc/test-data/irbuild-nested.test index adef80263533..b2b884705366 100644 --- a/mypyc/test-data/irbuild-nested.test +++ b/mypyc/test-data/irbuild-nested.test @@ -50,25 +50,22 @@ L2: def inner_a_obj.__call__(__mypyc_self__): __mypyc_self__ :: __main__.inner_a_obj r0 :: __main__.a_env - r1, inner, r2 :: object + r1 :: object L0: r0 = __mypyc_self__.__mypyc_env__ - r1 = r0.inner - inner = r1 - r2 = box(None, 1) - return r2 + r1 = box(None, 1) + return r1 def a(): r0 :: __main__.a_env r1 
:: __main__.inner_a_obj - r2, r3 :: bool - r4 :: object + r2 :: bool + inner :: object L0: r0 = a_env() r1 = inner_a_obj() r1.__mypyc_env__ = r0; r2 = is_error - r0.inner = r1; r3 = is_error - r4 = r0.inner - return r4 + inner = r1 + return inner def second_b_first_obj.__get__(__mypyc_self__, instance, owner): __mypyc_self__, instance, owner, r0 :: object r1 :: bit @@ -86,15 +83,12 @@ def second_b_first_obj.__call__(__mypyc_self__): __mypyc_self__ :: __main__.second_b_first_obj r0 :: __main__.first_b_env r1 :: __main__.b_env - r2, second :: object - r3 :: str + r2 :: str L0: r0 = __mypyc_self__.__mypyc_env__ r1 = r0.__mypyc_env__ - r2 = r0.second - second = r2 - r3 = 'b.first.second: nested function' - return r3 + r2 = 'b.first.second: nested function' + return r2 def first_b_obj.__get__(__mypyc_self__, instance, owner): __mypyc_self__, instance, owner, r0 :: object r1 :: bit @@ -111,35 +105,30 @@ L2: def first_b_obj.__call__(__mypyc_self__): __mypyc_self__ :: __main__.first_b_obj r0 :: __main__.b_env - r1, first :: object - r2 :: __main__.first_b_env - r3 :: bool - r4 :: __main__.second_b_first_obj - r5, r6 :: bool - r7 :: object + r1 :: __main__.first_b_env + r2 :: bool + r3 :: __main__.second_b_first_obj + r4 :: bool + second :: object L0: r0 = __mypyc_self__.__mypyc_env__ - r1 = r0.first - first = r1 - r2 = first_b_env() - r2.__mypyc_env__ = r0; r3 = is_error - r4 = second_b_first_obj() - r4.__mypyc_env__ = r2; r5 = is_error - r2.second = r4; r6 = is_error - r7 = r2.second - return r7 + r1 = first_b_env() + r1.__mypyc_env__ = r0; r2 = is_error + r3 = second_b_first_obj() + r3.__mypyc_env__ = r1; r4 = is_error + second = r3 + return second def b(): r0 :: __main__.b_env r1 :: __main__.first_b_obj - r2, r3 :: bool - r4 :: object + r2 :: bool + first :: object L0: r0 = b_env() r1 = first_b_obj() r1.__mypyc_env__ = r0; r2 = is_error - r0.first = r1; r3 = is_error - r4 = r0.first - return r4 + first = r1 + return first def inner_c_obj.__get__(__mypyc_self__, 
instance, owner): __mypyc_self__, instance, owner, r0 :: object r1 :: bit @@ -157,28 +146,24 @@ def inner_c_obj.__call__(__mypyc_self__, s): __mypyc_self__ :: __main__.inner_c_obj s :: str r0 :: __main__.c_env - r1, inner :: object - r2, r3 :: str + r1, r2 :: str L0: r0 = __mypyc_self__.__mypyc_env__ - r1 = r0.inner - inner = r1 - r2 = '!' - r3 = PyUnicode_Concat(s, r2) - return r3 + r1 = '!' + r2 = PyUnicode_Concat(s, r1) + return r2 def c(num): num :: float r0 :: __main__.c_env r1 :: __main__.inner_c_obj - r2, r3 :: bool - r4 :: object + r2 :: bool + inner :: object L0: r0 = c_env() r1 = inner_c_obj() r1.__mypyc_env__ = r0; r2 = is_error - r0.inner = r1; r3 = is_error - r4 = r0.inner - return r4 + inner = r1 + return inner def inner_d_obj.__get__(__mypyc_self__, instance, owner): __mypyc_self__, instance, owner, r0 :: object r1 :: bit @@ -196,40 +181,36 @@ def inner_d_obj.__call__(__mypyc_self__, s): __mypyc_self__ :: __main__.inner_d_obj s :: str r0 :: __main__.d_env - r1, inner :: object - r2, r3 :: str + r1, r2 :: str L0: r0 = __mypyc_self__.__mypyc_env__ - r1 = r0.inner - inner = r1 - r2 = '?' - r3 = PyUnicode_Concat(s, r2) - return r3 + r1 = '?' 
+ r2 = PyUnicode_Concat(s, r1) + return r2 def d(num): num :: float r0 :: __main__.d_env r1 :: __main__.inner_d_obj - r2, r3 :: bool - r4 :: str - r5, r6 :: object - r7, a, r8 :: str - r9, r10 :: object - r11, b :: str + r2 :: bool + inner :: object + r3 :: str + r4 :: object + r5, a, r6 :: str + r7 :: object + r8, b :: str L0: r0 = d_env() r1 = inner_d_obj() r1.__mypyc_env__ = r0; r2 = is_error - r0.inner = r1; r3 = is_error - r4 = 'one' - r5 = r0.inner - r6 = PyObject_CallFunctionObjArgs(r5, r4, 0) - r7 = cast(str, r6) - a = r7 - r8 = 'two' - r9 = r0.inner - r10 = PyObject_CallFunctionObjArgs(r9, r8, 0) - r11 = cast(str, r10) - b = r11 + inner = r1 + r3 = 'one' + r4 = PyObject_CallFunctionObjArgs(inner, r3, 0) + r5 = cast(str, r4) + a = r5 + r6 = 'two' + r7 = PyObject_CallFunctionObjArgs(inner, r6, 0) + r8 = cast(str, r7) + b = r8 return a def inner(): r0 :: str @@ -290,32 +271,28 @@ L2: def inner_a_obj.__call__(__mypyc_self__): __mypyc_self__ :: __main__.inner_a_obj r0 :: __main__.a_env - r1, inner :: object - r2 :: int + r1 :: int L0: r0 = __mypyc_self__.__mypyc_env__ - r1 = r0.inner - inner = r1 - r2 = r0.num - return r2 + r1 = r0.num + return r1 def a(num): num :: int r0 :: __main__.a_env r1 :: bool r2 :: __main__.inner_a_obj - r3, r4 :: bool - r5, r6 :: object - r7 :: int + r3 :: bool + inner, r4 :: object + r5 :: int L0: r0 = a_env() r0.num = num; r1 = is_error r2 = inner_a_obj() r2.__mypyc_env__ = r0; r3 = is_error - r0.inner = r2; r4 = is_error - r5 = r0.inner - r6 = PyObject_CallFunctionObjArgs(r5, 0) - r7 = unbox(int, r6) - return r7 + inner = r2 + r4 = PyObject_CallFunctionObjArgs(inner, 0) + r5 = unbox(int, r4) + return r5 def inner_b_obj.__get__(__mypyc_self__, instance, owner): __mypyc_self__, instance, owner, r0 :: object r1 :: bit @@ -332,36 +309,32 @@ L2: def inner_b_obj.__call__(__mypyc_self__): __mypyc_self__ :: __main__.inner_b_obj r0 :: __main__.b_env - r1, inner :: object - r2 :: bool - foo, r3 :: int + r1 :: bool + foo, r2 :: int L0: r0 = 
__mypyc_self__.__mypyc_env__ - r1 = r0.inner - inner = r1 - r0.num = 8; r2 = is_error + r0.num = 8; r1 = is_error foo = 12 - r3 = r0.num - return r3 + r2 = r0.num + return r2 def b(): r0 :: __main__.b_env r1 :: bool r2 :: __main__.inner_b_obj - r3, r4 :: bool - r5, r6 :: object - r7, r8, r9 :: int + r3 :: bool + inner, r4 :: object + r5, r6, r7 :: int L0: r0 = b_env() r0.num = 6; r1 = is_error r2 = inner_b_obj() r2.__mypyc_env__ = r0; r3 = is_error - r0.inner = r2; r4 = is_error - r5 = r0.inner - r6 = PyObject_CallFunctionObjArgs(r5, 0) - r7 = unbox(int, r6) - r8 = r0.num - r9 = CPyTagged_Add(r7, r8) - return r9 + inner = r2 + r4 = PyObject_CallFunctionObjArgs(inner, 0) + r5 = unbox(int, r4) + r6 = r0.num + r7 = CPyTagged_Add(r5, r6) + return r7 def inner_c_obj.__get__(__mypyc_self__, instance, owner): __mypyc_self__, instance, owner, r0 :: object r1 :: bit @@ -378,14 +351,11 @@ L2: def inner_c_obj.__call__(__mypyc_self__): __mypyc_self__ :: __main__.inner_c_obj r0 :: __main__.c_env - r1, inner :: object - r2 :: str + r1 :: str L0: r0 = __mypyc_self__.__mypyc_env__ - r1 = r0.inner - inner = r1 - r2 = 'f.inner: first definition' - return r2 + r1 = 'f.inner: first definition' + return r1 def inner_c_obj_0.__get__(__mypyc_self__, instance, owner): __mypyc_self__, instance, owner, r0 :: object r1 :: bit @@ -402,40 +372,37 @@ L2: def inner_c_obj_0.__call__(__mypyc_self__): __mypyc_self__ :: __main__.inner_c_obj_0 r0 :: __main__.c_env - r1, inner :: object - r2 :: str + r1 :: str L0: r0 = __mypyc_self__.__mypyc_env__ - r1 = r0.inner - inner = r1 - r2 = 'f.inner: second definition' - return r2 + r1 = 'f.inner: second definition' + return r1 def c(flag): flag :: bool r0 :: __main__.c_env r1 :: __main__.inner_c_obj - r2, r3 :: bool - r4 :: __main__.inner_c_obj_0 - r5, r6 :: bool - r7, r8 :: object - r9 :: str + r2 :: bool + inner :: object + r3 :: __main__.inner_c_obj_0 + r4 :: bool + r5 :: object + r6 :: str L0: r0 = c_env() if flag goto L1 else goto L2 :: bool L1: r1 = 
inner_c_obj() r1.__mypyc_env__ = r0; r2 = is_error - r0.inner = r1; r3 = is_error + inner = r1 goto L3 L2: - r4 = inner_c_obj_0() - r4.__mypyc_env__ = r0; r5 = is_error - r0.inner = r4; r6 = is_error + r3 = inner_c_obj_0() + r3.__mypyc_env__ = r0; r4 = is_error + inner = r3 L3: - r7 = r0.inner - r8 = PyObject_CallFunctionObjArgs(r7, 0) - r9 = cast(str, r8) - return r9 + r5 = PyObject_CallFunctionObjArgs(inner, 0) + r6 = cast(str, r5) + return r6 [case testSpecialNested] def a() -> int: @@ -465,15 +432,12 @@ def c_a_b_obj.__call__(__mypyc_self__): __mypyc_self__ :: __main__.c_a_b_obj r0 :: __main__.b_a_env r1 :: __main__.a_env - r2, c :: object - r3 :: int + r2 :: int L0: r0 = __mypyc_self__.__mypyc_env__ r1 = r0.__mypyc_env__ - r2 = r0.c - c = r2 - r3 = r1.x - return r3 + r2 = r1.x + return r2 def b_a_obj.__get__(__mypyc_self__, instance, owner): __mypyc_self__, instance, owner, r0 :: object r1 :: bit @@ -490,48 +454,43 @@ L2: def b_a_obj.__call__(__mypyc_self__): __mypyc_self__ :: __main__.b_a_obj r0 :: __main__.a_env - r1, b :: object - r2 :: __main__.b_a_env - r3 :: bool - r4, r5 :: int - r6 :: bool - r7 :: __main__.c_a_b_obj - r8, r9 :: bool - r10, r11 :: object - r12 :: int + r1 :: __main__.b_a_env + r2 :: bool + r3, r4 :: int + r5 :: bool + r6 :: __main__.c_a_b_obj + r7 :: bool + c, r8 :: object + r9 :: int L0: r0 = __mypyc_self__.__mypyc_env__ - r1 = r0.b - b = r1 - r2 = b_a_env() - r2.__mypyc_env__ = r0; r3 = is_error - r4 = r0.x - r5 = CPyTagged_Add(r4, 2) - r0.x = r5; r6 = is_error - r7 = c_a_b_obj() - r7.__mypyc_env__ = r2; r8 = is_error - r2.c = r7; r9 = is_error - r10 = r2.c - r11 = PyObject_CallFunctionObjArgs(r10, 0) - r12 = unbox(int, r11) - return r12 + r1 = b_a_env() + r1.__mypyc_env__ = r0; r2 = is_error + r3 = r0.x + r4 = CPyTagged_Add(r3, 2) + r0.x = r4; r5 = is_error + r6 = c_a_b_obj() + r6.__mypyc_env__ = r1; r7 = is_error + c = r6 + r8 = PyObject_CallFunctionObjArgs(c, 0) + r9 = unbox(int, r8) + return r9 def a(): r0 :: __main__.a_env r1 :: 
bool r2 :: __main__.b_a_obj - r3, r4 :: bool - r5, r6 :: object - r7 :: int + r3 :: bool + b, r4 :: object + r5 :: int L0: r0 = a_env() r0.x = 2; r1 = is_error r2 = b_a_obj() r2.__mypyc_env__ = r0; r3 = is_error - r0.b = r2; r4 = is_error - r5 = r0.b - r6 = PyObject_CallFunctionObjArgs(r5, 0) - r7 = unbox(int, r6) - return r7 + b = r2 + r4 = PyObject_CallFunctionObjArgs(b, 0) + r5 = unbox(int, r4) + return r5 [case testNestedFunctionInsideStatements] def f(flag: bool) -> str: @@ -559,14 +518,11 @@ L2: def inner_f_obj.__call__(__mypyc_self__): __mypyc_self__ :: __main__.inner_f_obj r0 :: __main__.f_env - r1, inner :: object - r2 :: str + r1 :: str L0: r0 = __mypyc_self__.__mypyc_env__ - r1 = r0.inner - inner = r1 - r2 = 'f.inner: first definition' - return r2 + r1 = 'f.inner: first definition' + return r1 def inner_f_obj_0.__get__(__mypyc_self__, instance, owner): __mypyc_self__, instance, owner, r0 :: object r1 :: bit @@ -583,40 +539,37 @@ L2: def inner_f_obj_0.__call__(__mypyc_self__): __mypyc_self__ :: __main__.inner_f_obj_0 r0 :: __main__.f_env - r1, inner :: object - r2 :: str + r1 :: str L0: r0 = __mypyc_self__.__mypyc_env__ - r1 = r0.inner - inner = r1 - r2 = 'f.inner: second definition' - return r2 + r1 = 'f.inner: second definition' + return r1 def f(flag): flag :: bool r0 :: __main__.f_env r1 :: __main__.inner_f_obj - r2, r3 :: bool - r4 :: __main__.inner_f_obj_0 - r5, r6 :: bool - r7, r8 :: object - r9 :: str + r2 :: bool + inner :: object + r3 :: __main__.inner_f_obj_0 + r4 :: bool + r5 :: object + r6 :: str L0: r0 = f_env() if flag goto L1 else goto L2 :: bool L1: r1 = inner_f_obj() r1.__mypyc_env__ = r0; r2 = is_error - r0.inner = r1; r3 = is_error + inner = r1 goto L3 L2: - r4 = inner_f_obj_0() - r4.__mypyc_env__ = r0; r5 = is_error - r0.inner = r4; r6 = is_error + r3 = inner_f_obj_0() + r3.__mypyc_env__ = r0; r4 = is_error + inner = r3 L3: - r7 = r0.inner - r8 = PyObject_CallFunctionObjArgs(r7, 0) - r9 = cast(str, r8) - return r9 + r5 = 
PyObject_CallFunctionObjArgs(inner, 0) + r6 = cast(str, r5) + return r6 [case testNestedFunctionsCallEachOther] from typing import Callable, List @@ -652,15 +605,12 @@ L2: def foo_f_obj.__call__(__mypyc_self__): __mypyc_self__ :: __main__.foo_f_obj r0 :: __main__.f_env - r1, foo :: object - r2, r3 :: int + r1, r2 :: int L0: r0 = __mypyc_self__.__mypyc_env__ - r1 = r0.foo - foo = r1 - r2 = r0.a - r3 = CPyTagged_Add(r2, 2) - return r3 + r1 = r0.a + r2 = CPyTagged_Add(r1, 2) + return r2 def bar_f_obj.__get__(__mypyc_self__, instance, owner): __mypyc_self__, instance, owner, r0 :: object r1 :: bit @@ -677,16 +627,14 @@ L2: def bar_f_obj.__call__(__mypyc_self__): __mypyc_self__ :: __main__.bar_f_obj r0 :: __main__.f_env - r1, bar, r2, r3 :: object - r4 :: int + r1, r2 :: object + r3 :: int L0: r0 = __mypyc_self__.__mypyc_env__ - r1 = r0.bar - bar = r1 - r2 = r0.foo - r3 = PyObject_CallFunctionObjArgs(r2, 0) - r4 = unbox(int, r3) - return r4 + r1 = r0.foo + r2 = PyObject_CallFunctionObjArgs(r1, 0) + r3 = unbox(int, r2) + return r3 def baz_f_obj.__get__(__mypyc_self__, instance, owner): __mypyc_self__, instance, owner, r0 :: object r1 :: bit @@ -704,23 +652,21 @@ def baz_f_obj.__call__(__mypyc_self__, n): __mypyc_self__ :: __main__.baz_f_obj n :: int r0 :: __main__.f_env - r1, baz :: object - r2 :: bit - r3 :: int - r4, r5 :: object + r1 :: bit + r2 :: int + r3, r4, r5 :: object r6, r7 :: int L0: r0 = __mypyc_self__.__mypyc_env__ - r1 = r0.baz - baz = r1 - r2 = n == 0 - if r2 goto L1 else goto L2 :: bool + r1 = n == 0 + if r1 goto L1 else goto L2 :: bool L1: return 0 L2: - r3 = CPyTagged_Subtract(n, 2) - r4 = box(int, r3) - r5 = PyObject_CallFunctionObjArgs(baz, r4, 0) + r2 = CPyTagged_Subtract(n, 2) + r3 = r0.baz + r4 = box(int, r2) + r5 = PyObject_CallFunctionObjArgs(r3, r4, 0) r6 = unbox(int, r5) r7 = CPyTagged_Add(n, r6) return r7 From 1c218ea2c674d7a06c8ed4c2f95855f1d3fd26da Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 20 Oct 2023 00:13:46 +0100 Subject: 
[PATCH 106/144] Fix daemon false positives related to module-level __getattr__ (#16292) In some cases, mypy daemon could generate false positives about imports targeting packages with a module-level `__getattr__` methods. The root cause was that the `mypy.build.in_partial_package` function would leave a partially initialized module in the `modules` dictionary of `BuildManager`, which could probably cause all sorts of confusion. I fixed this by making sure that ASTs related to temporary `State` objects don't get persisted. Also updated a test case to properly delete a package -- an empty directory is now actually a valid namespace package, so to delete a package we should delete the directory, not just the files inside it. --- mypy/build.py | 10 +++++---- test-data/unit/fine-grained-modules.test | 6 ++---- test-data/unit/fine-grained.test | 27 ++++++++++++++++++++++++ 3 files changed, 35 insertions(+), 8 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index b481cc6ad0dc..1385021aac48 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -1991,7 +1991,7 @@ def __init__( raise ModuleNotFound # Parse the file (and then some) to get the dependencies. - self.parse_file() + self.parse_file(temporary=temporary) self.compute_dependencies() @property @@ -2109,7 +2109,7 @@ def fix_cross_refs(self) -> None: # Methods for processing modules from source code. - def parse_file(self) -> None: + def parse_file(self, *, temporary: bool = False) -> None: """Parse file and run first pass of semantic analysis. Everything done here is local to the file. 
Don't depend on imported @@ -2194,12 +2194,14 @@ def parse_file(self) -> None: else: self.early_errors = manager.ast_cache[self.id][1] - modules[self.id] = self.tree + if not temporary: + modules[self.id] = self.tree if not cached: self.semantic_analysis_pass1() - self.check_blockers() + if not temporary: + self.check_blockers() manager.ast_cache[self.id] = (self.tree, self.early_errors) diff --git a/test-data/unit/fine-grained-modules.test b/test-data/unit/fine-grained-modules.test index 163e859276cb..f28dbaa1113b 100644 --- a/test-data/unit/fine-grained-modules.test +++ b/test-data/unit/fine-grained-modules.test @@ -837,15 +837,13 @@ p.a.f(1) [file p/__init__.py] [file p/a.py] def f(x: str) -> None: pass -[delete p/__init__.py.2] -[delete p/a.py.2] -def f(x: str) -> None: pass +[delete p.2] [out] main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" == main:1: error: Cannot find implementation or library stub for module named "p.a" main:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports -main:2: error: "object" has no attribute "a" +main:1: error: Cannot find implementation or library stub for module named "p" [case testDeletePackage2] import p diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 68f72a2aa992..cb24467cbf41 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -10337,3 +10337,30 @@ b.py:1: note: Use "-> None" if function does not return a value == a.py:1: error: Function is missing a return type annotation a.py:1: note: Use "-> None" if function does not return a value + +[case testModuleLevelGetAttrInStub] +import stub +import a +import b + +[file stub/__init__.pyi] +s: str +def __getattr__(self): pass + +[file a.py] + +[file a.py.2] +from stub import x +from stub.pkg import y +from stub.pkg.sub import z + +[file b.py] + +[file b.py.3] +from stub import s +reveal_type(s) + +[out] +== +== +b.py:2: note: Revealed type is 
"builtins.str" From 5506cba158d76cd11697d1178d73a552aa617b7c Mon Sep 17 00:00:00 2001 From: Ihor <31508183+nautics889@users.noreply.github.com> Date: Fri, 20 Oct 2023 23:00:39 +0300 Subject: [PATCH 107/144] fix: remove redundant `.format()` (#16288) Originally this was added in 040f3ab revision at 562th line. --- mypyc/codegen/emit.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypyc/codegen/emit.py b/mypyc/codegen/emit.py index 7d41ee7e162b..fce6896e8d11 100644 --- a/mypyc/codegen/emit.py +++ b/mypyc/codegen/emit.py @@ -686,7 +686,7 @@ def emit_cast( if likely: check = f"(likely{check})" self.emit_arg_check(src, dest, typ, check, optional) - self.emit_lines(f" {dest} = {src};".format(dest, src), "else {") + self.emit_lines(f" {dest} = {src};", "else {") self.emit_cast_error_handler(error, src, dest, typ, raise_exception) self.emit_line("}") elif is_none_rprimitive(typ): From eecbcb981708bded48d9c17f5fd7ab843b57b2c0 Mon Sep 17 00:00:00 2001 From: Ganden Schaffner Date: Fri, 20 Oct 2023 16:29:04 -0700 Subject: [PATCH 108/144] Correctly recognize `typing_extensions.NewType` (#16298) fixes #16297. since the `.+_NAMES` constants in `types.py` are each referenced multiple times while other examples like this (i.e. a `.+_NAMES` tuple/set used only once) are inlined, I've inlined this one. 
--- mypy/semanal_newtype.py | 2 +- test-data/unit/check-newtype.test | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/mypy/semanal_newtype.py b/mypy/semanal_newtype.py index a8380309d310..16c6c024800d 100644 --- a/mypy/semanal_newtype.py +++ b/mypy/semanal_newtype.py @@ -147,7 +147,7 @@ def analyze_newtype_declaration(self, s: AssignmentStmt) -> tuple[str | None, Ca and isinstance(s.lvalues[0], NameExpr) and isinstance(s.rvalue, CallExpr) and isinstance(s.rvalue.callee, RefExpr) - and s.rvalue.callee.fullname == "typing.NewType" + and (s.rvalue.callee.fullname in ("typing.NewType", "typing_extensions.NewType")) ): name = s.lvalues[0].name diff --git a/test-data/unit/check-newtype.test b/test-data/unit/check-newtype.test index 0ff6b8396fa7..99fdf5fe7ca3 100644 --- a/test-data/unit/check-newtype.test +++ b/test-data/unit/check-newtype.test @@ -379,3 +379,10 @@ N = NewType('N', XXX) # E: Argument 2 to NewType(...) must be subclassable (got # E: Name "XXX" is not defined x: List[Union[N, int]] [builtins fixtures/list.pyi] + +[case testTypingExtensionsNewType] +# flags: --python-version 3.7 +from typing_extensions import NewType +N = NewType("N", int) +x: N +[builtins fixtures/tuple.pyi] From ff8cebbcf5094012ee914308dc4f9ecaa7f4684c Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 21 Oct 2023 00:23:00 -0700 Subject: [PATCH 109/144] Lock test dependencies (#16283) This was discussed in the contributor meetup today. This is a simple solution that requires very few changes. If you want to upgrade the lock file, you can pass `--upgrade` or just delete it and regenerate. 
--- .github/workflows/test.yml | 4 +- MANIFEST.in | 1 + test-requirements.in | 19 +++++++ test-requirements.txt | 101 +++++++++++++++++++++++++++++++------ 4 files changed, 108 insertions(+), 17 deletions(-) create mode 100644 test-requirements.in diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index afa5d5823ea9..86704aca2f91 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -133,7 +133,7 @@ jobs: ./misc/build-debug-python.sh $PYTHONVERSION $PYTHONDIR $VENV source $VENV/bin/activate - name: Install tox - run: pip install --upgrade 'setuptools!=50' tox==4.11.0 + run: pip install setuptools==68.2.2 tox==4.11.0 - name: Compiled with mypyc if: ${{ matrix.test_mypyc }} run: | @@ -185,7 +185,7 @@ jobs: default: 3.11.1 command: python -c "import platform; print(f'{platform.architecture()=} {platform.machine()=}');" - name: Install tox - run: pip install --upgrade 'setuptools!=50' tox==4.11.0 + run: pip install setuptools==68.2.2 tox==4.11.0 - name: Setup tox environment run: tox run -e py --notest - name: Test diff --git a/MANIFEST.in b/MANIFEST.in index a1c15446de3f..3ae340c7bd5e 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -31,6 +31,7 @@ graft mypyc/doc # files necessary for testing sdist include mypy-requirements.txt include build-requirements.txt +include test-requirements.in include test-requirements.txt include mypy_self_check.ini prune misc diff --git a/test-requirements.in b/test-requirements.in new file mode 100644 index 000000000000..bab3ece29c02 --- /dev/null +++ b/test-requirements.in @@ -0,0 +1,19 @@ +# If you change this file (or mypy-requirements.txt or build-requirements.txt), please run: +# pip-compile --output-file=test-requirements.txt --strip-extras --allow-unsafe test-requirements.in + +-r mypy-requirements.txt +-r build-requirements.txt +attrs>=18.0 +black==23.9.1 # must match version in .pre-commit-config.yaml +filelock>=3.3.0 +# lxml 4.9.3 switched to manylinux_2_28, the wheel builder still uses 
manylinux2014 +lxml>=4.9.1,<4.9.3; (python_version<'3.11' or sys_platform!='win32') and python_version<'3.12' +pre-commit +pre-commit-hooks==4.5.0 +psutil>=4.0 +pytest>=7.4.0 +pytest-xdist>=1.34.0 +pytest-cov>=2.10.0 +ruff==0.1.0 # must match version in .pre-commit-config.yaml +setuptools>=65.5.1 +tomli>=1.1.0 # needed even on py311+ so the self check passes with --python-version 3.7 diff --git a/test-requirements.txt b/test-requirements.txt index a1fa98917872..3bb9cf29635f 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,16 +1,87 @@ --r mypy-requirements.txt --r build-requirements.txt -attrs>=18.0 -black==23.9.1 # must match version in .pre-commit-config.yaml -filelock>=3.3.0 -# lxml 4.9.3 switched to manylinux_2_28, the wheel builder still uses manylinux2014 -lxml>=4.9.1,<4.9.3; (python_version<'3.11' or sys_platform!='win32') and python_version<'3.12' -pre-commit +# +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: +# +# pip-compile --allow-unsafe --output-file=test-requirements.txt --strip-extras test-requirements.in +# +attrs==23.1.0 + # via -r test-requirements.in +black==23.9.1 + # via -r test-requirements.in +cfgv==3.4.0 + # via pre-commit +click==8.1.7 + # via black +coverage==7.3.2 + # via pytest-cov +distlib==0.3.7 + # via virtualenv +execnet==2.0.2 + # via pytest-xdist +filelock==3.12.4 + # via + # -r test-requirements.in + # virtualenv +identify==2.5.30 + # via pre-commit +iniconfig==2.0.0 + # via pytest +lxml==4.9.2 ; (python_version < "3.11" or sys_platform != "win32") and python_version < "3.12" + # via -r test-requirements.in +mypy-extensions==1.0.0 + # via + # -r mypy-requirements.txt + # black +nodeenv==1.8.0 + # via pre-commit +packaging==23.2 + # via + # black + # pytest +pathspec==0.11.2 + # via black +platformdirs==3.11.0 + # via + # black + # virtualenv +pluggy==1.3.0 + # via pytest +pre-commit==3.5.0 + # via -r test-requirements.in pre-commit-hooks==4.5.0 -psutil>=4.0 
-pytest>=7.4.0 -pytest-xdist>=1.34.0 -pytest-cov>=2.10.0 -ruff==0.1.0 # must match version in .pre-commit-config.yaml -setuptools>=65.5.1 -tomli>=1.1.0 # needed even on py311+ so the self check passes with --python-version 3.7 + # via -r test-requirements.in +psutil==5.9.6 + # via -r test-requirements.in +pytest==7.4.2 + # via + # -r test-requirements.in + # pytest-cov + # pytest-xdist +pytest-cov==4.1.0 + # via -r test-requirements.in +pytest-xdist==3.3.1 + # via -r test-requirements.in +pyyaml==6.0.1 + # via pre-commit +ruamel-yaml==0.17.40 + # via pre-commit-hooks +ruamel-yaml-clib==0.2.8 + # via ruamel-yaml +ruff==0.1.0 + # via -r test-requirements.in +tomli==2.0.1 + # via -r test-requirements.in +types-psutil==5.9.5.17 + # via -r build-requirements.txt +types-setuptools==68.2.0.0 + # via -r build-requirements.txt +typing-extensions==4.8.0 + # via -r mypy-requirements.txt +virtualenv==20.24.5 + # via pre-commit + +# The following packages are considered to be unsafe in a requirements file: +setuptools==68.2.2 + # via + # -r test-requirements.in + # nodeenv From a3af87bf252f0ed0c6e0f977ad4079418b37a70f Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sat, 21 Oct 2023 16:41:10 +0100 Subject: [PATCH 110/144] Narrow tuple types using len() (#16237) Fixes #1178 Supersedes #10367 This is includes implementation for fixed length tuples, homogeneous tuples, and variadic tuples (and combinations of those). Generally implementation is straightforward. Some notes: * Unfortunately, it is necessary to add a new attribute `min_len` to `TypeVarTupleType`, which is probably fine, as it doesn't have that many attributes so far. * Supporting more general use cases (like `>` comparisons for variadic tuples) can cause quick proliferation of unions. I added two mechanisms to counteract this: not applying the narrowing if the integer literal in comparison is itself large, and collapsing unions of tuples into a single tuple (if possible) after we are done with the narrowing. 
This looks a bit arbitrary, but I think it is important to have. * Main missing feature here is probably not inferring type information from indirect comparisons like `len(x) > foo() > 1`. Supporting this kind of things in full generality is cumbersome, and we may add cases that turn out to be important later. * Note I am quite careful with indexing "inside" a `TypeVarTuple`, it is not really needed now, but I wanted to make everything future proof, so that it will be easy to add support for upper bounds for `TypeVarTuple`s, like `Nums = TypeVarTuple("Nums", bound=tuple[float, ...])`. * I also fix couple existing inconsistencies with `Any` handling in type narrowing. It looks like they stem from the old incorrect logic that meet of `Any` and `X` should be `X`, while in fact it should be `Any`. These fixes are not strictly necessary, but otherwise there may be new false positives, because I introduce a bunch of additional type narrowing scenarios here. cc @hatal175, thanks for the test cases, and for your nice first attempt to implement this! 
Co-authored-by: Tal Hayon --- mypy/binder.py | 83 ++++ mypy/checker.py | 359 +++++++++++++++- mypy/checkexpr.py | 53 ++- mypy/meet.py | 6 +- mypy/operators.py | 23 + mypy/options.py | 3 +- mypy/subtypes.py | 2 +- mypy/test/testcheck.py | 2 +- mypy/typeops.py | 2 +- mypy/types.py | 27 +- mypy_self_check.ini | 1 + test-data/unit/check-expressions.test | 13 + test-data/unit/check-namedtuple.test | 2 +- test-data/unit/check-narrowing.test | 576 ++++++++++++++++++++++++++ test-data/unit/fixtures/len.pyi | 39 ++ test-data/unit/lib-stub/typing.pyi | 1 + 16 files changed, 1154 insertions(+), 38 deletions(-) create mode 100644 test-data/unit/fixtures/len.pyi diff --git a/mypy/binder.py b/mypy/binder.py index 8a68f24f661e..3b67d09f16c3 100644 --- a/mypy/binder.py +++ b/mypy/binder.py @@ -12,12 +12,17 @@ from mypy.subtypes import is_same_type, is_subtype from mypy.types import ( AnyType, + Instance, NoneType, PartialType, + ProperType, + TupleType, Type, TypeOfAny, TypeType, UnionType, + UnpackType, + find_unpack_in_list, get_proper_type, ) from mypy.typevars import fill_typevars_with_any @@ -213,6 +218,24 @@ def update_from_options(self, frames: list[Frame]) -> bool: for other in resulting_values[1:]: assert other is not None type = join_simple(self.declarations[key], type, other) + # Try simplifying resulting type for unions involving variadic tuples. + # Technically, everything is still valid without this step, but if we do + # not do this, this may create long unions after exiting an if check like: + # x: tuple[int, ...] + # if len(x) < 10: + # ... + # We want the type of x to be tuple[int, ...] after this block (if it is + # still equivalent to such type). + if isinstance(type, UnionType): + type = collapse_variadic_union(type) + if isinstance(type, ProperType) and isinstance(type, UnionType): + # Simplify away any extra Any's that were added to the declared + # type when popping a frame. 
+ simplified = UnionType.make_union( + [t for t in type.items if not isinstance(get_proper_type(t), AnyType)] + ) + if simplified == self.declarations[key]: + type = simplified if current_value is None or not is_same_type(type, current_value): self._put(key, type) changed = True @@ -453,3 +476,63 @@ def get_declaration(expr: BindableExpression) -> Type | None: elif isinstance(expr.node, TypeInfo): return TypeType(fill_typevars_with_any(expr.node)) return None + + +def collapse_variadic_union(typ: UnionType) -> Type: + """Simplify a union involving variadic tuple if possible. + + This will collapse a type like e.g. + tuple[X, Z] | tuple[X, Y, Z] | tuple[X, Y, Y, *tuple[Y, ...], Z] + back to + tuple[X, *tuple[Y, ...], Z] + which is equivalent, but much simpler form of the same type. + """ + tuple_items = [] + other_items = [] + for t in typ.items: + p_t = get_proper_type(t) + if isinstance(p_t, TupleType): + tuple_items.append(p_t) + else: + other_items.append(t) + if len(tuple_items) <= 1: + # This type cannot be simplified further. + return typ + tuple_items = sorted(tuple_items, key=lambda t: len(t.items)) + first = tuple_items[0] + last = tuple_items[-1] + unpack_index = find_unpack_in_list(last.items) + if unpack_index is None: + return typ + unpack = last.items[unpack_index] + assert isinstance(unpack, UnpackType) + unpacked = get_proper_type(unpack.type) + if not isinstance(unpacked, Instance): + return typ + assert unpacked.type.fullname == "builtins.tuple" + suffix = last.items[unpack_index + 1 :] + + # Check that first item matches the expected pattern and infer prefix. + if len(first.items) < len(suffix): + return typ + if suffix and first.items[-len(suffix) :] != suffix: + return typ + if suffix: + prefix = first.items[: -len(suffix)] + else: + prefix = first.items + + # Check that all middle types match the expected pattern as well. 
+ arg = unpacked.args[0] + for i, it in enumerate(tuple_items[1:-1]): + if it.items != prefix + [arg] * (i + 1) + suffix: + return typ + + # Check the last item (the one with unpack), and choose an appropriate simplified type. + if last.items != prefix + [arg] * (len(typ.items) - 1) + [unpack] + suffix: + return typ + if len(first.items) == 0: + simplified: Type = unpacked.copy_modified() + else: + simplified = TupleType(prefix + [unpack] + suffix, fallback=last.partial_fallback) + return UnionType.make_union([simplified] + other_items) diff --git a/mypy/checker.py b/mypy/checker.py index e1b65a95ae98..02bab37aa13f 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -43,7 +43,7 @@ from mypy.expandtype import expand_self_type, expand_type, expand_type_by_instance from mypy.literals import Key, extract_var_from_literal_hash, literal, literal_hash from mypy.maptype import map_instance_to_supertype -from mypy.meet import is_overlapping_erased_types, is_overlapping_types +from mypy.meet import is_overlapping_erased_types, is_overlapping_types, meet_types from mypy.message_registry import ErrorMessage from mypy.messages import ( SUGGESTED_TEST_FIXTURES, @@ -134,7 +134,8 @@ YieldExpr, is_final_node, ) -from mypy.options import Options +from mypy.operators import flip_ops, int_op_to_method, neg_ops +from mypy.options import PRECISE_TUPLE_TYPES, Options from mypy.patterns import AsPattern, StarredPattern from mypy.plugin import CheckerPluginInterface, Plugin from mypy.plugins import dataclasses as dataclasses_plugin @@ -228,6 +229,9 @@ DEFAULT_LAST_PASS: Final = 1 # Pass numbers start at 0 +# Maximum length of fixed tuple types inferred when narrowing from variadic tuples. 
+MAX_PRECISE_TUPLE_SIZE: Final = 8 + DeferredNodeType: _TypeAlias = Union[FuncDef, LambdaExpr, OverloadedFuncDef, Decorator] FineGrainedDeferredNodeType: _TypeAlias = Union[FuncDef, MypyFile, OverloadedFuncDef] @@ -5829,7 +5833,15 @@ def has_no_custom_eq_checks(t: Type) -> bool: partial_type_maps.append((if_map, else_map)) - return reduce_conditional_maps(partial_type_maps) + # If we have found non-trivial restrictions from the regular comparisons, + # then return soon. Otherwise try to infer restrictions involving `len(x)`. + # TODO: support regular and len() narrowing in the same chain. + if any(m != ({}, {}) for m in partial_type_maps): + return reduce_conditional_maps(partial_type_maps) + else: + # Use meet for `and` maps to get correct results for chained checks + # like `if 1 < len(x) < 4: ...` + return reduce_conditional_maps(self.find_tuple_len_narrowing(node), use_meet=True) elif isinstance(node, AssignmentExpr): if_map = {} else_map = {} @@ -5860,7 +5872,10 @@ def has_no_custom_eq_checks(t: Type) -> bool: # and false if at least one of e1 and e2 is false. return ( and_conditional_maps(left_if_vars, right_if_vars), - or_conditional_maps(left_else_vars, right_else_vars), + # Note that if left else type is Any, we can't add any additional + # types to it, since the right maps were computed assuming + # the left is True, which may be not the case in the else branch. + or_conditional_maps(left_else_vars, right_else_vars, coalesce_any=True), ) elif isinstance(node, OpExpr) and node.op == "or": left_if_vars, left_else_vars = self.find_isinstance_check(node.left) @@ -5875,6 +5890,27 @@ def has_no_custom_eq_checks(t: Type) -> bool: elif isinstance(node, UnaryExpr) and node.op == "not": left, right = self.find_isinstance_check(node.expr) return right, left + elif ( + literal(node) == LITERAL_TYPE + and self.has_type(node) + and self.can_be_narrowed_with_len(self.lookup_type(node)) + # Only translate `if x` to `if len(x) > 0` when possible. 
+ and not custom_special_method(self.lookup_type(node), "__bool__") + and self.options.strict_optional + ): + # Combine a `len(x) > 0` check with the default logic below. + yes_type, no_type = self.narrow_with_len(self.lookup_type(node), ">", 0) + if yes_type is not None: + yes_type = true_only(yes_type) + else: + yes_type = UninhabitedType() + if no_type is not None: + no_type = false_only(no_type) + else: + no_type = UninhabitedType() + if_map = {node: yes_type} if not isinstance(yes_type, UninhabitedType) else None + else_map = {node: no_type} if not isinstance(no_type, UninhabitedType) else None + return if_map, else_map # Restrict the type of the variable to True-ish/False-ish in the if and else branches # respectively @@ -6221,6 +6257,287 @@ def refine_away_none_in_comparison( return if_map, {} + def is_len_of_tuple(self, expr: Expression) -> bool: + """Is this expression a `len(x)` call where x is a tuple or union of tuples?""" + if not isinstance(expr, CallExpr): + return False + if not refers_to_fullname(expr.callee, "builtins.len"): + return False + if len(expr.args) != 1: + return False + expr = expr.args[0] + if literal(expr) != LITERAL_TYPE: + return False + if not self.has_type(expr): + return False + return self.can_be_narrowed_with_len(self.lookup_type(expr)) + + def can_be_narrowed_with_len(self, typ: Type) -> bool: + """Is this a type that can benefit from length check type restrictions? + + Currently supported types are TupleTypes, Instances of builtins.tuple, and + unions involving such types. + """ + if custom_special_method(typ, "__len__"): + # If user overrides builtin behavior, we can't do anything. + return False + p_typ = get_proper_type(typ) + # Note: we are conservative about tuple subclasses, because some code may rely on + # the fact that tuple_type of fallback TypeInfo matches the original TupleType. 
+ if isinstance(p_typ, TupleType): + if any(isinstance(t, UnpackType) for t in p_typ.items): + return p_typ.partial_fallback.type.fullname == "builtins.tuple" + return True + if isinstance(p_typ, Instance): + return p_typ.type.has_base("builtins.tuple") + if isinstance(p_typ, UnionType): + return any(self.can_be_narrowed_with_len(t) for t in p_typ.items) + return False + + def literal_int_expr(self, expr: Expression) -> int | None: + """Is this expression an int literal, or a reference to an int constant? + + If yes, return the corresponding int value, otherwise return None. + """ + if not self.has_type(expr): + return None + expr_type = self.lookup_type(expr) + expr_type = coerce_to_literal(expr_type) + proper_type = get_proper_type(expr_type) + if not isinstance(proper_type, LiteralType): + return None + if not isinstance(proper_type.value, int): + return None + return proper_type.value + + def find_tuple_len_narrowing(self, node: ComparisonExpr) -> list[tuple[TypeMap, TypeMap]]: + """Top-level logic to find type restrictions from a length check on tuples. + + We try to detect `if` checks like the following: + x: tuple[int, int] | tuple[int, int, int] + y: tuple[int, int] | tuple[int, int, int] + if len(x) == len(y) == 2: + a, b = x # OK + c, d = y # OK + + z: tuple[int, ...] + if 1 < len(z) < 4: + x = z # OK + and report corresponding type restrictions to the binder. + """ + # First step: group consecutive `is` and `==` comparisons together. + # This is essentially a simplified version of group_comparison_operands(), + # tuned to the len()-like checks. Note that we don't propagate indirect + # restrictions like e.g. `len(x) > foo() > 1` yet, since it is tricky. + # TODO: propagate indirect len() comparison restrictions. 
+ chained = [] + last_group = set() + for op, left, right in node.pairwise(): + if isinstance(left, AssignmentExpr): + left = left.value + if isinstance(right, AssignmentExpr): + right = right.value + if op in ("is", "=="): + last_group.add(left) + last_group.add(right) + else: + if last_group: + chained.append(("==", list(last_group))) + last_group = set() + if op in {"is not", "!=", "<", "<=", ">", ">="}: + chained.append((op, [left, right])) + if last_group: + chained.append(("==", list(last_group))) + + # Second step: infer type restrictions from each group found above. + type_maps = [] + for op, items in chained: + # TODO: support unions of literal types as len() comparison targets. + if not any(self.literal_int_expr(it) is not None for it in items): + continue + if not any(self.is_len_of_tuple(it) for it in items): + continue + + # At this step we know there is at least one len(x) and one literal in the group. + if op in ("is", "=="): + literal_values = set() + tuples = [] + for it in items: + lit = self.literal_int_expr(it) + if lit is not None: + literal_values.add(lit) + continue + if self.is_len_of_tuple(it): + assert isinstance(it, CallExpr) + tuples.append(it.args[0]) + if len(literal_values) > 1: + # More than one different literal value found, like 1 == len(x) == 2, + # so the corresponding branch is unreachable. + return [(None, {})] + size = literal_values.pop() + if size > MAX_PRECISE_TUPLE_SIZE: + # Avoid creating huge tuples from checks like if len(x) == 300. + continue + for tpl in tuples: + yes_type, no_type = self.narrow_with_len(self.lookup_type(tpl), op, size) + yes_map = None if yes_type is None else {tpl: yes_type} + no_map = None if no_type is None else {tpl: no_type} + type_maps.append((yes_map, no_map)) + else: + left, right = items + if self.is_len_of_tuple(right): + # Normalize `1 < len(x)` and similar as `len(x) > 1`. 
+ left, right = right, left + op = flip_ops.get(op, op) + r_size = self.literal_int_expr(right) + assert r_size is not None + if r_size > MAX_PRECISE_TUPLE_SIZE: + # Avoid creating huge unions from checks like if len(x) > 300. + continue + assert isinstance(left, CallExpr) + yes_type, no_type = self.narrow_with_len( + self.lookup_type(left.args[0]), op, r_size + ) + yes_map = None if yes_type is None else {left.args[0]: yes_type} + no_map = None if no_type is None else {left.args[0]: no_type} + type_maps.append((yes_map, no_map)) + return type_maps + + def narrow_with_len(self, typ: Type, op: str, size: int) -> tuple[Type | None, Type | None]: + """Dispatch tuple type narrowing logic depending on the kind of type we got.""" + typ = get_proper_type(typ) + if isinstance(typ, TupleType): + return self.refine_tuple_type_with_len(typ, op, size) + elif isinstance(typ, Instance): + return self.refine_instance_type_with_len(typ, op, size) + elif isinstance(typ, UnionType): + yes_types = [] + no_types = [] + other_types = [] + for t in typ.items: + if not self.can_be_narrowed_with_len(t): + other_types.append(t) + continue + yt, nt = self.narrow_with_len(t, op, size) + if yt is not None: + yes_types.append(yt) + if nt is not None: + no_types.append(nt) + yes_types += other_types + no_types += other_types + if yes_types: + yes_type = make_simplified_union(yes_types) + else: + yes_type = None + if no_types: + no_type = make_simplified_union(no_types) + else: + no_type = None + return yes_type, no_type + else: + assert False, "Unsupported type for len narrowing" + + def refine_tuple_type_with_len( + self, typ: TupleType, op: str, size: int + ) -> tuple[Type | None, Type | None]: + """Narrow a TupleType using length restrictions.""" + unpack_index = find_unpack_in_list(typ.items) + if unpack_index is None: + # For fixed length tuple situation is trivial, it is either reachable or not, + # depending on the current length, expected length, and the comparison op. 
+ method = int_op_to_method[op] + if method(typ.length(), size): + return typ, None + return None, typ + unpack = typ.items[unpack_index] + assert isinstance(unpack, UnpackType) + unpacked = get_proper_type(unpack.type) + if isinstance(unpacked, TypeVarTupleType): + # For tuples involving TypeVarTuple unpack we can't do much except + # inferring reachability, and recording the restrictions on TypeVarTuple + # for further "manual" use elsewhere. + min_len = typ.length() - 1 + unpacked.min_len + if op in ("==", "is"): + if min_len <= size: + return typ, typ + return None, typ + elif op in ("<", "<="): + if op == "<=": + size += 1 + if min_len < size: + prefix = typ.items[:unpack_index] + suffix = typ.items[unpack_index + 1 :] + # TODO: also record max_len to avoid false negatives? + unpack = UnpackType(unpacked.copy_modified(min_len=size - typ.length() + 1)) + return typ, typ.copy_modified(items=prefix + [unpack] + suffix) + return None, typ + else: + yes_type, no_type = self.refine_tuple_type_with_len(typ, neg_ops[op], size) + return no_type, yes_type + # Homogeneous variadic item is the case where we are most flexible. Essentially, + # we adjust the variadic item by "eating away" from it to satisfy the restriction. + assert isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple" + min_len = typ.length() - 1 + arg = unpacked.args[0] + prefix = typ.items[:unpack_index] + suffix = typ.items[unpack_index + 1 :] + if op in ("==", "is"): + if min_len <= size: + # TODO: return fixed union + prefixed variadic tuple for no_type? + return typ.copy_modified(items=prefix + [arg] * (size - min_len) + suffix), typ + return None, typ + elif op in ("<", "<="): + if op == "<=": + size += 1 + if min_len < size: + # Note: there is some ambiguity w.r.t. to where to put the additional + # items: before or after the unpack. However, such types are equivalent, + # so we always put them before for consistency. 
+ no_type = typ.copy_modified( + items=prefix + [arg] * (size - min_len) + [unpack] + suffix + ) + yes_items = [] + for n in range(size - min_len): + yes_items.append(typ.copy_modified(items=prefix + [arg] * n + suffix)) + return UnionType.make_union(yes_items, typ.line, typ.column), no_type + return None, typ + else: + yes_type, no_type = self.refine_tuple_type_with_len(typ, neg_ops[op], size) + return no_type, yes_type + + def refine_instance_type_with_len( + self, typ: Instance, op: str, size: int + ) -> tuple[Type | None, Type | None]: + """Narrow a homogeneous tuple using length restrictions.""" + base = map_instance_to_supertype(typ, self.lookup_typeinfo("builtins.tuple")) + arg = base.args[0] + # Again, we are conservative about subclasses until we gain more confidence. + allow_precise = ( + PRECISE_TUPLE_TYPES in self.options.enable_incomplete_feature + ) and typ.type.fullname == "builtins.tuple" + if op in ("==", "is"): + # TODO: return fixed union + prefixed variadic tuple for no_type? 
+ return TupleType(items=[arg] * size, fallback=typ), typ + elif op in ("<", "<="): + if op == "<=": + size += 1 + if allow_precise: + unpack = UnpackType(self.named_generic_type("builtins.tuple", [arg])) + no_type: Type | None = TupleType(items=[arg] * size + [unpack], fallback=typ) + else: + no_type = typ + if allow_precise: + items = [] + for n in range(size): + items.append(TupleType([arg] * n, fallback=typ)) + yes_type: Type | None = UnionType.make_union(items, typ.line, typ.column) + else: + yes_type = typ + return yes_type, no_type + else: + yes_type, no_type = self.refine_instance_type_with_len(typ, neg_ops[op], size) + return no_type, yes_type + # # Helpers # @@ -7168,7 +7485,7 @@ def builtin_item_type(tp: Type) -> Type | None: return None -def and_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap: +def and_conditional_maps(m1: TypeMap, m2: TypeMap, use_meet: bool = False) -> TypeMap: """Calculate what information we can learn from the truth of (e1 and e2) in terms of the information that we can learn from the truth of e1 and the truth of e2. @@ -7178,22 +7495,31 @@ def and_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap: # One of the conditions can never be true. return None # Both conditions can be true; combine the information. Anything - # we learn from either conditions's truth is valid. If the same + # we learn from either conditions' truth is valid. If the same # expression's type is refined by both conditions, we somewhat - # arbitrarily give precedence to m2. (In the future, we could use - # an intersection type.) + # arbitrarily give precedence to m2 unless m1 value is Any. + # In the future, we could use an intersection type or meet_types(). result = m2.copy() m2_keys = {literal_hash(n2) for n2 in m2} for n1 in m1: - if literal_hash(n1) not in m2_keys: + if literal_hash(n1) not in m2_keys or isinstance(get_proper_type(m1[n1]), AnyType): result[n1] = m1[n1] + if use_meet: + # For now, meet common keys only if specifically requested. 
+ # This is currently used for tuple types narrowing, where having + # a precise result is important. + for n1 in m1: + for n2 in m2: + if literal_hash(n1) == literal_hash(n2): + result[n1] = meet_types(m1[n1], m2[n2]) return result -def or_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap: +def or_conditional_maps(m1: TypeMap, m2: TypeMap, coalesce_any: bool = False) -> TypeMap: """Calculate what information we can learn from the truth of (e1 or e2) in terms of the information that we can learn from the truth of e1 and - the truth of e2. + the truth of e2. If coalesce_any is True, consider Any a supertype when + joining restrictions. """ if m1 is None: @@ -7208,11 +7534,16 @@ def or_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap: for n1 in m1: for n2 in m2: if literal_hash(n1) == literal_hash(n2): - result[n1] = make_simplified_union([m1[n1], m2[n2]]) + if coalesce_any and isinstance(get_proper_type(m1[n1]), AnyType): + result[n1] = m1[n1] + else: + result[n1] = make_simplified_union([m1[n1], m2[n2]]) return result -def reduce_conditional_maps(type_maps: list[tuple[TypeMap, TypeMap]]) -> tuple[TypeMap, TypeMap]: +def reduce_conditional_maps( + type_maps: list[tuple[TypeMap, TypeMap]], use_meet: bool = False +) -> tuple[TypeMap, TypeMap]: """Reduces a list containing pairs of if/else TypeMaps into a single pair. We "and" together all of the if TypeMaps and "or" together the else TypeMaps. 
So @@ -7243,7 +7574,7 @@ def reduce_conditional_maps(type_maps: list[tuple[TypeMap, TypeMap]]) -> tuple[T else: final_if_map, final_else_map = type_maps[0] for if_map, else_map in type_maps[1:]: - final_if_map = and_conditional_maps(final_if_map, if_map) + final_if_map = and_conditional_maps(final_if_map, if_map, use_meet=use_meet) final_else_map = or_conditional_maps(final_else_map, else_map) return final_if_map, final_else_map diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 1d5233170a10..2dc5a93a1de9 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -3643,6 +3643,14 @@ def dangerous_comparison( left = map_instance_to_supertype(left, abstract_set) right = map_instance_to_supertype(right, abstract_set) return self.dangerous_comparison(left.args[0], right.args[0]) + elif left.type.has_base("typing.Mapping") and right.type.has_base("typing.Mapping"): + # Similar to above: Mapping ignores the classes, it just compares items. + abstract_map = self.chk.lookup_typeinfo("typing.Mapping") + left = map_instance_to_supertype(left, abstract_map) + right = map_instance_to_supertype(right, abstract_map) + return self.dangerous_comparison( + left.args[0], right.args[0] + ) or self.dangerous_comparison(left.args[1], right.args[1]) elif left_name in ("builtins.list", "builtins.tuple") and right_name == left_name: return self.dangerous_comparison(left.args[0], right.args[0]) elif left_name in OVERLAPPING_BYTES_ALLOWLIST and right_name in ( @@ -4228,9 +4236,8 @@ def visit_index_with_type( else: self.chk.fail(message_registry.TUPLE_INDEX_OUT_OF_RANGE, e) if any(isinstance(t, UnpackType) for t in left_type.items): - self.chk.note( - f"Variadic tuple can have length {left_type.length() - 1}", e - ) + min_len = self.min_tuple_length(left_type) + self.chk.note(f"Variadic tuple can have length {min_len}", e) return AnyType(TypeOfAny.from_error) return make_simplified_union(out) else: @@ -4254,6 +4261,16 @@ def visit_index_with_type( e.method_type = method_type return 
result + def min_tuple_length(self, left: TupleType) -> int: + unpack_index = find_unpack_in_list(left.items) + if unpack_index is None: + return left.length() + unpack = left.items[unpack_index] + assert isinstance(unpack, UnpackType) + if isinstance(unpack.type, TypeVarTupleType): + return left.length() - 1 + unpack.type.min_len + return left.length() - 1 + def visit_tuple_index_helper(self, left: TupleType, n: int) -> Type | None: unpack_index = find_unpack_in_list(left.items) if unpack_index is None: @@ -4267,31 +4284,39 @@ def visit_tuple_index_helper(self, left: TupleType, n: int) -> Type | None: unpacked = get_proper_type(unpack.type) if isinstance(unpacked, TypeVarTupleType): # Usually we say that TypeVarTuple can't be split, be in case of - # indexing it seems benign to just return the fallback item, similar + # indexing it seems benign to just return the upper bound item, similar # to what we do when indexing a regular TypeVar. - middle = unpacked.tuple_fallback.args[0] + bound = get_proper_type(unpacked.upper_bound) + assert isinstance(bound, Instance) + assert bound.type.fullname == "builtins.tuple" + middle = bound.args[0] else: assert isinstance(unpacked, Instance) assert unpacked.type.fullname == "builtins.tuple" middle = unpacked.args[0] + + extra_items = self.min_tuple_length(left) - left.length() + 1 if n >= 0: - if n < unpack_index: - return left.items[n] - if n >= len(left.items) - 1: + if n >= self.min_tuple_length(left): # For tuple[int, *tuple[str, ...], int] we allow either index 0 or 1, # since variadic item may have zero items. 
return None + if n < unpack_index: + return left.items[n] return UnionType.make_union( - [middle] + left.items[unpack_index + 1 : n + 2], left.line, left.column + [middle] + + left.items[unpack_index + 1 : max(n - extra_items + 2, unpack_index + 1)], + left.line, + left.column, ) - n += len(left.items) - if n <= 0: + n += self.min_tuple_length(left) + if n < 0: # Similar to above, we only allow -1, and -2 for tuple[int, *tuple[str, ...], int] return None - if n > unpack_index: - return left.items[n] + if n >= unpack_index + extra_items: + return left.items[n - extra_items + 1] return UnionType.make_union( - left.items[n - 1 : unpack_index] + [middle], left.line, left.column + left.items[min(n, unpack_index) : unpack_index] + [middle], left.line, left.column ) def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr) -> Type: diff --git a/mypy/meet.py b/mypy/meet.py index 0fa500d32c30..e3645c7b5879 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -221,6 +221,8 @@ def get_possible_variants(typ: Type) -> list[Type]: return [typ.upper_bound] elif isinstance(typ, ParamSpecType): return [typ.upper_bound] + elif isinstance(typ, TypeVarTupleType): + return [typ.upper_bound] elif isinstance(typ, UnionType): return list(typ.items) elif isinstance(typ, Overloaded): @@ -694,8 +696,8 @@ def visit_param_spec(self, t: ParamSpecType) -> ProperType: return self.default(self.s) def visit_type_var_tuple(self, t: TypeVarTupleType) -> ProperType: - if self.s == t: - return self.s + if isinstance(self.s, TypeVarTupleType) and self.s.id == t.id: + return self.s if self.s.min_len > t.min_len else t else: return self.default(self.s) diff --git a/mypy/operators.py b/mypy/operators.py index 07ec5a24fa77..d1f050b58fae 100644 --- a/mypy/operators.py +++ b/mypy/operators.py @@ -101,3 +101,26 @@ reverse_op_method_set: Final = set(reverse_op_methods.values()) unary_op_methods: Final = {"-": "__neg__", "+": "__pos__", "~": "__invert__"} + +int_op_to_method: Final = { + "==": 
int.__eq__, + "is": int.__eq__, + "<": int.__lt__, + "<=": int.__le__, + "!=": int.__ne__, + "is not": int.__ne__, + ">": int.__gt__, + ">=": int.__ge__, +} + +flip_ops: Final = {"<": ">", "<=": ">=", ">": "<", ">=": "<="} +neg_ops: Final = { + "==": "!=", + "!=": "==", + "is": "is not", + "is not": "is", + "<": ">=", + "<=": ">", + ">": "<=", + ">=": "<", +} diff --git a/mypy/options.py b/mypy/options.py index 603ba79935ee..cb0464d4dc06 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -72,7 +72,8 @@ class BuildType: # Features that are currently incomplete/experimental TYPE_VAR_TUPLE: Final = "TypeVarTuple" UNPACK: Final = "Unpack" -INCOMPLETE_FEATURES: Final = frozenset((TYPE_VAR_TUPLE, UNPACK)) +PRECISE_TUPLE_TYPES: Final = "PreciseTupleTypes" +INCOMPLETE_FEATURES: Final = frozenset((TYPE_VAR_TUPLE, UNPACK, PRECISE_TUPLE_TYPES)) class Options: diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 638553883dd8..b79e0e628849 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -640,7 +640,7 @@ def visit_param_spec(self, left: ParamSpecType) -> bool: def visit_type_var_tuple(self, left: TypeVarTupleType) -> bool: right = self.right if isinstance(right, TypeVarTupleType) and right.id == left.id: - return True + return left.min_len >= right.min_len return self._is_subtype(left.upper_bound, self.right) def visit_unpack_type(self, left: UnpackType) -> bool: diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index 85fbe5dc2990..591421465a97 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -126,7 +126,7 @@ def run_case_once( options = parse_options(original_program_text, testcase, incremental_step) options.use_builtins_fixtures = True if not testcase.name.endswith("_no_incomplete"): - options.enable_incomplete_feature = [TYPE_VAR_TUPLE, UNPACK] + options.enable_incomplete_feature += [TYPE_VAR_TUPLE, UNPACK] options.show_traceback = True # Enable some options automatically based on test file name. 
diff --git a/mypy/typeops.py b/mypy/typeops.py index 37817933a397..dff43775fe3d 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -981,7 +981,7 @@ def custom_special_method(typ: Type, name: str, check_all: bool = False) -> bool method = typ.type.get(name) if method and isinstance(method.node, (SYMBOL_FUNCBASE_TYPES, Decorator, Var)): if method.node.info: - return not method.node.info.fullname.startswith("builtins.") + return not method.node.info.fullname.startswith(("builtins.", "typing.")) return False if isinstance(typ, UnionType): if check_all: diff --git a/mypy/types.py b/mypy/types.py index d0c19a08e60a..d08e9e7a890c 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -802,6 +802,8 @@ class TypeVarTupleType(TypeVarLikeType): See PEP646 for more information. """ + __slots__ = ("tuple_fallback", "min_len") + def __init__( self, name: str, @@ -813,9 +815,13 @@ def __init__( *, line: int = -1, column: int = -1, + min_len: int = 0, ) -> None: super().__init__(name, fullname, id, upper_bound, default, line=line, column=column) self.tuple_fallback = tuple_fallback + # This value is not settable by a user. It is an internal-only thing to support + # len()-narrowing of variadic tuples. 
+ self.min_len = min_len def serialize(self) -> JsonDict: assert not self.id.is_meta_var() @@ -827,6 +833,7 @@ def serialize(self) -> JsonDict: "upper_bound": self.upper_bound.serialize(), "tuple_fallback": self.tuple_fallback.serialize(), "default": self.default.serialize(), + "min_len": self.min_len, } @classmethod @@ -839,18 +846,19 @@ def deserialize(cls, data: JsonDict) -> TypeVarTupleType: deserialize_type(data["upper_bound"]), Instance.deserialize(data["tuple_fallback"]), deserialize_type(data["default"]), + min_len=data["min_len"], ) def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_type_var_tuple(self) def __hash__(self) -> int: - return hash(self.id) + return hash((self.id, self.min_len)) def __eq__(self, other: object) -> bool: if not isinstance(other, TypeVarTupleType): return NotImplemented - return self.id == other.id + return self.id == other.id and self.min_len == other.min_len def copy_modified( self, @@ -858,6 +866,7 @@ def copy_modified( id: Bogus[TypeVarId | int] = _dummy, upper_bound: Bogus[Type] = _dummy, default: Bogus[Type] = _dummy, + min_len: Bogus[int] = _dummy, **kwargs: Any, ) -> TypeVarTupleType: return TypeVarTupleType( @@ -869,6 +878,7 @@ def copy_modified( self.default if default is _dummy else default, line=self.line, column=self.column, + min_len=self.min_len if min_len is _dummy else min_len, ) @@ -2354,7 +2364,18 @@ def can_be_false_default(self) -> bool: # Corner case: it is a `NamedTuple` with `__bool__` method defined. # It can be anything: both `True` and `False`. return True - return self.length() == 0 + if self.length() == 0: + return True + if self.length() > 1: + return False + # Special case tuple[*Ts] may or may not be false. + item = self.items[0] + if not isinstance(item, UnpackType): + return False + if not isinstance(item.type, TypeVarTupleType): + # Non-normalized tuple[int, ...] can be false. 
+ return True + return item.type.min_len == 0 def can_be_any_bool(self) -> bool: return bool( diff --git a/mypy_self_check.ini b/mypy_self_check.ini index 6e1ad8187b7a..093926d4c415 100644 --- a/mypy_self_check.ini +++ b/mypy_self_check.ini @@ -10,6 +10,7 @@ python_version = 3.8 exclude = mypy/typeshed/|mypyc/test-data/|mypyc/lib-rt/ new_type_inference = True enable_error_code = ignore-without-code,redundant-expr +enable_incomplete_feature = PreciseTupleTypes show_error_code_links = True [mypy-mypy.visitor] diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index a3c1bc8795f2..4ac5512580d2 100644 --- a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -2365,6 +2365,19 @@ b"x" in data [builtins fixtures/primitives.pyi] [typing fixtures/typing-full.pyi] +[case testStrictEqualityWithDifferentMapTypes] +# flags: --strict-equality +from typing import Mapping + +class A(Mapping[int, str]): ... +class B(Mapping[int, str]): ... + +a: A +b: B +assert a == b +[builtins fixtures/dict.pyi] +[typing fixtures/typing-full.pyi] + [case testUnimportedHintAny] def f(x: Any) -> None: # E: Name "Any" is not defined \ # N: Did you forget to import it from "typing"? 
(Suggestion: "from typing import Any") diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test index 6e3628060617..9fa098b28dee 100644 --- a/test-data/unit/check-namedtuple.test +++ b/test-data/unit/check-namedtuple.test @@ -878,7 +878,7 @@ reveal_type(Child.class_method()) # N: Revealed type is "Tuple[builtins.str, fa [builtins fixtures/classmethod.pyi] [case testNamedTupleAsConditionalStrictOptionalDisabled] -# flags: --no-strict-optional +# flags: --no-strict-optional --warn-unreachable from typing import NamedTuple class C(NamedTuple): diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test index c86cffd453df..5b7fadf41c79 100644 --- a/test-data/unit/check-narrowing.test +++ b/test-data/unit/check-narrowing.test @@ -1334,3 +1334,579 @@ if isinstance(some, raw): else: reveal_type(some) # N: Revealed type is "Union[builtins.int, __main__.Base]" [builtins fixtures/dict.pyi] + +[case testNarrowingWithAnyOps] +from typing import Any + +class C: ... +class D(C): ... 
+tp: Any + +c: C +if isinstance(c, tp) or isinstance(c, D): + reveal_type(c) # N: Revealed type is "Union[Any, __main__.D]" +else: + reveal_type(c) # N: Revealed type is "__main__.C" +reveal_type(c) # N: Revealed type is "__main__.C" + +c1: C +if isinstance(c1, tp) and isinstance(c1, D): + reveal_type(c1) # N: Revealed type is "Any" +else: + reveal_type(c1) # N: Revealed type is "__main__.C" +reveal_type(c1) # N: Revealed type is "__main__.C" + +c2: C +if isinstance(c2, D) or isinstance(c2, tp): + reveal_type(c2) # N: Revealed type is "Union[__main__.D, Any]" +else: + reveal_type(c2) # N: Revealed type is "__main__.C" +reveal_type(c2) # N: Revealed type is "__main__.C" + +c3: C +if isinstance(c3, D) and isinstance(c3, tp): + reveal_type(c3) # N: Revealed type is "Any" +else: + reveal_type(c3) # N: Revealed type is "__main__.C" +reveal_type(c3) # N: Revealed type is "__main__.C" + +t: Any +if isinstance(t, (list, tuple)) and isinstance(t, tuple): + reveal_type(t) # N: Revealed type is "builtins.tuple[Any, ...]" +else: + reveal_type(t) # N: Revealed type is "Any" +reveal_type(t) # N: Revealed type is "Any" +[builtins fixtures/isinstancelist.pyi] + +[case testNarrowingLenItemAndLenCompare] +from typing import Any + +x: Any +if len(x) == x: + reveal_type(x) # N: Revealed type is "Any" +[builtins fixtures/len.pyi] + +[case testNarrowingLenTuple] +from typing import Tuple, Union + +VarTuple = Union[Tuple[int, int], Tuple[int, int, int]] + +x: VarTuple +a = b = c = 0 +if len(x) == 3: + a, b, c = x +else: + a, b = x + +if len(x) != 3: + a, b = x +else: + a, b, c = x +[builtins fixtures/len.pyi] + +[case testNarrowingLenHomogeneousTuple] +from typing import Tuple + +x: Tuple[int, ...] 
+if len(x) == 3: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.int]" +else: + reveal_type(x) # N: Revealed type is "builtins.tuple[builtins.int, ...]" + +if len(x) != 3: + reveal_type(x) # N: Revealed type is "builtins.tuple[builtins.int, ...]" +else: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.int]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenTypeUnaffected] +from typing import Union, List + +x: Union[str, List[int]] +if len(x) == 3: + reveal_type(x) # N: Revealed type is "Union[builtins.str, builtins.list[builtins.int]]" +else: + reveal_type(x) # N: Revealed type is "Union[builtins.str, builtins.list[builtins.int]]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenAnyListElseNotAffected] +from typing import Any + +def f(self, value: Any) -> Any: + if isinstance(value, list) and len(value) == 0: + reveal_type(value) # N: Revealed type is "builtins.list[Any]" + return value + reveal_type(value) # N: Revealed type is "Any" + return None +[builtins fixtures/len.pyi] + +[case testNarrowingLenMultiple] +from typing import Tuple, Union + +VarTuple = Union[Tuple[int, int], Tuple[int, int, int]] + +x: VarTuple +y: VarTuple +if len(x) == len(y) == 3: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.int]" + reveal_type(y) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.int]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenFinal] +from typing import Tuple, Union +from typing_extensions import Final + +VarTuple = Union[Tuple[int, int], Tuple[int, int, int]] + +x: VarTuple +fin: Final = 3 +if len(x) == fin: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.int]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenGreaterThan] +from typing import Tuple, Union + +VarTuple = Union[Tuple[int], Tuple[int, int], Tuple[int, int, int]] + +x: VarTuple +if len(x) > 1: + reveal_type(x) # N: Revealed type is 
"Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]" +else: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int]" + +if len(x) < 2: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int]" +else: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]" + +if len(x) >= 2: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]" +else: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int]" + +if len(x) <= 2: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int], Tuple[builtins.int, builtins.int]]" +else: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.int]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenBothSidesUnionTuples] +from typing import Tuple, Union + +VarTuple = Union[ + Tuple[int], + Tuple[int, int], + Tuple[int, int, int], + Tuple[int, int, int, int], +] + +x: VarTuple +if 2 <= len(x) <= 3: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]" +else: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int], Tuple[builtins.int, builtins.int, builtins.int, builtins.int]]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenGreaterThanHomogeneousTupleShort] +# flags: --enable-incomplete-feature=PreciseTupleTypes +from typing import Tuple + +VarTuple = Tuple[int, ...] 
+ +x: VarTuple +if len(x) < 3: + reveal_type(x) # N: Revealed type is "Union[Tuple[()], Tuple[builtins.int], Tuple[builtins.int, builtins.int]]" +else: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.int, Unpack[builtins.tuple[builtins.int, ...]]]" +reveal_type(x) # N: Revealed type is "builtins.tuple[builtins.int, ...]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenBiggerThanHomogeneousTupleLong] +# flags: --enable-incomplete-feature=PreciseTupleTypes +from typing import Tuple + +VarTuple = Tuple[int, ...] + +x: VarTuple +if len(x) < 30: + reveal_type(x) # N: Revealed type is "builtins.tuple[builtins.int, ...]" +else: + reveal_type(x) # N: Revealed type is "builtins.tuple[builtins.int, ...]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenBothSidesHomogeneousTuple] +# flags: --enable-incomplete-feature=PreciseTupleTypes +from typing import Tuple + +x: Tuple[int, ...] +if 1 < len(x) < 4: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]" +else: + reveal_type(x) # N: Revealed type is "Union[Tuple[()], Tuple[builtins.int], Tuple[builtins.int, builtins.int, builtins.int, builtins.int, Unpack[builtins.tuple[builtins.int, ...]]]]" +reveal_type(x) # N: Revealed type is "builtins.tuple[builtins.int, ...]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenUnionTupleUnreachable] +# flags: --warn-unreachable +from typing import Tuple, Union + +x: Union[Tuple[int, int], Tuple[int, int, int]] +if len(x) >= 4: + reveal_type(x) # E: Statement is unreachable +else: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]" + +if len(x) < 2: + reveal_type(x) # E: Statement is unreachable +else: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]" +[builtins fixtures/len.pyi] + +[case 
testNarrowingLenMixedTypes] +from typing import Tuple, List, Union + +x: Union[Tuple[int, int], Tuple[int, int, int], List[int]] +a = b = c = 0 +if len(x) == 3: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int, builtins.int], builtins.list[builtins.int]]" + a, b, c = x +else: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], builtins.list[builtins.int]]" + a, b = x + +if len(x) != 3: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], builtins.list[builtins.int]]" + a, b = x +else: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int, builtins.int], builtins.list[builtins.int]]" + a, b, c = x +[builtins fixtures/len.pyi] + +[case testNarrowingLenTypeVarTupleEquals] +from typing import Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +def foo(x: Tuple[int, Unpack[Ts], str]) -> None: + if len(x) == 5: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]" + else: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]" + + if len(x) != 5: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]" + else: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenTypeVarTupleGreaterThan] +from typing import Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +def foo(x: Tuple[int, Unpack[Ts], str]) -> None: + if len(x) > 5: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]" + reveal_type(x[5]) # N: Revealed type is "builtins.object" + reveal_type(x[-6]) # N: Revealed type is "builtins.object" + reveal_type(x[-1]) # N: Revealed type is "builtins.str" + else: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]" + + if len(x) < 5: + 
reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]" + else: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]" + x[5] # E: Tuple index out of range \ + # N: Variadic tuple can have length 5 + x[-6] # E: Tuple index out of range \ + # N: Variadic tuple can have length 5 + x[2] # E: Tuple index out of range \ + # N: Variadic tuple can have length 2 + x[-3] # E: Tuple index out of range \ + # N: Variadic tuple can have length 2 +[builtins fixtures/len.pyi] + +[case testNarrowingLenTypeVarTupleUnreachable] +# flags: --warn-unreachable +from typing import Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +def foo(x: Tuple[int, Unpack[Ts], str]) -> None: + if len(x) == 1: + reveal_type(x) # E: Statement is unreachable + else: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]" + + if len(x) != 1: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]" + else: + reveal_type(x) # E: Statement is unreachable + +def bar(x: Tuple[int, Unpack[Ts], str]) -> None: + if len(x) >= 2: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]" + else: + reveal_type(x) # E: Statement is unreachable + + if len(x) < 2: + reveal_type(x) # E: Statement is unreachable + else: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[Ts`-1], builtins.str]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenVariadicTupleEquals] +from typing import Tuple +from typing_extensions import Unpack + +def foo(x: Tuple[int, Unpack[Tuple[float, ...]], str]) -> None: + if len(x) == 4: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.float, builtins.float, builtins.str]" + else: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]" + + if len(x) != 4: + reveal_type(x) # N: Revealed type is 
"Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]" + else: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.float, builtins.float, builtins.str]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenVariadicTupleGreaterThan] +from typing import Tuple +from typing_extensions import Unpack + +def foo(x: Tuple[int, Unpack[Tuple[float, ...]], str]) -> None: + if len(x) > 3: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.float, builtins.float, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]" + else: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.str], Tuple[builtins.int, builtins.float, builtins.str]]" + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]" + + if len(x) < 3: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.str]" + else: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.float, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]" + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenVariadicTupleUnreachable] +# flags: --warn-unreachable +from typing import Tuple +from typing_extensions import Unpack + +def foo(x: Tuple[int, Unpack[Tuple[float, ...]], str]) -> None: + if len(x) == 1: + reveal_type(x) # E: Statement is unreachable + else: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]" + + if len(x) != 1: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]" + else: + reveal_type(x) # E: Statement is unreachable + +def bar(x: Tuple[int, Unpack[Tuple[float, ...]], str]) -> None: + if len(x) >= 2: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, 
Unpack[builtins.tuple[builtins.float, ...]], builtins.str]" + else: + reveal_type(x) # E: Statement is unreachable + + if len(x) < 2: + reveal_type(x) # E: Statement is unreachable + else: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.float, ...]], builtins.str]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenBareExpressionPrecise] +# flags: --enable-incomplete-feature=PreciseTupleTypes +from typing import Tuple + +x: Tuple[int, ...] +assert x +reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.int, ...]]]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenBareExpressionTypeVarTuple] +from typing import Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +def test(*xs: Unpack[Ts]) -> None: + assert xs + xs[0] # OK +[builtins fixtures/len.pyi] + +[case testNarrowingLenBareExpressionWithNonePrecise] +# flags: --enable-incomplete-feature=PreciseTupleTypes +from typing import Tuple, Optional + +x: Optional[Tuple[int, ...]] +if x: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.int, ...]]]" +else: + reveal_type(x) # N: Revealed type is "Union[Tuple[()], None]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenBareExpressionWithNoneImprecise] +from typing import Tuple, Optional + +x: Optional[Tuple[int, ...]] +if x: + reveal_type(x) # N: Revealed type is "builtins.tuple[builtins.int, ...]" +else: + reveal_type(x) # N: Revealed type is "Union[builtins.tuple[builtins.int, ...], None]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenMixWithAnyPrecise] +# flags: --enable-incomplete-feature=PreciseTupleTypes +from typing import Any + +x: Any +if isinstance(x, (list, tuple)) and len(x) == 0: + reveal_type(x) # N: Revealed type is "Union[Tuple[()], builtins.list[Any]]" +else: + reveal_type(x) # N: Revealed type is "Any" +reveal_type(x) # N: Revealed type is "Any" + +x1: Any +if isinstance(x1, (list, 
tuple)) and len(x1) > 1: + reveal_type(x1) # N: Revealed type is "Union[Tuple[Any, Any, Unpack[builtins.tuple[Any, ...]]], builtins.list[Any]]" +else: + reveal_type(x1) # N: Revealed type is "Any" +reveal_type(x1) # N: Revealed type is "Any" +[builtins fixtures/len.pyi] + +[case testNarrowingLenMixWithAnyImprecise] +from typing import Any + +x: Any +if isinstance(x, (list, tuple)) and len(x) == 0: + reveal_type(x) # N: Revealed type is "Union[Tuple[()], builtins.list[Any]]" +else: + reveal_type(x) # N: Revealed type is "Any" +reveal_type(x) # N: Revealed type is "Any" + +x1: Any +if isinstance(x1, (list, tuple)) and len(x1) > 1: + reveal_type(x1) # N: Revealed type is "Union[builtins.tuple[Any, ...], builtins.list[Any]]" +else: + reveal_type(x1) # N: Revealed type is "Any" +reveal_type(x1) # N: Revealed type is "Any" +[builtins fixtures/len.pyi] + +[case testNarrowingLenExplicitLiteralTypes] +from typing import Tuple, Union +from typing_extensions import Literal + +VarTuple = Union[ + Tuple[int], + Tuple[int, int], + Tuple[int, int, int], +] +x: VarTuple + +supported: Literal[2] +if len(x) == supported: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int]" +else: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]" + +not_supported_yet: Literal[2, 3] +if len(x) == not_supported_yet: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int], Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]" +else: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int], Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenUnionOfVariadicTuples] +from typing import Tuple, Union + +x: Union[Tuple[int, ...], Tuple[str, ...]] +if len(x) == 2: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.str, builtins.str]]" +else: + 
reveal_type(x) # N: Revealed type is "Union[builtins.tuple[builtins.int, ...], builtins.tuple[builtins.str, ...]]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenUnionOfNamedTuples] +from typing import NamedTuple, Union + +class Point2D(NamedTuple): + x: int + y: int +class Point3D(NamedTuple): + x: int + y: int + z: int + +x: Union[Point2D, Point3D] +if len(x) == 2: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int, fallback=__main__.Point2D]" +else: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtins.int, fallback=__main__.Point3D]" +[builtins fixtures/len.pyi] + +[case testNarrowingLenTupleSubclass] +from typing import Tuple + +class Ints(Tuple[int, ...]): + size: int + +x: Ints +if len(x) == 2: + reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.int, fallback=__main__.Ints]" + reveal_type(x.size) # N: Revealed type is "builtins.int" +else: + reveal_type(x) # N: Revealed type is "__main__.Ints" + reveal_type(x.size) # N: Revealed type is "builtins.int" + +reveal_type(x) # N: Revealed type is "__main__.Ints" +[builtins fixtures/len.pyi] + +[case testNarrowingLenTupleSubclassCustomNotAllowed] +from typing import Tuple + +class Ints(Tuple[int, ...]): + def __len__(self) -> int: + return 0 + +x: Ints +if len(x) > 2: + reveal_type(x) # N: Revealed type is "__main__.Ints" +else: + reveal_type(x) # N: Revealed type is "__main__.Ints" +[builtins fixtures/len.pyi] + +[case testNarrowingLenTupleSubclassPreciseNotAllowed] +# flags: --enable-incomplete-feature=PreciseTupleTypes +from typing import Tuple + +class Ints(Tuple[int, ...]): + size: int + +x: Ints +if len(x) > 2: + reveal_type(x) # N: Revealed type is "__main__.Ints" +else: + reveal_type(x) # N: Revealed type is "__main__.Ints" +[builtins fixtures/len.pyi] + +[case testNarrowingLenUnknownLen] +from typing import Any, Tuple, Union + +x: Union[Tuple[int, int], Tuple[int, int, int]] + +n: int +if len(x) == n: + reveal_type(x) # N: Revealed 
type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]" +else: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]" + +a: Any +if len(x) == a: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]" +else: + reveal_type(x) # N: Revealed type is "Union[Tuple[builtins.int, builtins.int], Tuple[builtins.int, builtins.int, builtins.int]]" +[builtins fixtures/len.pyi] diff --git a/test-data/unit/fixtures/len.pyi b/test-data/unit/fixtures/len.pyi new file mode 100644 index 000000000000..c72596661858 --- /dev/null +++ b/test-data/unit/fixtures/len.pyi @@ -0,0 +1,39 @@ +from typing import Tuple, TypeVar, Generic, Union, Type, Sequence, Mapping +from typing_extensions import Protocol + +T = TypeVar("T") +V = TypeVar("V") + +class object: + def __init__(self) -> None: pass + +class type: + def __init__(self, x) -> None: pass + +class tuple(Generic[T]): + def __len__(self) -> int: pass + +class list(Sequence[T]): pass +class dict(Mapping[T, V]): pass + +class function: pass + +class Sized(Protocol): + def __len__(self) -> int: pass + +def len(__obj: Sized) -> int: ... 
+def isinstance(x: object, t: Union[Type[object], Tuple[Type[object], ...]]) -> bool: pass + +class int: + def __add__(self, other: int) -> int: pass + def __eq__(self, other: int) -> bool: pass + def __ne__(self, other: int) -> bool: pass + def __lt__(self, n: int) -> bool: pass + def __gt__(self, n: int) -> bool: pass + def __le__(self, n: int) -> bool: pass + def __ge__(self, n: int) -> bool: pass + def __neg__(self) -> int: pass +class float: pass +class bool(int): pass +class str(Sequence[str]): pass +class ellipsis: pass diff --git a/test-data/unit/lib-stub/typing.pyi b/test-data/unit/lib-stub/typing.pyi index b35b64a383c9..5f458ca687c0 100644 --- a/test-data/unit/lib-stub/typing.pyi +++ b/test-data/unit/lib-stub/typing.pyi @@ -48,6 +48,7 @@ class Generator(Iterator[T], Generic[T, U, V]): class Sequence(Iterable[T_co]): def __getitem__(self, n: Any) -> T_co: pass + def __len__(self) -> int: pass # Mapping type is oversimplified intentionally. class Mapping(Iterable[T], Generic[T, T_co]): From 27c4b462aa4cf269397253eca7a88e7fbbf4e43e Mon Sep 17 00:00:00 2001 From: Ali Hamdan Date: Sat, 21 Oct 2023 22:25:20 +0200 Subject: [PATCH 111/144] stubgen: fix missing property setter in semantic analysis mode (#16303) The semantic analyzer treats properties as overloaded functions. This was previously ignored by stubgen but regressed in #15232. This PR restores the original behavior. Fixes #16300 --- mypy/stubgen.py | 1 + mypy/stubutil.py | 2 -- test-data/unit/stubgen.test | 18 ++++++++++++++++++ 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index 395a49fa4e08..a2f07a35eaa2 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -633,6 +633,7 @@ def process_decorator(self, o: Decorator) -> None: Only preserve certain special decorators such as @abstractmethod. 
""" + o.func.is_overload = False for decorator in o.original_decorators: if not isinstance(decorator, (NameExpr, MemberExpr)): continue diff --git a/mypy/stubutil.py b/mypy/stubutil.py index 22e525c14e7c..cc3b63098fd2 100644 --- a/mypy/stubutil.py +++ b/mypy/stubutil.py @@ -669,8 +669,6 @@ def set_defined_names(self, defined_names: set[str]) -> None: self.add_name(f"{pkg}.{t}", require=False) def check_undefined_names(self) -> None: - print(self._all_) - print(self._toplevel_names) undefined_names = [name for name in self._all_ or [] if name not in self._toplevel_names] if undefined_names: if self._output: diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index d83d74306230..64a1353b29b3 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -377,6 +377,24 @@ class A: def f(self, x) -> None: ... def h(self) -> None: ... +[case testProperty_semanal] +class A: + @property + def f(self): + return 1 + @f.setter + def f(self, x): ... + + def h(self): + self.f = 1 +[out] +class A: + @property + def f(self): ... + @f.setter + def f(self, x) -> None: ... + def h(self) -> None: ... + -- a read/write property is treated the same as an attribute [case testProperty_inspect] class A: From 2d54024cb44556302b40fed6e0bd40fd9ef56563 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sat, 21 Oct 2023 17:06:42 -0700 Subject: [PATCH 112/144] [mypyc] Don't crash on unreachable statements (#16311) Skip them instead. This applies to statements after break, continue, return and raise statements. It's common to have unreachable statements temporarily while working on a half-finished change, so generating an error is perhaps not the best option. Fixes mypyc/mypyc#1028. 
--- mypyc/irbuild/builder.py | 11 ++ mypyc/irbuild/statement.py | 5 + mypyc/irbuild/visitor.py | 4 + mypyc/test-data/irbuild-unreachable.test | 137 ++++++++++++++++++++++- 4 files changed, 156 insertions(+), 1 deletion(-) diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py index 0757415f6753..573ca334a5d1 100644 --- a/mypyc/irbuild/builder.py +++ b/mypyc/irbuild/builder.py @@ -165,6 +165,9 @@ def __init__( self.runtime_args: list[list[RuntimeArg]] = [[]] self.function_name_stack: list[str] = [] self.class_ir_stack: list[ClassIR] = [] + # Keep track of whether the next statement in a block is reachable + # or not, separately for each block nesting level + self.block_reachable_stack: list[bool] = [True] self.current_module = current_module self.mapper = mapper @@ -1302,6 +1305,14 @@ def is_native_attr_ref(self, expr: MemberExpr) -> bool: and not obj_rtype.class_ir.get_method(expr.name) ) + def mark_block_unreachable(self) -> None: + """Mark statements in the innermost block being processed as unreachable. + + This should be called after a statement that unconditionally leaves the + block, such as 'break' or 'return'. + """ + self.block_reachable_stack[-1] = False + # Lacks a good type because there wasn't a reasonable type in 3.5 :( def catch_errors(self, line: int) -> Any: return catch_errors(self.module_path, line) diff --git a/mypyc/irbuild/statement.py b/mypyc/irbuild/statement.py index d7e01456139d..2c17eb2bb14d 100644 --- a/mypyc/irbuild/statement.py +++ b/mypyc/irbuild/statement.py @@ -118,8 +118,13 @@ def transform_block(builder: IRBuilder, block: Block) -> None: if not block.is_unreachable: + builder.block_reachable_stack.append(True) for stmt in block.body: builder.accept(stmt) + if not builder.block_reachable_stack[-1]: + # The rest of the block is unreachable, so skip it + break + builder.block_reachable_stack.pop() # Raise a RuntimeError if we hit a non-empty unreachable block. 
# Don't complain about empty unreachable blocks, since mypy inserts # those after `if MYPY`. diff --git a/mypyc/irbuild/visitor.py b/mypyc/irbuild/visitor.py index d8725ee04dc5..12e186fd40d8 100644 --- a/mypyc/irbuild/visitor.py +++ b/mypyc/irbuild/visitor.py @@ -194,6 +194,7 @@ def visit_expression_stmt(self, stmt: ExpressionStmt) -> None: def visit_return_stmt(self, stmt: ReturnStmt) -> None: transform_return_stmt(self.builder, stmt) + self.builder.mark_block_unreachable() def visit_assignment_stmt(self, stmt: AssignmentStmt) -> None: transform_assignment_stmt(self.builder, stmt) @@ -212,12 +213,15 @@ def visit_for_stmt(self, stmt: ForStmt) -> None: def visit_break_stmt(self, stmt: BreakStmt) -> None: transform_break_stmt(self.builder, stmt) + self.builder.mark_block_unreachable() def visit_continue_stmt(self, stmt: ContinueStmt) -> None: transform_continue_stmt(self.builder, stmt) + self.builder.mark_block_unreachable() def visit_raise_stmt(self, stmt: RaiseStmt) -> None: transform_raise_stmt(self.builder, stmt) + self.builder.mark_block_unreachable() def visit_try_stmt(self, stmt: TryStmt) -> None: transform_try_stmt(self.builder, stmt) diff --git a/mypyc/test-data/irbuild-unreachable.test b/mypyc/test-data/irbuild-unreachable.test index 1c024a249bf1..b5188c91ac58 100644 --- a/mypyc/test-data/irbuild-unreachable.test +++ b/mypyc/test-data/irbuild-unreachable.test @@ -1,4 +1,4 @@ -# Test cases for unreachable expressions +# Test cases for unreachable expressions and statements [case testUnreachableMemberExpr] import sys @@ -104,3 +104,138 @@ L5: L6: y = r11 return 1 + +[case testUnreachableStatementAfterReturn] +def f(x: bool) -> int: + if x: + return 1 + f(False) + return 2 +[out] +def f(x): + x :: bool +L0: + if x goto L1 else goto L2 :: bool +L1: + return 2 +L2: + return 4 + +[case testUnreachableStatementAfterContinue] +def c() -> bool: + return False + +def f() -> None: + n = True + while n: + if c(): + continue + if int(): + f() + n = False +[out] +def 
c(): +L0: + return 0 +def f(): + n, r0 :: bool +L0: + n = 1 +L1: + if n goto L2 else goto L5 :: bool +L2: + r0 = c() + if r0 goto L3 else goto L4 :: bool +L3: + goto L1 +L4: + n = 0 + goto L1 +L5: + return 1 + +[case testUnreachableStatementAfterBreak] +def c() -> bool: + return False + +def f() -> None: + n = True + while n: + if c(): + break + if int(): + f() + n = False +[out] +def c(): +L0: + return 0 +def f(): + n, r0 :: bool +L0: + n = 1 +L1: + if n goto L2 else goto L5 :: bool +L2: + r0 = c() + if r0 goto L3 else goto L4 :: bool +L3: + goto L5 +L4: + n = 0 + goto L1 +L5: + return 1 + +[case testUnreachableStatementAfterRaise] +def f(x: bool) -> int: + if x: + raise ValueError() + print('hello') + return 2 +[out] +def f(x): + x :: bool + r0 :: object + r1 :: str + r2, r3 :: object +L0: + if x goto L1 else goto L2 :: bool +L1: + r0 = builtins :: module + r1 = 'ValueError' + r2 = CPyObject_GetAttr(r0, r1) + r3 = PyObject_CallFunctionObjArgs(r2, 0) + CPy_Raise(r3) + unreachable +L2: + return 4 + +[case testUnreachableStatementAfterAssertFalse] +def f(x: bool) -> int: + if x: + assert False + print('hello') + return 2 +[out] +def f(x): + x, r0 :: bool + r1 :: str + r2 :: object + r3 :: str + r4, r5 :: object +L0: + if x goto L1 else goto L4 :: bool +L1: + if 0 goto L3 else goto L2 :: bool +L2: + r0 = raise AssertionError + unreachable +L3: + r1 = 'hello' + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = PyObject_CallFunctionObjArgs(r4, r1, 0) +L4: + return 4 From 341929b10df327796ef60da4837b907d6af1b7d9 Mon Sep 17 00:00:00 2001 From: Ihor <31508183+nautics889@users.noreply.github.com> Date: Mon, 23 Oct 2023 08:16:58 +0300 Subject: [PATCH 113/144] refactor: `__str__` in `CFG` class (#16307) (#16308) Closes https://github.com/python/mypy/issues/16307. 
--- mypyc/analysis/dataflow.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/mypyc/analysis/dataflow.py b/mypyc/analysis/dataflow.py index cade0c823962..57ad2b17fcc5 100644 --- a/mypyc/analysis/dataflow.py +++ b/mypyc/analysis/dataflow.py @@ -72,11 +72,8 @@ def __init__( self.exits = exits def __str__(self) -> str: - lines = [] - lines.append("exits: %s" % sorted(self.exits, key=lambda e: int(e.label))) - lines.append("succ: %s" % self.succ) - lines.append("pred: %s" % self.pred) - return "\n".join(lines) + exits = sorted(self.exits, key=lambda e: int(e.label)) + return f"exits: {exits}\nsucc: {self.succ}\npred: {self.pred}" def get_cfg(blocks: list[BasicBlock]) -> CFG: From cda163d378d6f85627b72454918cba323bf37749 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 23 Oct 2023 06:48:37 +0100 Subject: [PATCH 114/144] Clarify variance convention for Parameters (#16302) Fixes https://github.com/python/mypy/issues/16296 In my big refactoring I flipped the variance convention for the `Parameters` type, but I did it inconsistently in one place. After working some more with ParamSpecs, it now seems to me the original convention is easier to remember. I also now explicitly put it in the type docstring. --- mypy/constraints.py | 9 ++---- mypy/join.py | 13 ++++++--- mypy/meet.py | 6 ++-- mypy/subtypes.py | 2 -- mypy/types.py | 5 +++- .../unit/check-parameter-specification.test | 29 ++++++++++++++++++- 6 files changed, 47 insertions(+), 17 deletions(-) diff --git a/mypy/constraints.py b/mypy/constraints.py index 58d0f4dbed29..7d782551b261 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -692,11 +692,8 @@ def visit_parameters(self, template: Parameters) -> list[Constraint]: return self.infer_against_any(template.arg_types, self.actual) if type_state.infer_polymorphic and isinstance(self.actual, Parameters): # For polymorphic inference we need to be able to infer secondary constraints - # in situations like [x: T] <: P <: [x: int]. 
Note we invert direction, since - # this function expects direction between callables. - return infer_callable_arguments_constraints( - template, self.actual, neg_op(self.direction) - ) + # in situations like [x: T] <: P <: [x: int]. + return infer_callable_arguments_constraints(template, self.actual, self.direction) raise RuntimeError("Parameters cannot be constrained to") # Non-leaf types @@ -1128,7 +1125,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: ) ) if param_spec_target is not None: - res.append(Constraint(param_spec, neg_op(self.direction), param_spec_target)) + res.append(Constraint(param_spec, self.direction, param_spec_target)) if extra_tvars: for c in res: c.extra_tvars += cactual.variables diff --git a/mypy/join.py b/mypy/join.py index e4429425d98a..2e2939f9fbc8 100644 --- a/mypy/join.py +++ b/mypy/join.py @@ -350,10 +350,13 @@ def visit_parameters(self, t: Parameters) -> ProperType: if isinstance(self.s, Parameters): if len(t.arg_types) != len(self.s.arg_types): return self.default(self.s) + from mypy.meet import meet_types + return t.copy_modified( - # Note that since during constraint inference we already treat whole ParamSpec as - # contravariant, we should join individual items, not meet them like for Callables - arg_types=[join_types(s_a, t_a) for s_a, t_a in zip(self.s.arg_types, t.arg_types)] + arg_types=[ + meet_types(s_a, t_a) for s_a, t_a in zip(self.s.arg_types, t.arg_types) + ], + arg_names=combine_arg_names(self.s, t), ) else: return self.default(self.s) @@ -754,7 +757,9 @@ def combine_similar_callables(t: CallableType, s: CallableType) -> CallableType: ) -def combine_arg_names(t: CallableType, s: CallableType) -> list[str | None]: +def combine_arg_names( + t: CallableType | Parameters, s: CallableType | Parameters +) -> list[str | None]: """Produces a list of argument names compatible with both callables. 
For example, suppose 't' and 's' have the following signatures: diff --git a/mypy/meet.py b/mypy/meet.py index e3645c7b5879..1a566aed17de 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -708,10 +708,10 @@ def visit_parameters(self, t: Parameters) -> ProperType: if isinstance(self.s, Parameters): if len(t.arg_types) != len(self.s.arg_types): return self.default(self.s) + from mypy.join import join_types + return t.copy_modified( - # Note that since during constraint inference we already treat whole ParamSpec as - # contravariant, we should meet individual items, not join them like for Callables - arg_types=[meet_types(s_a, t_a) for s_a, t_a in zip(self.s.arg_types, t.arg_types)] + arg_types=[join_types(s_a, t_a) for s_a, t_a in zip(self.s.arg_types, t.arg_types)] ) else: return self.default(self.s) diff --git a/mypy/subtypes.py b/mypy/subtypes.py index b79e0e628849..2ca3357dd722 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -654,8 +654,6 @@ def visit_unpack_type(self, left: UnpackType) -> bool: def visit_parameters(self, left: Parameters) -> bool: if isinstance(self.right, Parameters): - # TODO: direction here should be opposite, this function expects - # order of callables, while parameters are contravariant. return are_parameters_compatible( left, self.right, diff --git a/mypy/types.py b/mypy/types.py index d08e9e7a890c..ae1a1f595fa2 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -1562,7 +1562,10 @@ class FormalArgument(NamedTuple): class Parameters(ProperType): """Type that represents the parameters to a function. - Used for ParamSpec analysis.""" + Used for ParamSpec analysis. Note that by convention we handle this + type as a Callable without return type, not as a "tuple with names", + so that it behaves contravariantly, in particular [x: int] <: [int]. 
+ """ __slots__ = ( "arg_types", diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 48fadbc96c90..db8c76fd21e9 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -1403,7 +1403,7 @@ def wrong_name_constructor(b: bool) -> SomeClass: func(SomeClass, constructor) reveal_type(func(SomeClass, wrong_constructor)) # N: Revealed type is "def (a: Never) -> __main__.SomeClass" reveal_type(func_regular(SomeClass, wrong_constructor)) # N: Revealed type is "def (Never) -> __main__.SomeClass" -func(SomeClass, wrong_name_constructor) # E: Argument 1 to "func" has incompatible type "Type[SomeClass]"; expected "Callable[[Never], SomeClass]" +reveal_type(func(SomeClass, wrong_name_constructor)) # N: Revealed type is "def (Never) -> __main__.SomeClass" [builtins fixtures/paramspec.pyi] [case testParamSpecInTypeAliasBasic] @@ -2059,3 +2059,30 @@ def test2(x: int, y: int) -> str: ... reveal_type(call(test1, 1)) # N: Revealed type is "builtins.str" reveal_type(call(test2, 1, 2)) # N: Revealed type is "builtins.str" [builtins fixtures/paramspec.pyi] + +[case testParamSpecCorrectParameterNameInference] +from typing import Callable, Protocol +from typing_extensions import ParamSpec, Concatenate + +def a(i: int) -> None: ... +def b(__i: int) -> None: ... + +class WithName(Protocol): + def __call__(self, i: int) -> None: ... +NoName = Callable[[int], None] + +def f1(__fn: WithName, i: int) -> None: ... +def f2(__fn: NoName, i: int) -> None: ... 
+ +P = ParamSpec("P") +def d(f: Callable[P, None], fn: Callable[Concatenate[Callable[P, None], P], None]) -> Callable[P, None]: + def inner(*args: P.args, **kwargs: P.kwargs) -> None: + fn(f, *args, **kwargs) + return inner + +reveal_type(d(a, f1)) # N: Revealed type is "def (i: builtins.int)" +reveal_type(d(a, f2)) # N: Revealed type is "def (i: builtins.int)" +reveal_type(d(b, f1)) # E: Cannot infer type argument 1 of "d" \ + # N: Revealed type is "def (*Any, **Any)" +reveal_type(d(b, f2)) # N: Revealed type is "def (builtins.int)" +[builtins fixtures/paramspec.pyi] From 8236c93d899fa5225eb23644db802cf1e09196a7 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Mon, 23 Oct 2023 15:52:42 +0300 Subject: [PATCH 115/144] Add `|=` and `|` operators support for `TypedDict` (#16249) Please, note that there are several problems with `__ror__` definitions. 1. `dict.__ror__` does not define support for `Mapping?` types. For example: ```python >>> import types >>> {'a': 1} | types.MappingProxyType({'b': 2}) {'a': 1, 'b': 2} >>> ``` 2. `TypedDict.__ror__` also does not define this support So, I would like to defer this feature for the future, we need some discussion to happen. However, this PR does fully solve the problem OP had. 
Closes https://github.com/python/mypy/issues/16244 --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- mypy/checker.py | 19 ++- mypy/checkexpr.py | 55 ++++++- mypy/plugins/default.py | 22 ++- test-data/unit/check-typeddict.test | 143 ++++++++++++++++++ test-data/unit/fixtures/dict.pyi | 19 ++- test-data/unit/fixtures/typing-async.pyi | 1 + test-data/unit/fixtures/typing-full.pyi | 1 + test-data/unit/fixtures/typing-medium.pyi | 1 + .../unit/fixtures/typing-typeddict-iror.pyi | 66 ++++++++ 9 files changed, 316 insertions(+), 11 deletions(-) create mode 100644 test-data/unit/fixtures/typing-typeddict-iror.pyi diff --git a/mypy/checker.py b/mypy/checker.py index 02bab37aa13f..64bbbfa0a55b 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -7783,14 +7783,25 @@ def infer_operator_assignment_method(typ: Type, operator: str) -> tuple[bool, st """ typ = get_proper_type(typ) method = operators.op_methods[operator] + existing_method = None if isinstance(typ, Instance): - if operator in operators.ops_with_inplace_method: - inplace_method = "__i" + method[2:] - if typ.type.has_readable_member(inplace_method): - return True, inplace_method + existing_method = _find_inplace_method(typ, method, operator) + elif isinstance(typ, TypedDictType): + existing_method = _find_inplace_method(typ.fallback, method, operator) + + if existing_method is not None: + return True, existing_method return False, method +def _find_inplace_method(inst: Instance, method: str, operator: str) -> str | None: + if operator in operators.ops_with_inplace_method: + inplace_method = "__i" + method[2:] + if inst.type.has_readable_member(inplace_method): + return inplace_method + return None + + def is_valid_inferred_type(typ: Type, is_lvalue_final: bool = False) -> bool: """Is an inferred type valid and needs no further refinement? 
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 2dc5a93a1de9..18c1c570ba91 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2,12 +2,13 @@ from __future__ import annotations +import enum import itertools import time from collections import defaultdict from contextlib import contextmanager from typing import Callable, ClassVar, Final, Iterable, Iterator, List, Optional, Sequence, cast -from typing_extensions import TypeAlias as _TypeAlias, overload +from typing_extensions import TypeAlias as _TypeAlias, assert_never, overload import mypy.checker import mypy.errorcodes as codes @@ -277,6 +278,20 @@ class Finished(Exception): """Raised if we can terminate overload argument check early (no match).""" +@enum.unique +class UseReverse(enum.Enum): + """Used in `visit_op_expr` to enable or disable reverse method checks.""" + + DEFAULT = 0 + ALWAYS = 1 + NEVER = 2 + + +USE_REVERSE_DEFAULT: Final = UseReverse.DEFAULT +USE_REVERSE_ALWAYS: Final = UseReverse.ALWAYS +USE_REVERSE_NEVER: Final = UseReverse.NEVER + + class ExpressionChecker(ExpressionVisitor[Type]): """Expression type checker. @@ -3371,6 +3386,24 @@ def visit_op_expr(self, e: OpExpr) -> Type: return proper_left_type.copy_modified( items=proper_left_type.items + [UnpackType(mapped)] ) + + use_reverse: UseReverse = USE_REVERSE_DEFAULT + if e.op == "|": + if is_named_instance(proper_left_type, "builtins.dict"): + # This is a special case for `dict | TypedDict`. + # 1. Find `dict | TypedDict` case + # 2. Switch `dict.__or__` to `TypedDict.__ror__` (the same from both runtime and typing perspective) + proper_right_type = get_proper_type(self.accept(e.right)) + if isinstance(proper_right_type, TypedDictType): + use_reverse = USE_REVERSE_ALWAYS + if isinstance(proper_left_type, TypedDictType): + # This is the reverse case: `TypedDict | dict`, + # simply do not allow the reverse checking: + # do not call `__dict__.__ror__`. 
+ proper_right_type = get_proper_type(self.accept(e.right)) + if is_named_instance(proper_right_type, "builtins.dict"): + use_reverse = USE_REVERSE_NEVER + if TYPE_VAR_TUPLE in self.chk.options.enable_incomplete_feature: # Handle tuple[X, ...] + tuple[Y, Z] = tuple[*tuple[X, ...], Y, Z]. if ( @@ -3390,7 +3423,25 @@ def visit_op_expr(self, e: OpExpr) -> Type: if e.op in operators.op_methods: method = operators.op_methods[e.op] - result, method_type = self.check_op(method, left_type, e.right, e, allow_reverse=True) + if use_reverse is UseReverse.DEFAULT or use_reverse is UseReverse.NEVER: + result, method_type = self.check_op( + method, + base_type=left_type, + arg=e.right, + context=e, + allow_reverse=use_reverse is UseReverse.DEFAULT, + ) + elif use_reverse is UseReverse.ALWAYS: + result, method_type = self.check_op( + # The reverse operator here gives better error messages: + operators.reverse_op_methods[method], + base_type=self.accept(e.right), + arg=e.left, + context=e, + allow_reverse=False, + ) + else: + assert_never(use_reverse) e.method_type = method_type return result else: diff --git a/mypy/plugins/default.py b/mypy/plugins/default.py index b60fc3873c04..ddcc37f465fe 100644 --- a/mypy/plugins/default.py +++ b/mypy/plugins/default.py @@ -74,12 +74,21 @@ def get_method_signature_hook( return typed_dict_setdefault_signature_callback elif fullname in {n + ".pop" for n in TPDICT_FB_NAMES}: return typed_dict_pop_signature_callback - elif fullname in {n + ".update" for n in TPDICT_FB_NAMES}: - return typed_dict_update_signature_callback elif fullname == "_ctypes.Array.__setitem__": return ctypes.array_setitem_callback elif fullname == singledispatch.SINGLEDISPATCH_CALLABLE_CALL_METHOD: return singledispatch.call_singledispatch_function_callback + + typed_dict_updates = set() + for n in TPDICT_FB_NAMES: + typed_dict_updates.add(n + ".update") + typed_dict_updates.add(n + ".__or__") + typed_dict_updates.add(n + ".__ror__") + typed_dict_updates.add(n + ".__ior__") 
+ + if fullname in typed_dict_updates: + return typed_dict_update_signature_callback + return None def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None: @@ -401,11 +410,16 @@ def typed_dict_delitem_callback(ctx: MethodContext) -> Type: def typed_dict_update_signature_callback(ctx: MethodSigContext) -> CallableType: - """Try to infer a better signature type for TypedDict.update.""" + """Try to infer a better signature type for methods that update `TypedDict`. + + This includes: `TypedDict.update`, `TypedDict.__or__`, `TypedDict.__ror__`, + and `TypedDict.__ior__`. + """ signature = ctx.default_signature if isinstance(ctx.type, TypedDictType) and len(signature.arg_types) == 1: arg_type = get_proper_type(signature.arg_types[0]) - assert isinstance(arg_type, TypedDictType) + if not isinstance(arg_type, TypedDictType): + return signature arg_type = arg_type.as_anonymous() arg_type = arg_type.copy_modified(required_keys=set()) if ctx.args and ctx.args[0]: diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index 7ee9ef0b708b..0e1d800e0468 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -3236,3 +3236,146 @@ def foo(x: int) -> Foo: ... 
f: Foo = {**foo("no")} # E: Argument 1 to "foo" has incompatible type "str"; expected "int" [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] + + +[case testTypedDictWith__or__method] +from typing import Dict +from mypy_extensions import TypedDict + +class Foo(TypedDict): + key: int + +foo1: Foo = {'key': 1} +foo2: Foo = {'key': 2} + +reveal_type(foo1 | foo2) # N: Revealed type is "TypedDict('__main__.Foo', {'key': builtins.int})" +reveal_type(foo1 | {'key': 1}) # N: Revealed type is "TypedDict('__main__.Foo', {'key': builtins.int})" +reveal_type(foo1 | {'key': 'a'}) # N: Revealed type is "builtins.dict[builtins.str, builtins.object]" +reveal_type(foo1 | {}) # N: Revealed type is "TypedDict('__main__.Foo', {'key': builtins.int})" + +d1: Dict[str, int] +d2: Dict[int, str] + +reveal_type(foo1 | d1) # N: Revealed type is "builtins.dict[builtins.str, builtins.object]" +foo1 | d2 # E: Unsupported operand types for | ("Foo" and "Dict[int, str]") + + +class Bar(TypedDict): + key: int + value: str + +bar: Bar +reveal_type(bar | {}) # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})" +reveal_type(bar | {'key': 1, 'value': 'v'}) # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})" +reveal_type(bar | {'key': 1}) # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})" +reveal_type(bar | {'value': 'v'}) # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})" +reveal_type(bar | {'key': 'a'}) # N: Revealed type is "builtins.dict[builtins.str, builtins.object]" +reveal_type(bar | {'value': 1}) # N: Revealed type is "builtins.dict[builtins.str, builtins.object]" +reveal_type(bar | {'key': 'a', 'value': 1}) # N: Revealed type is "builtins.dict[builtins.str, builtins.object]" + +reveal_type(bar | foo1) # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})" 
+reveal_type(bar | d1) # N: Revealed type is "builtins.dict[builtins.str, builtins.object]" +bar | d2 # E: Unsupported operand types for | ("Bar" and "Dict[int, str]") +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict-iror.pyi] + +[case testTypedDictWith__or__method_error] +from mypy_extensions import TypedDict + +class Foo(TypedDict): + key: int + +foo: Foo = {'key': 1} +foo | 1 + +class SubDict(dict): ... +foo | SubDict() +[out] +main:7: error: No overload variant of "__or__" of "TypedDict" matches argument type "int" +main:7: note: Possible overload variants: +main:7: note: def __or__(self, TypedDict({'key'?: int}), /) -> Foo +main:7: note: def __or__(self, Dict[str, Any], /) -> Dict[str, object] +main:10: error: No overload variant of "__ror__" of "dict" matches argument type "Foo" +main:10: note: Possible overload variants: +main:10: note: def __ror__(self, Dict[Any, Any], /) -> Dict[Any, Any] +main:10: note: def [T, T2] __ror__(self, Dict[T, T2], /) -> Dict[Union[Any, T], Union[Any, T2]] +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict-iror.pyi] + +[case testTypedDictWith__ror__method] +from typing import Dict +from mypy_extensions import TypedDict + +class Foo(TypedDict): + key: int + +foo: Foo = {'key': 1} + +reveal_type({'key': 1} | foo) # N: Revealed type is "TypedDict('__main__.Foo', {'key': builtins.int})" +reveal_type({'key': 'a'} | foo) # N: Revealed type is "builtins.dict[builtins.str, builtins.object]" +reveal_type({} | foo) # N: Revealed type is "TypedDict('__main__.Foo', {'key': builtins.int})" +{1: 'a'} | foo # E: Dict entry 0 has incompatible type "int": "str"; expected "str": "Any" + +d1: Dict[str, int] +d2: Dict[int, str] + +reveal_type(d1 | foo) # N: Revealed type is "builtins.dict[builtins.str, builtins.object]" +d2 | foo # E: Unsupported operand types for | ("Dict[int, str]" and "Foo") +1 | foo # E: Unsupported left operand type for | ("int") + + +class Bar(TypedDict): + key: int + value: str + +bar: Bar 
+reveal_type({} | bar) # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})" +reveal_type({'key': 1, 'value': 'v'} | bar) # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})" +reveal_type({'key': 1} | bar) # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})" +reveal_type({'value': 'v'} | bar) # N: Revealed type is "TypedDict('__main__.Bar', {'key': builtins.int, 'value': builtins.str})" +reveal_type({'key': 'a'} | bar) # N: Revealed type is "builtins.dict[builtins.str, builtins.object]" +reveal_type({'value': 1} | bar) # N: Revealed type is "builtins.dict[builtins.str, builtins.object]" +reveal_type({'key': 'a', 'value': 1} | bar) # N: Revealed type is "builtins.dict[builtins.str, builtins.object]" + +reveal_type(d1 | bar) # N: Revealed type is "builtins.dict[builtins.str, builtins.object]" +d2 | bar # E: Unsupported operand types for | ("Dict[int, str]" and "Bar") +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict-iror.pyi] + +[case testTypedDictWith__ior__method] +from typing import Dict +from mypy_extensions import TypedDict + +class Foo(TypedDict): + key: int + +foo: Foo = {'key': 1} +foo |= {'key': 2} + +foo |= {} +foo |= {'key': 'a', 'b': 'a'} # E: Expected TypedDict key "key" but found keys ("key", "b") \ + # E: Incompatible types (expression has type "str", TypedDict item "key" has type "int") +foo |= {'b': 2} # E: Unexpected TypedDict key "b" + +d1: Dict[str, int] +d2: Dict[int, str] + +foo |= d1 # E: Argument 1 to "__ior__" of "TypedDict" has incompatible type "Dict[str, int]"; expected "TypedDict({'key'?: int})" +foo |= d2 # E: Argument 1 to "__ior__" of "TypedDict" has incompatible type "Dict[int, str]"; expected "TypedDict({'key'?: int})" + + +class Bar(TypedDict): + key: int + value: str + +bar: Bar +bar |= {} +bar |= {'key': 1, 'value': 'a'} +bar |= {'key': 'a', 'value': 'a', 'b': 'a'} # E: Expected TypedDict 
keys ("key", "value") but found keys ("key", "value", "b") \ + # E: Incompatible types (expression has type "str", TypedDict item "key" has type "int") + +bar |= foo +bar |= d1 # E: Argument 1 to "__ior__" of "TypedDict" has incompatible type "Dict[str, int]"; expected "TypedDict({'key'?: int, 'value'?: str})" +bar |= d2 # E: Argument 1 to "__ior__" of "TypedDict" has incompatible type "Dict[int, str]"; expected "TypedDict({'key'?: int, 'value'?: str})" +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict-iror.pyi] diff --git a/test-data/unit/fixtures/dict.pyi b/test-data/unit/fixtures/dict.pyi index 19d175ff79ab..7c0c8767f7d7 100644 --- a/test-data/unit/fixtures/dict.pyi +++ b/test-data/unit/fixtures/dict.pyi @@ -3,10 +3,12 @@ from _typeshed import SupportsKeysAndGetItem import _typeshed from typing import ( - TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload, Optional, Union, Sequence + TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload, Optional, Union, Sequence, + Self, ) T = TypeVar('T') +T2 = TypeVar('T2') KT = TypeVar('KT') VT = TypeVar('VT') @@ -34,6 +36,21 @@ class dict(Mapping[KT, VT]): def get(self, k: KT, default: Union[VT, T]) -> Union[VT, T]: pass def __len__(self) -> int: ... + # This was actually added in 3.9: + @overload + def __or__(self, __value: dict[KT, VT]) -> dict[KT, VT]: ... + @overload + def __or__(self, __value: dict[T, T2]) -> dict[Union[KT, T], Union[VT, T2]]: ... + @overload + def __ror__(self, __value: dict[KT, VT]) -> dict[KT, VT]: ... + @overload + def __ror__(self, __value: dict[T, T2]) -> dict[Union[KT, T], Union[VT, T2]]: ... + # dict.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self, __value: _typeshed.SupportsKeysAndGetItem[KT, VT]) -> Self: ... + @overload + def __ior__(self, __value: Iterable[Tuple[KT, VT]]) -> Self: ... 
+ class int: # for convenience def __add__(self, x: Union[int, complex]) -> int: pass def __radd__(self, x: int) -> int: pass diff --git a/test-data/unit/fixtures/typing-async.pyi b/test-data/unit/fixtures/typing-async.pyi index b207dd599c33..9897dfd0b270 100644 --- a/test-data/unit/fixtures/typing-async.pyi +++ b/test-data/unit/fixtures/typing-async.pyi @@ -24,6 +24,7 @@ ClassVar = 0 Final = 0 Literal = 0 NoReturn = 0 +Self = 0 T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi index e9f0aa199bb4..ef903ace78af 100644 --- a/test-data/unit/fixtures/typing-full.pyi +++ b/test-data/unit/fixtures/typing-full.pyi @@ -30,6 +30,7 @@ Literal = 0 TypedDict = 0 NoReturn = 0 NewType = 0 +Self = 0 T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) diff --git a/test-data/unit/fixtures/typing-medium.pyi b/test-data/unit/fixtures/typing-medium.pyi index 03be1d0a664d..c19c5d5d96e2 100644 --- a/test-data/unit/fixtures/typing-medium.pyi +++ b/test-data/unit/fixtures/typing-medium.pyi @@ -28,6 +28,7 @@ NoReturn = 0 NewType = 0 TypeAlias = 0 LiteralString = 0 +Self = 0 T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) diff --git a/test-data/unit/fixtures/typing-typeddict-iror.pyi b/test-data/unit/fixtures/typing-typeddict-iror.pyi new file mode 100644 index 000000000000..e452c8497109 --- /dev/null +++ b/test-data/unit/fixtures/typing-typeddict-iror.pyi @@ -0,0 +1,66 @@ +# Test stub for typing module that includes TypedDict `|` operator. +# It only covers `__or__`, `__ror__`, and `__ior__`. +# +# We cannot define these methods in `typing-typeddict.pyi`, +# because they need `dict` with two type args, +# and not all tests using `[typing typing-typeddict.pyi]` have the proper +# `dict` stub. +# +# Keep in sync with `typeshed`'s definition. 
+from abc import ABCMeta + +cast = 0 +assert_type = 0 +overload = 0 +Any = 0 +Union = 0 +Optional = 0 +TypeVar = 0 +Generic = 0 +Protocol = 0 +Tuple = 0 +Callable = 0 +NamedTuple = 0 +Final = 0 +Literal = 0 +TypedDict = 0 +NoReturn = 0 +Required = 0 +NotRequired = 0 +Self = 0 + +T = TypeVar('T') +T_co = TypeVar('T_co', covariant=True) +V = TypeVar('V') + +# Note: definitions below are different from typeshed, variances are declared +# to silence the protocol variance checks. Maybe it is better to use type: ignore? + +class Sized(Protocol): + def __len__(self) -> int: pass + +class Iterable(Protocol[T_co]): + def __iter__(self) -> 'Iterator[T_co]': pass + +class Iterator(Iterable[T_co], Protocol): + def __next__(self) -> T_co: pass + +class Sequence(Iterable[T_co]): + # misc is for explicit Any. + def __getitem__(self, n: Any) -> T_co: pass # type: ignore[misc] + +class Mapping(Iterable[T], Generic[T, T_co], metaclass=ABCMeta): + pass + +# Fallback type for all typed dicts (does not exist at runtime). +class _TypedDict(Mapping[str, object]): + @overload + def __or__(self, __value: Self) -> Self: ... + @overload + def __or__(self, __value: dict[str, Any]) -> dict[str, object]: ... + @overload + def __ror__(self, __value: Self) -> Self: ... + @overload + def __ror__(self, __value: dict[str, Any]) -> dict[str, object]: ... + # supposedly incompatible definitions of __or__ and __ior__ + def __ior__(self, __value: Self) -> Self: ... # type: ignore[misc] From 167dc7095758ddc001119e1c9f330bff4af72b22 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 24 Oct 2023 15:45:04 -0700 Subject: [PATCH 116/144] Fix sdist build by not including CHANGELOG.md (#16323) This is an attempt to fix wheel builds. Perhaps we'd want to actually include the changelog in the sdist. We can decide this later after the build has bee fixed. We've been getting these errors: ``` ... lists of files in version control and sdist do not match! 
missing from sdist: CHANGELOG.md listing source files under version control: 830 files and directories building an sdist: mypy-1.7.0+dev.ffe89a21058eaa6eb1c1796d9ab87aece965e2d9.tar.gz: 829 files and directories copying source files to a temporary directory building a clean sdist: mypy-1.7.0+dev.tar.gz: 829 files and directories suggested MANIFEST.in rules: include *.md Error: Process completed with exit code 1. `` Example failure: https://github.com/mypyc/mypy_mypyc-wheels/actions/runs/6555980362/job/17805243900 --- MANIFEST.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MANIFEST.in b/MANIFEST.in index 3ae340c7bd5e..c18b83cc0088 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -42,7 +42,7 @@ include pytest.ini include tox.ini include LICENSE mypyc/README.md -exclude .gitmodules CONTRIBUTING.md CREDITS ROADMAP.md action.yml .editorconfig +exclude .gitmodules CONTRIBUTING.md CREDITS ROADMAP.md CHANGELOG.md action.yml .editorconfig exclude .git-blame-ignore-revs .pre-commit-config.yaml global-exclude *.py[cod] From 090a414ba022f600bd65e7611fa3691903fd5a74 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Wed, 25 Oct 2023 07:03:26 -0700 Subject: [PATCH 117/144] Run macOS mypyc tests with Python 3.9 (#16326) The 3.8 tests have been flaking for several weeks and I don't think anyone has a good repro or idea as to the cause --- .github/workflows/test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 86704aca2f91..4613605425c3 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -71,8 +71,8 @@ jobs: tox_extra_args: "-n 2" test_mypyc: true - - name: mypyc runtime tests with py38-macos - python: '3.8.17' + - name: mypyc runtime tests with py39-macos + python: '3.9.18' arch: x64 os: macos-latest toxenv: py From f7d047cd6dc008ab767510211d5c466d1c5e9215 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: 
Fri, 27 Oct 2023 13:45:51 +0100 Subject: [PATCH 118/144] [mypyc] Generate error on duplicate function definitions (#16309) Previously we produced duplicate functions in C, which caused C compiler errors. --- mypyc/irbuild/builder.py | 9 +++++++++ mypyc/irbuild/function.py | 2 +- mypyc/test-data/irbuild-statements.test | 24 ++++++++++++++++++++++++ mypyc/test-data/run-misc.test | 4 ---- 4 files changed, 34 insertions(+), 5 deletions(-) diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py index 573ca334a5d1..5ed617aa925f 100644 --- a/mypyc/irbuild/builder.py +++ b/mypyc/irbuild/builder.py @@ -175,6 +175,7 @@ def __init__( self.graph = graph self.ret_types: list[RType] = [] self.functions: list[FuncIR] = [] + self.function_names: set[tuple[str | None, str]] = set() self.classes: list[ClassIR] = [] self.final_names: list[tuple[str, RType]] = [] self.callable_class_names: set[str] = set() @@ -1326,6 +1327,14 @@ def error(self, msg: str, line: int) -> None: def note(self, msg: str, line: int) -> None: self.errors.note(msg, self.module_path, line) + def add_function(self, func_ir: FuncIR, line: int) -> None: + name = (func_ir.class_name, func_ir.name) + if name in self.function_names: + self.error(f'Duplicate definition of "{name[1]}" not supported by mypyc', line) + return + self.function_names.add(name) + self.functions.append(func_ir) + def gen_arg_defaults(builder: IRBuilder) -> None: """Generate blocks for arguments that have default values. 
diff --git a/mypyc/irbuild/function.py b/mypyc/irbuild/function.py index ebf7fa9a54de..b1785f40550e 100644 --- a/mypyc/irbuild/function.py +++ b/mypyc/irbuild/function.py @@ -103,7 +103,7 @@ def transform_func_def(builder: IRBuilder, fdef: FuncDef) -> None: if func_reg: builder.assign(get_func_target(builder, fdef), func_reg, fdef.line) maybe_insert_into_registry_dict(builder, fdef) - builder.functions.append(func_ir) + builder.add_function(func_ir, fdef.line) def transform_overloaded_func_def(builder: IRBuilder, o: OverloadedFuncDef) -> None: diff --git a/mypyc/test-data/irbuild-statements.test b/mypyc/test-data/irbuild-statements.test index 490b41336e88..b7c67730a05f 100644 --- a/mypyc/test-data/irbuild-statements.test +++ b/mypyc/test-data/irbuild-statements.test @@ -1123,3 +1123,27 @@ L6: r14 = CPy_NoErrOccured() L7: return 1 + +[case testConditionalFunctionDefinition] +if int(): + def foo() -> int: + return 0 +else: + def foo() -> int: # E + return 1 + +def bar() -> int: + return 0 + +if int(): + def bar() -> int: # E + return 1 +[out] +main:5: error: Duplicate definition of "foo" not supported by mypyc +main:12: error: Duplicate definition of "bar" not supported by mypyc + +[case testRepeatedUnderscoreFunctions] +def _(arg): pass +def _(arg): pass +[out] +main:2: error: Duplicate definition of "_" not supported by mypyc diff --git a/mypyc/test-data/run-misc.test b/mypyc/test-data/run-misc.test index c40e0fc55f0e..f77ba3a1302b 100644 --- a/mypyc/test-data/run-misc.test +++ b/mypyc/test-data/run-misc.test @@ -1117,10 +1117,6 @@ for _ in range(2): except AssertionError: pass -[case testRepeatedUnderscoreFunctions] -def _(arg): pass -def _(arg): pass - [case testUnderscoreFunctionsInMethods] class A: From 5ef9c82c19941bd376128491b7959f551bd530e7 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 27 Oct 2023 16:25:52 +0100 Subject: [PATCH 119/144] [daemon] Fix return type change to optional in generic function (#16342) Previously changing a return type to 
an optional type was not propagated at least in some cases, since astdiff could simplify away the optional type. --- mypy/server/astdiff.py | 4 +++- test-data/unit/diff.test | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 1 deletion(-) diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py index 93f178dca35a..5323bf2c57cb 100644 --- a/mypy/server/astdiff.py +++ b/mypy/server/astdiff.py @@ -74,6 +74,7 @@ class level -- these are handled at attribute level (say, 'mod.Cls.method' Var, ) from mypy.semanal_shared import find_dataclass_transform_spec +from mypy.state import state from mypy.types import ( AnyType, CallableType, @@ -456,7 +457,8 @@ def normalize_callable_variables(self, typ: CallableType) -> CallableType: tv = v.copy_modified(id=tid) tvs.append(tv) tvmap[v.id] = tv - return expand_type(typ, tvmap).copy_modified(variables=tvs) + with state.strict_optional_set(True): + return expand_type(typ, tvmap).copy_modified(variables=tvs) def visit_tuple_type(self, typ: TupleType) -> SnapshotItem: return ("TupleType", snapshot_types(typ.items)) diff --git a/test-data/unit/diff.test b/test-data/unit/diff.test index 66adfaecd909..8fc74868123e 100644 --- a/test-data/unit/diff.test +++ b/test-data/unit/diff.test @@ -1497,3 +1497,36 @@ class C: def meth(self) -> int: return 0 [out] __main__.C.meth + +[case testGenericFunctionWithOptionalReturnType] +from typing import Type, TypeVar + +T = TypeVar("T") + +class C: + @classmethod + def get_by_team_and_id( + cls: Type[T], + raw_member_id: int, + include_removed: bool = False, + ) -> T: + pass + +[file next.py] +from typing import Type, TypeVar, Optional + +T = TypeVar("T") + +class C: + @classmethod + def get_by_team_and_id( + cls: Type[T], + raw_member_id: int, + include_removed: bool = False, + ) -> Optional[T]: + pass + +[builtins fixtures/classmethod.pyi] +[out] +__main__.C.get_by_team_and_id +__main__.Optional From b41c8c1ec4337f158d70d9dfd2032c2ae03a017c Mon Sep 17 00:00:00 2001 From: Ivan 
Levkivskyi Date: Fri, 27 Oct 2023 18:35:48 +0100 Subject: [PATCH 120/144] Use upper bound as inference fallback more consistently (#16344) Fixes https://github.com/python/mypy/issues/16331 Fix is straightforward: do not use the fallback, where we would not give the error in the first place. --- mypy/checkexpr.py | 4 +++- mypy/infer.py | 8 ++++++-- mypy/solve.py | 5 ++++- test-data/unit/check-inference.test | 19 +++++++++++++++++++ 4 files changed, 32 insertions(+), 4 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 18c1c570ba91..ddcaa6ee30c9 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -1989,7 +1989,9 @@ def infer_function_type_arguments_using_context( # in this case external context is almost everything we have. if not is_generic_instance(ctx) and not is_literal_type_like(ctx): return callable.copy_modified() - args = infer_type_arguments(callable.variables, ret_type, erased_ctx) + args = infer_type_arguments( + callable.variables, ret_type, erased_ctx, skip_unsatisfied=True + ) # Only substitute non-Uninhabited and non-erased types. new_args: list[Type | None] = [] for arg in args: diff --git a/mypy/infer.py b/mypy/infer.py index ba4a1d2bc9b1..bcf0c95808ab 100644 --- a/mypy/infer.py +++ b/mypy/infer.py @@ -63,9 +63,13 @@ def infer_function_type_arguments( def infer_type_arguments( - type_vars: Sequence[TypeVarLikeType], template: Type, actual: Type, is_supertype: bool = False + type_vars: Sequence[TypeVarLikeType], + template: Type, + actual: Type, + is_supertype: bool = False, + skip_unsatisfied: bool = False, ) -> list[Type | None]: # Like infer_function_type_arguments, but only match a single type # against a generic type. 
constraints = infer_constraints(template, actual, SUPERTYPE_OF if is_supertype else SUBTYPE_OF) - return solve_constraints(type_vars, constraints)[0] + return solve_constraints(type_vars, constraints, skip_unsatisfied=skip_unsatisfied)[0] diff --git a/mypy/solve.py b/mypy/solve.py index 4d0ca6b7af24..efe8e487c506 100644 --- a/mypy/solve.py +++ b/mypy/solve.py @@ -43,6 +43,7 @@ def solve_constraints( constraints: list[Constraint], strict: bool = True, allow_polymorphic: bool = False, + skip_unsatisfied: bool = False, ) -> tuple[list[Type | None], list[TypeVarLikeType]]: """Solve type constraints. @@ -54,6 +55,8 @@ def solve_constraints( If allow_polymorphic=True, then use the full algorithm that can potentially return free type variables in solutions (these require special care when applying). Otherwise, use a simplified algorithm that just solves each type variable individually if possible. + + The skip_unsatisfied flag matches the same one in applytype.apply_generic_arguments(). """ vars = [tv.id for tv in original_vars] if not vars: @@ -110,7 +113,7 @@ def solve_constraints( candidate = AnyType(TypeOfAny.special_form) res.append(candidate) - if not free_vars: + if not free_vars and not skip_unsatisfied: # Most of the validation for solutions is done in applytype.py, but here we can # quickly test solutions w.r.t. to upper bounds, and use the latter (if possible), # if solutions are actually not valid (due to poor inference context). 
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 0a95ffdd50cf..0d162238450a 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -3748,3 +3748,22 @@ empty: Dict[NoReturn, NoReturn] def bar() -> Union[Dict[str, Any], Dict[int, Any]]: return empty [builtins fixtures/dict.pyi] + +[case testUpperBoundInferenceFallbackNotOverused] +from typing import TypeVar, Protocol, List + +S = TypeVar("S", covariant=True) +class Foo(Protocol[S]): + def foo(self) -> S: ... +def foo(x: Foo[S]) -> S: ... + +T = TypeVar("T", bound="Base") +class Base: + def foo(self: T) -> T: ... +class C(Base): + pass + +def f(values: List[T]) -> T: ... +x = foo(f([C()])) +reveal_type(x) # N: Revealed type is "__main__.C" +[builtins fixtures/list.pyi] From 5d4046477eb017fcb2cdbf64403a4e67308ef2ed Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 27 Oct 2023 18:36:08 +0100 Subject: [PATCH 121/144] Support PEP-646 and PEP-692 in the same callable (#16294) Fixes https://github.com/python/mypy/issues/16285 I was not sure if it is important to support this, but taking into account the current behavior is a crash, and that implementation is quite simple, I think we should do this. Using this opportunity I also improve related error messages a bit. 
--- mypy/semanal.py | 2 +- mypy/typeanal.py | 59 ++++++++------ mypy/types.py | 7 +- test-data/unit/check-typevar-tuple.test | 104 +++++++++++++++++++++++- test-data/unit/semanal-types.test | 2 +- 5 files changed, 142 insertions(+), 32 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index 179ee7c70bfb..342d48256ff5 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -950,7 +950,7 @@ def remove_unpack_kwargs(self, defn: FuncDef, typ: CallableType) -> CallableType return typ last_type = get_proper_type(last_type.type) if not isinstance(last_type, TypedDictType): - self.fail("Unpack item in ** argument must be a TypedDict", defn) + self.fail("Unpack item in ** argument must be a TypedDict", last_type) new_arg_types = typ.arg_types[:-1] + [AnyType(TypeOfAny.from_error)] return typ.copy_modified(arg_types=new_arg_types) overlap = set(typ.arg_names) & set(last_type.items) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index b16d0ac066b4..ceb276d3bdd4 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -987,33 +987,40 @@ def visit_callable_type(self, t: CallableType, nested: bool = True) -> Type: self.anal_star_arg_type(t.arg_types[-2], ARG_STAR, nested=nested), self.anal_star_arg_type(t.arg_types[-1], ARG_STAR2, nested=nested), ] + # If nested is True, it means we are analyzing a Callable[...] type, rather + # than a function definition type. We need to "unpack" ** TypedDict annotation + # here (for function definitions it is done in semanal). + if nested and isinstance(arg_types[-1], UnpackType): + # TODO: it would be better to avoid this get_proper_type() call. 
+ unpacked = get_proper_type(arg_types[-1].type) + if isinstance(unpacked, TypedDictType): + arg_types[-1] = unpacked + unpacked_kwargs = True + arg_types = self.check_unpacks_in_list(arg_types) else: - arg_types = self.anal_array(t.arg_types, nested=nested, allow_unpack=True) star_index = None if ARG_STAR in arg_kinds: star_index = arg_kinds.index(ARG_STAR) star2_index = None if ARG_STAR2 in arg_kinds: star2_index = arg_kinds.index(ARG_STAR2) - validated_args: list[Type] = [] - for i, at in enumerate(arg_types): - if isinstance(at, UnpackType) and i not in (star_index, star2_index): - self.fail( - message_registry.INVALID_UNPACK_POSITION, at, code=codes.VALID_TYPE - ) - validated_args.append(AnyType(TypeOfAny.from_error)) - else: - if nested and isinstance(at, UnpackType) and i == star_index: - # TODO: it would be better to avoid this get_proper_type() call. - p_at = get_proper_type(at.type) - if isinstance(p_at, TypedDictType) and not at.from_star_syntax: - # Automatically detect Unpack[Foo] in Callable as backwards - # compatible syntax for **Foo, if Foo is a TypedDict. - at = p_at - arg_kinds[i] = ARG_STAR2 - unpacked_kwargs = True - validated_args.append(at) - arg_types = validated_args + arg_types = [] + for i, ut in enumerate(t.arg_types): + at = self.anal_type( + ut, nested=nested, allow_unpack=i in (star_index, star2_index) + ) + if nested and isinstance(at, UnpackType) and i == star_index: + # TODO: it would be better to avoid this get_proper_type() call. + p_at = get_proper_type(at.type) + if isinstance(p_at, TypedDictType) and not at.from_star_syntax: + # Automatically detect Unpack[Foo] in Callable as backwards + # compatible syntax for **Foo, if Foo is a TypedDict. 
+ at = p_at + arg_kinds[i] = ARG_STAR2 + unpacked_kwargs = True + arg_types.append(at) + if nested: + arg_types = self.check_unpacks_in_list(arg_types) # If there were multiple (invalid) unpacks, the arg types list will become shorter, # we need to trim the kinds/names as well to avoid crashes. arg_kinds = t.arg_kinds[: len(arg_types)] @@ -1387,8 +1394,9 @@ def analyze_callable_args( names: list[str | None] = [] seen_unpack = False unpack_types: list[Type] = [] - invalid_unpacks = [] - for arg in arglist.items: + invalid_unpacks: list[Type] = [] + second_unpack_last = False + for i, arg in enumerate(arglist.items): if isinstance(arg, CallableArgument): args.append(arg.typ) names.append(arg.name) @@ -1415,6 +1423,11 @@ def analyze_callable_args( ): if seen_unpack: # Multiple unpacks, preserve them, so we can give an error later. + if i == len(arglist.items) - 1 and not invalid_unpacks: + # Special case: if there are just two unpacks, and the second one appears + # as last type argument, it can be still valid, if the second unpacked type + # is a TypedDict. This should be checked by the caller. + second_unpack_last = True invalid_unpacks.append(arg) continue seen_unpack = True @@ -1442,7 +1455,7 @@ def analyze_callable_args( names.append(None) for arg in invalid_unpacks: args.append(arg) - kinds.append(ARG_STAR) + kinds.append(ARG_STAR2 if second_unpack_last else ARG_STAR) names.append(None) # Note that arglist below is only used for error context. 
check_arg_names(names, [arglist] * len(args), self.fail, "Callable") diff --git a/mypy/types.py b/mypy/types.py index ae1a1f595fa2..43003a9a22b6 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -3268,15 +3268,16 @@ def visit_callable_type(self, t: CallableType) -> str: num_skip = 0 s = "" - bare_asterisk = False + asterisk = False for i in range(len(t.arg_types) - num_skip): if s != "": s += ", " - if t.arg_kinds[i].is_named() and not bare_asterisk: + if t.arg_kinds[i].is_named() and not asterisk: s += "*, " - bare_asterisk = True + asterisk = True if t.arg_kinds[i] == ARG_STAR: s += "*" + asterisk = True if t.arg_kinds[i] == ARG_STAR2: s += "**" name = t.arg_names[i] diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index 4a281fbf0b49..1a2573898170 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -571,8 +571,7 @@ from typing_extensions import Unpack, TypeVarTuple Ts = TypeVarTuple("Ts") Us = TypeVarTuple("Us") -a: Callable[[Unpack[Ts], Unpack[Us]], int] # E: Var args may not appear after named or var args \ - # E: More than one Unpack in a type is not allowed +a: Callable[[Unpack[Ts], Unpack[Us]], int] # E: More than one Unpack in a type is not allowed reveal_type(a) # N: Revealed type is "def [Ts, Us] (*Unpack[Ts`-1]) -> builtins.int" b: Callable[[Unpack], int] # E: Unpack[...] 
requires exactly one type argument reveal_type(b) # N: Revealed type is "def (*Any) -> builtins.int" @@ -730,8 +729,7 @@ A = Tuple[Unpack[Ts], Unpack[Us]] # E: More than one Unpack in a type is not al x: A[int, str] reveal_type(x) # N: Revealed type is "Tuple[builtins.int, builtins.str]" -B = Callable[[Unpack[Ts], Unpack[Us]], int] # E: Var args may not appear after named or var args \ - # E: More than one Unpack in a type is not allowed +B = Callable[[Unpack[Ts], Unpack[Us]], int] # E: More than one Unpack in a type is not allowed y: B[int, str] reveal_type(y) # N: Revealed type is "def (builtins.int, builtins.str) -> builtins.int" @@ -1912,3 +1910,101 @@ reveal_type(y) # N: Revealed type is "__main__.C[builtins.int, Unpack[builtins. z = C[int]() # E: Bad number of arguments, expected: at least 2, given: 1 reveal_type(z) # N: Revealed type is "__main__.C[Any, Unpack[builtins.tuple[Any, ...]], Any]" [builtins fixtures/tuple.pyi] + +[case testTypeVarTupleBothUnpacksSimple] +from typing import Tuple +from typing_extensions import Unpack, TypeVarTuple, TypedDict + +class Keywords(TypedDict): + a: str + b: str + +Ints = Tuple[int, ...] + +def f(*args: Unpack[Ints], other: str = "no", **kwargs: Unpack[Keywords]) -> None: ... +reveal_type(f) # N: Revealed type is "def (*args: builtins.int, other: builtins.str =, **kwargs: Unpack[TypedDict('__main__.Keywords', {'a': builtins.str, 'b': builtins.str})])" +f(1, 2, a="a", b="b") # OK +f(1, 2, 3) # E: Missing named argument "a" for "f" \ + # E: Missing named argument "b" for "f" + +Ts = TypeVarTuple("Ts") +def g(*args: Unpack[Ts], other: str = "no", **kwargs: Unpack[Keywords]) -> None: ... 
+reveal_type(g) # N: Revealed type is "def [Ts] (*args: Unpack[Ts`-1], other: builtins.str =, **kwargs: Unpack[TypedDict('__main__.Keywords', {'a': builtins.str, 'b': builtins.str})])" +g(1, 2, a="a", b="b") # OK +g(1, 2, 3) # E: Missing named argument "a" for "g" \ + # E: Missing named argument "b" for "g" + +def bad( + *args: Unpack[Keywords], # E: "Keywords" cannot be unpacked (must be tuple or TypeVarTuple) + **kwargs: Unpack[Ints], # E: Unpack item in ** argument must be a TypedDict +) -> None: ... +reveal_type(bad) # N: Revealed type is "def (*args: Any, **kwargs: Any)" + +def bad2( + one: int, + *args: Unpack[Keywords], # E: "Keywords" cannot be unpacked (must be tuple or TypeVarTuple) + other: str = "no", + **kwargs: Unpack[Ints], # E: Unpack item in ** argument must be a TypedDict +) -> None: ... +reveal_type(bad2) # N: Revealed type is "def (one: builtins.int, *args: Any, other: builtins.str =, **kwargs: Any)" +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleBothUnpacksCallable] +from typing import Callable, Tuple +from typing_extensions import Unpack, TypedDict + +class Keywords(TypedDict): + a: str + b: str +Ints = Tuple[int, ...] 
+ +cb: Callable[[Unpack[Ints], Unpack[Keywords]], None] +reveal_type(cb) # N: Revealed type is "def (*builtins.int, **Unpack[TypedDict('__main__.Keywords', {'a': builtins.str, 'b': builtins.str})])" + +cb2: Callable[[int, Unpack[Ints], int, Unpack[Keywords]], None] +reveal_type(cb2) # N: Revealed type is "def (builtins.int, *Unpack[Tuple[Unpack[builtins.tuple[builtins.int, ...]], builtins.int]], **Unpack[TypedDict('__main__.Keywords', {'a': builtins.str, 'b': builtins.str})])" +cb2(1, 2, 3, a="a", b="b") +cb2(1, a="a", b="b") # E: Too few arguments +cb2(1, 2, 3, a="a") # E: Missing named argument "b" + +bad1: Callable[[Unpack[Ints], Unpack[Ints]], None] # E: More than one Unpack in a type is not allowed +reveal_type(bad1) # N: Revealed type is "def (*builtins.int)" +bad2: Callable[[Unpack[Keywords], Unpack[Keywords]], None] # E: "Keywords" cannot be unpacked (must be tuple or TypeVarTuple) +reveal_type(bad2) # N: Revealed type is "def (*Any, **Unpack[TypedDict('__main__.Keywords', {'a': builtins.str, 'b': builtins.str})])" +bad3: Callable[[Unpack[Keywords], Unpack[Ints]], None] # E: "Keywords" cannot be unpacked (must be tuple or TypeVarTuple) \ + # E: More than one Unpack in a type is not allowed +reveal_type(bad3) # N: Revealed type is "def (*Any)" +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleBothUnpacksApplication] +from typing import Callable, TypeVar, Optional +from typing_extensions import Unpack, TypeVarTuple, TypedDict + +class Keywords(TypedDict): + a: str + b: str + +T = TypeVar("T") +Ts = TypeVarTuple("Ts") +def test( + x: int, + func: Callable[[Unpack[Ts]], T], + *args: Unpack[Ts], + other: Optional[str] = None, + **kwargs: Unpack[Keywords], +) -> T: + if bool(): + func(*args, **kwargs) # E: Extra argument "a" from **args + return func(*args) +def test2( + x: int, + func: Callable[[Unpack[Ts], Unpack[Keywords]], T], + *args: Unpack[Ts], + other: Optional[str] = None, + **kwargs: Unpack[Keywords], +) -> T: + if bool(): + func(*args) # E: 
Missing named argument "a" \ + # E: Missing named argument "b" + return func(*args, **kwargs) +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test index 5e05d099b958..83c44738f055 100644 --- a/test-data/unit/semanal-types.test +++ b/test-data/unit/semanal-types.test @@ -1043,7 +1043,7 @@ MypyFile:1( default( Var(y) StrExpr())) - def (*x: builtins.int, *, y: builtins.str =) -> Any + def (*x: builtins.int, y: builtins.str =) -> Any VarArg( Var(x)) Block:1( From 4f05dd506ee4cc8a9f38210be96e974fb8f54a6e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B8rgen=20Lind?= Date: Fri, 27 Oct 2023 22:24:50 +0200 Subject: [PATCH 122/144] Write stubs with utf-8 encoding (#16329) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is to ensure that you don't get encoding errors if docstrings contains odd characters like emojis. --------- Co-authored-by: Jørgen Lind Co-authored-by: hauntsaninja --- mypy/stubgen.py | 2 +- test-data/unit/stubgen.test | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index a2f07a35eaa2..837cd723c410 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -1578,7 +1578,7 @@ def generate_stub_for_py_module( subdir = os.path.dirname(target) if subdir and not os.path.isdir(subdir): os.makedirs(subdir) - with open(target, "w") as file: + with open(target, "w", encoding="utf-8") as file: file.write(output) diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index 64a1353b29b3..895500c1ba57 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -3485,7 +3485,7 @@ def f2(): ... 
class A: """class docstring - a multiline docstring""" + a multiline 😊 docstring""" def func(): """func docstring don't forget to indent""" @@ -3512,7 +3512,7 @@ class B: class A: """class docstring - a multiline docstring""" + a multiline 😊 docstring""" def func() -> None: """func docstring don't forget to indent""" From 5c6ca5cdee906ec7c57be478679cd689fdd15861 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 27 Oct 2023 23:58:48 +0100 Subject: [PATCH 123/144] Properly use proper subtyping for callables (#16343) Fixes https://github.com/python/mypy/issues/16338 This is kind of a major change, but it is technically correct: we should not treat `(*args: Any, **kwargs: Any)` special in `is_proper_subtype()` (only in `is_subtype()`). Unfortunately, this requires an additional flag for `is_callable_compatible()`, since currently we are passing the subtype kind information via a callback, which is not applicable to handling argument kinds. --- mypy/checker.py | 11 ++++++++--- mypy/constraints.py | 12 ++++++++++-- mypy/meet.py | 1 + mypy/subtypes.py | 14 +++++++++++--- test-data/unit/check-overloading.test | 22 +++++++++++++++++++++- 5 files changed, 51 insertions(+), 9 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 64bbbfa0a55b..e68dc4178962 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -800,7 +800,7 @@ def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None: # Is the overload alternative's arguments subtypes of the implementation's? 
if not is_callable_compatible( - impl, sig1, is_compat=is_subtype, ignore_return=True + impl, sig1, is_compat=is_subtype, is_proper_subtype=False, ignore_return=True ): self.msg.overloaded_signatures_arg_specific(i + 1, defn.impl) @@ -7685,6 +7685,7 @@ def is_unsafe_overlapping_overload_signatures( signature, other, is_compat=is_overlapping_types_no_promote_no_uninhabited_no_none, + is_proper_subtype=False, is_compat_return=lambda l, r: not is_subtype_no_promote(l, r), ignore_return=False, check_args_covariantly=True, @@ -7694,6 +7695,7 @@ def is_unsafe_overlapping_overload_signatures( other, signature, is_compat=is_overlapping_types_no_promote_no_uninhabited_no_none, + is_proper_subtype=False, is_compat_return=lambda l, r: not is_subtype_no_promote(r, l), ignore_return=False, check_args_covariantly=False, @@ -7744,7 +7746,7 @@ def overload_can_never_match(signature: CallableType, other: CallableType) -> bo signature, {tvar.id: erase_def_to_union_or_bound(tvar) for tvar in signature.variables} ) return is_callable_compatible( - exp_signature, other, is_compat=is_more_precise, ignore_return=True + exp_signature, other, is_compat=is_more_precise, is_proper_subtype=True, ignore_return=True ) @@ -7754,7 +7756,9 @@ def is_more_general_arg_prefix(t: FunctionLike, s: FunctionLike) -> bool: # general than one with fewer items (or just one item)? 
if isinstance(t, CallableType): if isinstance(s, CallableType): - return is_callable_compatible(t, s, is_compat=is_proper_subtype, ignore_return=True) + return is_callable_compatible( + t, s, is_compat=is_proper_subtype, is_proper_subtype=True, ignore_return=True + ) elif isinstance(t, FunctionLike): if isinstance(s, FunctionLike): if len(t.items) == len(s.items): @@ -7769,6 +7773,7 @@ def is_same_arg_prefix(t: CallableType, s: CallableType) -> bool: t, s, is_compat=is_same_type, + is_proper_subtype=True, ignore_return=True, check_args_covariantly=True, ignore_pos_arg_names=True, diff --git a/mypy/constraints.py b/mypy/constraints.py index 7d782551b261..6f611736a72a 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -1352,7 +1352,11 @@ def find_matching_overload_item(overloaded: Overloaded, template: CallableType) # Return type may be indeterminate in the template, so ignore it when performing a # subtype check. if mypy.subtypes.is_callable_compatible( - item, template, is_compat=mypy.subtypes.is_subtype, ignore_return=True + item, + template, + is_compat=mypy.subtypes.is_subtype, + is_proper_subtype=False, + ignore_return=True, ): return item # Fall back to the first item if we can't find a match. This is totally arbitrary -- @@ -1370,7 +1374,11 @@ def find_matching_overload_items( # Return type may be indeterminate in the template, so ignore it when performing a # subtype check. 
if mypy.subtypes.is_callable_compatible( - item, template, is_compat=mypy.subtypes.is_subtype, ignore_return=True + item, + template, + is_compat=mypy.subtypes.is_subtype, + is_proper_subtype=False, + ignore_return=True, ): res.append(item) if not res: diff --git a/mypy/meet.py b/mypy/meet.py index 1a566aed17de..fa9bd6a83743 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -462,6 +462,7 @@ def _type_object_overlap(left: Type, right: Type) -> bool: left, right, is_compat=_is_overlapping_types, + is_proper_subtype=False, ignore_pos_arg_names=True, allow_partial_overlap=True, ) diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 2ca3357dd722..383e6eddd317 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -658,6 +658,8 @@ def visit_parameters(self, left: Parameters) -> bool: left, self.right, is_compat=self._is_subtype, + # TODO: this should pass the current value, but then couple tests fail. + is_proper_subtype=False, ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, ) else: @@ -677,6 +679,7 @@ def visit_callable_type(self, left: CallableType) -> bool: left, right, is_compat=self._is_subtype, + is_proper_subtype=self.proper_subtype, ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, strict_concatenate=(self.options.extra_checks or self.options.strict_concatenate) if self.options @@ -932,6 +935,7 @@ def visit_overloaded(self, left: Overloaded) -> bool: left_item, right_item, is_compat=self._is_subtype, + is_proper_subtype=self.proper_subtype, ignore_return=True, ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, strict_concatenate=strict_concat, @@ -940,6 +944,7 @@ def visit_overloaded(self, left: Overloaded) -> bool: right_item, left_item, is_compat=self._is_subtype, + is_proper_subtype=self.proper_subtype, ignore_return=True, ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, strict_concatenate=strict_concat, @@ -1358,6 +1363,7 @@ def is_callable_compatible( right: CallableType, *, is_compat: 
Callable[[Type, Type], bool], + is_proper_subtype: bool, is_compat_return: Callable[[Type, Type], bool] | None = None, ignore_return: bool = False, ignore_pos_arg_names: bool = False, @@ -1517,6 +1523,7 @@ def g(x: int) -> int: ... left, right, is_compat=is_compat, + is_proper_subtype=is_proper_subtype, ignore_pos_arg_names=ignore_pos_arg_names, allow_partial_overlap=allow_partial_overlap, strict_concatenate_check=strict_concatenate_check, @@ -1552,12 +1559,13 @@ def are_parameters_compatible( right: Parameters | NormalizedCallableType, *, is_compat: Callable[[Type, Type], bool], + is_proper_subtype: bool, ignore_pos_arg_names: bool = False, allow_partial_overlap: bool = False, strict_concatenate_check: bool = False, ) -> bool: """Helper function for is_callable_compatible, used for Parameter compatibility""" - if right.is_ellipsis_args: + if right.is_ellipsis_args and not is_proper_subtype: return True left_star = left.var_arg() @@ -1566,9 +1574,9 @@ def are_parameters_compatible( right_star2 = right.kw_arg() # Treat "def _(*a: Any, **kw: Any) -> X" similarly to "Callable[..., X]" - if are_trivial_parameters(right): + if are_trivial_parameters(right) and not is_proper_subtype: return True - trivial_suffix = is_trivial_suffix(right) + trivial_suffix = is_trivial_suffix(right) and not is_proper_subtype # Match up corresponding arguments and check them for compatibility. 
In # every pair (argL, argR) of corresponding arguments from L and R, argL must diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index b97eeb48115c..7bca5cc7b508 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -6501,7 +6501,7 @@ eggs = lambda: 'eggs' reveal_type(func(eggs)) # N: Revealed type is "def (builtins.str) -> builtins.str" spam: Callable[..., str] = lambda x, y: 'baz' -reveal_type(func(spam)) # N: Revealed type is "def (*Any, **Any) -> builtins.str" +reveal_type(func(spam)) # N: Revealed type is "def (*Any, **Any) -> Any" [builtins fixtures/paramspec.pyi] [case testGenericOverloadOverlapWithType] @@ -6673,3 +6673,23 @@ c2 = MyCallable("test") reveal_type(c2) # N: Revealed type is "__main__.MyCallable[builtins.str]" reveal_type(c2()) # should be int # N: Revealed type is "builtins.int" [builtins fixtures/tuple.pyi] + +[case testOverloadWithStarAnyFallback] +from typing import overload, Any + +class A: + @overload + def f(self, e: str) -> str: ... + @overload + def f(self, *args: Any, **kwargs: Any) -> Any: ... + def f(self, *args, **kwargs): + pass + +class B: + @overload + def f(self, e: str, **kwargs: Any) -> str: ... + @overload + def f(self, *args: Any, **kwargs: Any) -> Any: ... 
+ def f(self, *args, **kwargs): + pass +[builtins fixtures/tuple.pyi] From 42f7cf1a7228844f82f4de22ac94f0e1b5e3ed9b Mon Sep 17 00:00:00 2001 From: Cibin Mathew <10793628+cibinmathew@users.noreply.github.com> Date: Sat, 28 Oct 2023 01:03:55 +0200 Subject: [PATCH 124/144] Update starred expr error message to match Python's (#16304) Fixes https://github.com/python/mypy/issues/16287 Update mypy's error on starred expression to match that of Python 3.11 --- mypy/semanal.py | 2 +- test-data/unit/check-statements.test | 2 +- test-data/unit/check-tuples.test | 2 +- test-data/unit/semanal-errors.test | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index 342d48256ff5..a114a5a1dcd4 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -4997,7 +4997,7 @@ def visit_dict_expr(self, expr: DictExpr) -> None: def visit_star_expr(self, expr: StarExpr) -> None: if not expr.valid: - self.fail("Can use starred expression only as assignment target", expr, blocker=True) + self.fail("can't use starred expression here", expr, blocker=True) else: expr.expr.accept(self) diff --git a/test-data/unit/check-statements.test b/test-data/unit/check-statements.test index 023e2935a158..f5b47e7ab97f 100644 --- a/test-data/unit/check-statements.test +++ b/test-data/unit/check-statements.test @@ -2232,7 +2232,7 @@ def foo(x: int) -> Union[Generator[A, None, None], Generator[B, None, None]]: yield x # E: Incompatible types in "yield" (actual type "int", expected type "Union[A, B]") [case testNoCrashOnStarRightHandSide] -x = *(1, 2, 3) # E: Can use starred expression only as assignment target +x = *(1, 2, 3) # E: can't use starred expression here [builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index 76225360a7c1..7070ead43746 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -1118,7 +1118,7 @@ a = (0, *b, '') [builtins fixtures/tuple.pyi] [case 
testUnpackSyntaxError] -*foo # E: Can use starred expression only as assignment target +*foo # E: can't use starred expression here [builtins fixtures/tuple.pyi] [case testUnpackBases] diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test index f21ba5253437..82307f30877e 100644 --- a/test-data/unit/semanal-errors.test +++ b/test-data/unit/semanal-errors.test @@ -480,13 +480,13 @@ c = 1 d = 1 a = *b [out] -main:4: error: Can use starred expression only as assignment target +main:4: error: can't use starred expression here [case testStarExpressionInExp] a = 1 *a + 1 [out] -main:2: error: Can use starred expression only as assignment target +main:2: error: can't use starred expression here [case testInvalidDel1] x = 1 From 9011ca8b4dedc0e7177737b5265f69694afa91b5 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sat, 28 Oct 2023 00:04:58 +0100 Subject: [PATCH 125/144] Delete recursive aliases flags (#16346) FWIW I decided to keep the old tests (where possible), just to be sure we will not re-introduce various crashes at function scope, where recursive aliases are not allowed. 
--- mypy/main.py | 14 --- mypy/options.py | 4 - mypy/semanal.py | 4 +- mypy/semanal_namedtuple.py | 6 +- mypy/semanal_newtype.py | 3 +- mypy/semanal_typeddict.py | 11 +- mypy/typeanal.py | 2 +- test-data/unit/check-classes.test | 31 ++--- test-data/unit/check-incremental.test | 15 +-- test-data/unit/check-namedtuple.test | 168 +++++++++++++------------ test-data/unit/check-newsemanal.test | 96 +++++++------- test-data/unit/check-type-aliases.test | 44 ++++--- test-data/unit/check-typeddict.test | 34 ++--- test-data/unit/check-unions.test | 6 +- test-data/unit/cmdline.test | 8 -- 15 files changed, 216 insertions(+), 230 deletions(-) diff --git a/mypy/main.py b/mypy/main.py index dff1a0362ba2..718eb5a7c0c1 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -999,15 +999,6 @@ def add_invertible_flag( action="store_true", help="Enable new experimental type inference algorithm", ) - internals_group.add_argument( - "--disable-recursive-aliases", - action="store_true", - help="Disable experimental support for recursive type aliases", - ) - # Deprecated reverse variant of the above. - internals_group.add_argument( - "--enable-recursive-aliases", action="store_true", help=argparse.SUPPRESS - ) parser.add_argument( "--enable-incomplete-feature", action="append", @@ -1392,11 +1383,6 @@ def set_strict_flags() -> None: if options.logical_deps: options.cache_fine_grained = True - if options.enable_recursive_aliases: - print( - "Warning: --enable-recursive-aliases is deprecated;" - " recursive types are enabled by default" - ) if options.strict_concatenate and not strict_option_set: print("Warning: --strict-concatenate is deprecated; use --extra-checks instead") diff --git a/mypy/options.py b/mypy/options.py index cb0464d4dc06..3447b5dfb1f6 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -362,10 +362,6 @@ def __init__(self) -> None: self.many_errors_threshold = defaults.MANY_ERRORS_THRESHOLD # Enable new experimental type inference algorithm. 
self.new_type_inference = False - # Disable recursive type aliases (currently experimental) - self.disable_recursive_aliases = False - # Deprecated reverse version of the above, do not use. - self.enable_recursive_aliases = False # Export line-level, limited, fine-grained dependency information in cache data # (undocumented feature). self.export_ref_info = False diff --git a/mypy/semanal.py b/mypy/semanal.py index a114a5a1dcd4..27491ac695ae 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -3608,7 +3608,7 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool: ) if not res: return False - if not self.options.disable_recursive_aliases and not self.is_func_scope(): + if not self.is_func_scope(): # Only marking incomplete for top-level placeholders makes recursive aliases like # `A = Sequence[str | A]` valid here, similar to how we treat base classes in class # definitions, allowing `class str(Sequence[str]): ...` @@ -6296,7 +6296,7 @@ def process_placeholder( def cannot_resolve_name(self, name: str | None, kind: str, ctx: Context) -> None: name_format = f' "{name}"' if name else "" self.fail(f"Cannot resolve {kind}{name_format} (possible cyclic definition)", ctx) - if not self.options.disable_recursive_aliases and self.is_func_scope(): + if self.is_func_scope(): self.note("Recursive types are not allowed at function scope", ctx) def qualified_name(self, name: str) -> str: diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py index 51ea90e07f3d..80cf1c4e184a 100644 --- a/mypy/semanal_namedtuple.py +++ b/mypy/semanal_namedtuple.py @@ -182,8 +182,7 @@ def check_namedtuple_classdef( # it would be inconsistent with type aliases. 
analyzed = self.api.anal_type( stmt.type, - allow_placeholder=not self.options.disable_recursive_aliases - and not self.api.is_func_scope(), + allow_placeholder=not self.api.is_func_scope(), prohibit_self_type="NamedTuple item type", ) if analyzed is None: @@ -450,8 +449,7 @@ def parse_namedtuple_fields_with_types( # We never allow recursive types at function scope. analyzed = self.api.anal_type( type, - allow_placeholder=not self.options.disable_recursive_aliases - and not self.api.is_func_scope(), + allow_placeholder=not self.api.is_func_scope(), prohibit_self_type="NamedTuple item type", ) # Workaround #4987 and avoid introducing a bogus UnboundType diff --git a/mypy/semanal_newtype.py b/mypy/semanal_newtype.py index 16c6c024800d..c9c0c46f7aee 100644 --- a/mypy/semanal_newtype.py +++ b/mypy/semanal_newtype.py @@ -207,8 +207,7 @@ def check_newtype_args( self.api.anal_type( unanalyzed_type, report_invalid_types=False, - allow_placeholder=not self.options.disable_recursive_aliases - and not self.api.is_func_scope(), + allow_placeholder=not self.api.is_func_scope(), ) ) should_defer = False diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py index a9a4cd868f27..51424d8800d2 100644 --- a/mypy/semanal_typeddict.py +++ b/mypy/semanal_typeddict.py @@ -228,10 +228,7 @@ def analyze_base_args(self, base: IndexExpr, ctx: Context) -> list[Type] | None: self.fail("Invalid TypedDict type argument", ctx) return None analyzed = self.api.anal_type( - type, - allow_required=True, - allow_placeholder=not self.options.disable_recursive_aliases - and not self.api.is_func_scope(), + type, allow_required=True, allow_placeholder=not self.api.is_func_scope() ) if analyzed is None: return None @@ -307,8 +304,7 @@ def analyze_typeddict_classdef_fields( analyzed = self.api.anal_type( stmt.type, allow_required=True, - allow_placeholder=not self.options.disable_recursive_aliases - and not self.api.is_func_scope(), + allow_placeholder=not self.api.is_func_scope(), 
prohibit_self_type="TypedDict item type", ) if analyzed is None: @@ -504,8 +500,7 @@ def parse_typeddict_fields_with_types( analyzed = self.api.anal_type( type, allow_required=True, - allow_placeholder=not self.options.disable_recursive_aliases - and not self.api.is_func_scope(), + allow_placeholder=not self.api.is_func_scope(), prohibit_self_type="TypedDict item type", ) if analyzed is None: diff --git a/mypy/typeanal.py b/mypy/typeanal.py index ceb276d3bdd4..03579404aac9 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -490,7 +490,7 @@ def cannot_resolve_type(self, t: UnboundType) -> None: # need access to MessageBuilder here. Also move the similar # message generation logic in semanal.py. self.api.fail(f'Cannot resolve name "{t.name}" (possible cyclic definition)', t) - if not self.options.disable_recursive_aliases and self.api.is_func_scope(): + if self.api.is_func_scope(): self.note("Recursive types are not allowed at function scope", t) def apply_concatenate_operator(self, t: UnboundType) -> Type: diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index cd60ec7c9a9c..983cb8454a05 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -5002,12 +5002,13 @@ class A(Tuple[int, str]): pass -- ----------------------- [case testCrashOnSelfRecursiveNamedTupleVar] -# flags: --disable-recursive-aliases from typing import NamedTuple -N = NamedTuple('N', [('x', N)]) # E: Cannot resolve name "N" (possible cyclic definition) -n: N -reveal_type(n) # N: Revealed type is "Tuple[Any, fallback=__main__.N]" +def test() -> None: + N = NamedTuple('N', [('x', N)]) # E: Cannot resolve name "N" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope + n: N + reveal_type(n) # N: Revealed type is "Tuple[Any, fallback=__main__.N@4]" [builtins fixtures/tuple.pyi] [case testCrashOnSelfRecursiveTypedDictVar] @@ -5032,18 +5033,20 @@ lst = [n, m] [builtins fixtures/isinstancelist.pyi] 
[case testCorrectJoinOfSelfRecursiveTypedDicts] -# flags: --disable-recursive-aliases from mypy_extensions import TypedDict -class N(TypedDict): - x: N # E: Cannot resolve name "N" (possible cyclic definition) -class M(TypedDict): - x: M # E: Cannot resolve name "M" (possible cyclic definition) - -n: N -m: M -lst = [n, m] -reveal_type(lst[0]['x']) # N: Revealed type is "Any" +def test() -> None: + class N(TypedDict): + x: N # E: Cannot resolve name "N" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope + class M(TypedDict): + x: M # E: Cannot resolve name "M" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope + + n: N + m: M + lst = [n, m] + reveal_type(lst[0]['x']) # N: Revealed type is "Any" [builtins fixtures/isinstancelist.pyi] [case testCrashInForwardRefToNamedTupleWithIsinstance] diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index 801bbd4e77b4..f2625b869c19 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -4594,7 +4594,6 @@ def outer() -> None: [out2] [case testRecursiveAliasImported] -# flags: --disable-recursive-aliases import a [file a.py] @@ -4620,16 +4619,10 @@ B = List[A] [builtins fixtures/list.pyi] [out] -tmp/lib.pyi:4: error: Module "other" has no attribute "B" -tmp/other.pyi:3: error: Cannot resolve name "B" (possible cyclic definition) [out2] -tmp/lib.pyi:4: error: Module "other" has no attribute "B" -tmp/other.pyi:3: error: Cannot resolve name "B" (possible cyclic definition) -tmp/a.py:3: note: Revealed type is "builtins.list[Any]" - -[case testRecursiveNamedTupleTypedDict-skip] -# https://github.com/python/mypy/issues/7125 +tmp/a.py:3: note: Revealed type is "builtins.list[builtins.list[...]]" +[case testRecursiveNamedTupleTypedDict] import a [file a.py] import lib @@ -4641,7 +4634,7 @@ reveal_type(x.x['x']) [file lib.pyi] from typing import NamedTuple from other import B -A = 
NamedTuple('A', [('x', B)]) # type: ignore +A = NamedTuple('A', [('x', B)]) [file other.pyi] from mypy_extensions import TypedDict from lib import A @@ -4649,7 +4642,7 @@ B = TypedDict('B', {'x': A}) [builtins fixtures/dict.pyi] [out] [out2] -tmp/a.py:3: note: Revealed type is "Tuple[TypedDict('other.B', {'x': Any}), fallback=lib.A]" +tmp/a.py:3: note: Revealed type is "Tuple[TypedDict('other.B', {'x': Tuple[..., fallback=lib.A]}), fallback=lib.A]" [case testFollowImportSkipNotInvalidatedOnPresent] # flags: --follow-imports=skip diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test index 9fa098b28dee..14e075339572 100644 --- a/test-data/unit/check-namedtuple.test +++ b/test-data/unit/check-namedtuple.test @@ -607,16 +607,18 @@ tmp/b.py:4: note: Revealed type is "Tuple[Any, fallback=a.N]" tmp/b.py:7: note: Revealed type is "Tuple[Any, fallback=a.N]" [case testSimpleSelfReferentialNamedTuple] -# flags: --disable-recursive-aliases from typing import NamedTuple -class MyNamedTuple(NamedTuple): - parent: 'MyNamedTuple' # E: Cannot resolve name "MyNamedTuple" (possible cyclic definition) -def bar(nt: MyNamedTuple) -> MyNamedTuple: - return nt +def test() -> None: + class MyNamedTuple(NamedTuple): + parent: 'MyNamedTuple' # E: Cannot resolve name "MyNamedTuple" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope -x: MyNamedTuple -reveal_type(x.parent) # N: Revealed type is "Any" + def bar(nt: MyNamedTuple) -> MyNamedTuple: + return nt + + x: MyNamedTuple + reveal_type(x.parent) # N: Revealed type is "Any" [builtins fixtures/tuple.pyi] -- Some crazy self-referential named tuples and types dicts @@ -645,106 +647,111 @@ class B: [out] [case testSelfRefNT1] -# flags: --disable-recursive-aliases from typing import Tuple, NamedTuple -Node = NamedTuple('Node', [ - ('name', str), - ('children', Tuple['Node', ...]), # E: Cannot resolve name "Node" (possible cyclic definition) - ]) -n: Node -reveal_type(n) # N: 
Revealed type is "Tuple[builtins.str, builtins.tuple[Any, ...], fallback=__main__.Node]" +def test() -> None: + Node = NamedTuple('Node', [ + ('name', str), + ('children', Tuple['Node', ...]), # E: Cannot resolve name "Node" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope + ]) + n: Node + reveal_type(n) # N: Revealed type is "Tuple[builtins.str, builtins.tuple[Any, ...], fallback=__main__.Node@4]" [builtins fixtures/tuple.pyi] [case testSelfRefNT2] -# flags: --disable-recursive-aliases from typing import Tuple, NamedTuple -A = NamedTuple('A', [ - ('x', str), - ('y', Tuple['B', ...]), # E: Cannot resolve name "B" (possible cyclic definition) - ]) -class B(NamedTuple): - x: A - y: int +def test() -> None: + A = NamedTuple('A', [ + ('x', str), + ('y', Tuple['B', ...]), # E: Cannot resolve name "B" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope + ]) + class B(NamedTuple): + x: A + y: int -n: A -reveal_type(n) # N: Revealed type is "Tuple[builtins.str, builtins.tuple[Any, ...], fallback=__main__.A]" + n: A + reveal_type(n) # N: Revealed type is "Tuple[builtins.str, builtins.tuple[Any, ...], fallback=__main__.A@4]" [builtins fixtures/tuple.pyi] [case testSelfRefNT3] -# flags: --disable-recursive-aliases from typing import NamedTuple, Tuple -class B(NamedTuple): - x: Tuple[A, int] # E: Cannot resolve name "A" (possible cyclic definition) - y: int +def test() -> None: + class B(NamedTuple): + x: Tuple[A, int] # E: Cannot resolve name "A" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope + y: int -A = NamedTuple('A', [ - ('x', str), - ('y', 'B'), - ]) -n: B -m: A -reveal_type(n.x) # N: Revealed type is "Tuple[Any, builtins.int]" -reveal_type(m[0]) # N: Revealed type is "builtins.str" -lst = [m, n] -reveal_type(lst[0]) # N: Revealed type is "Tuple[builtins.object, builtins.object]" + A = NamedTuple('A', [ + ('x', str), + ('y', 'B'), + ]) + n: B + m: A + 
reveal_type(n.x) # N: Revealed type is "Tuple[Any, builtins.int]" + reveal_type(m[0]) # N: Revealed type is "builtins.str" + lst = [m, n] + reveal_type(lst[0]) # N: Revealed type is "Tuple[builtins.object, builtins.object]" [builtins fixtures/tuple.pyi] [case testSelfRefNT4] -# flags: --disable-recursive-aliases from typing import NamedTuple -class B(NamedTuple): - x: A # E: Cannot resolve name "A" (possible cyclic definition) - y: int +def test() -> None: + class B(NamedTuple): + x: A # E: Cannot resolve name "A" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope + y: int -class A(NamedTuple): - x: str - y: B + class A(NamedTuple): + x: str + y: B -n: A -reveal_type(n.y[0]) # N: Revealed type is "Any" + n: A + reveal_type(n.y[0]) # N: Revealed type is "Any" [builtins fixtures/tuple.pyi] [case testSelfRefNT5] -# flags: --disable-recursive-aliases from typing import NamedTuple -B = NamedTuple('B', [ - ('x', A), # E: Cannot resolve name "A" (possible cyclic definition) # E: Name "A" is used before definition - ('y', int), - ]) -A = NamedTuple('A', [ - ('x', str), - ('y', 'B'), - ]) -n: A -def f(m: B) -> None: pass -reveal_type(n) # N: Revealed type is "Tuple[builtins.str, Tuple[Any, builtins.int, fallback=__main__.B], fallback=__main__.A]" -reveal_type(f) # N: Revealed type is "def (m: Tuple[Any, builtins.int, fallback=__main__.B])" +def test() -> None: + B = NamedTuple('B', [ + ('x', A), # E: Cannot resolve name "A" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope \ + # E: Name "A" is used before definition + ('y', int), + ]) + A = NamedTuple('A', [ + ('x', str), + ('y', 'B'), + ]) + n: A + def f(m: B) -> None: pass + reveal_type(n) # N: Revealed type is "Tuple[builtins.str, Tuple[Any, builtins.int, fallback=__main__.B@4], fallback=__main__.A@8]" + reveal_type(f) # N: Revealed type is "def (m: Tuple[Any, builtins.int, fallback=__main__.B@4])" [builtins fixtures/tuple.pyi] [case 
testRecursiveNamedTupleInBases] -# flags: --disable-recursive-aliases from typing import List, NamedTuple, Union -Exp = Union['A', 'B'] # E: Cannot resolve name "Exp" (possible cyclic definition) \ - # E: Cannot resolve name "A" (possible cyclic definition) -class A(NamedTuple('A', [('attr', List[Exp])])): pass -class B(NamedTuple('B', [('val', object)])): pass +def test() -> None: + Exp = Union['A', 'B'] # E: Cannot resolve name "Exp" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope \ + # E: Cannot resolve name "A" (possible cyclic definition) + class A(NamedTuple('A', [('attr', List[Exp])])): pass + class B(NamedTuple('B', [('val', object)])): pass -def my_eval(exp: Exp) -> int: - reveal_type(exp) # N: Revealed type is "Union[Any, Tuple[builtins.object, fallback=__main__.B]]" + exp: Exp + reveal_type(exp) # N: Revealed type is "Union[Any, Tuple[builtins.object, fallback=__main__.B@6]]" if isinstance(exp, A): - my_eval(exp[0][0]) - return my_eval(exp.attr[0]) + reveal_type(exp[0][0]) # N: Revealed type is "Union[Any, Tuple[builtins.object, fallback=__main__.B@6]]" + reveal_type(exp.attr[0]) # N: Revealed type is "Union[Any, Tuple[builtins.object, fallback=__main__.B@6]]" if isinstance(exp, B): - return exp.val # E: Incompatible return value type (got "object", expected "int") - return 0 - -my_eval(A([B(1), B(2)])) # OK + reveal_type(exp.val) # N: Revealed type is "builtins.object" + reveal_type(A([B(1), B(2)])) # N: Revealed type is "Tuple[builtins.list[Union[Any, Tuple[builtins.object, fallback=__main__.B@6]]], fallback=__main__.A@5]" [builtins fixtures/isinstancelist.pyi] [out] @@ -771,17 +778,18 @@ tp = NamedTuple('tp', [('x', int)]) [out] [case testSubclassOfRecursiveNamedTuple] -# flags: --disable-recursive-aliases from typing import List, NamedTuple -class Command(NamedTuple): - subcommands: List['Command'] # E: Cannot resolve name "Command" (possible cyclic definition) +def test() -> None: + class Command(NamedTuple): 
+ subcommands: List['Command'] # E: Cannot resolve name "Command" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope -class HelpCommand(Command): - pass + class HelpCommand(Command): + pass -hc = HelpCommand(subcommands=[]) -reveal_type(hc) # N: Revealed type is "Tuple[builtins.list[Any], fallback=__main__.HelpCommand]" + hc = HelpCommand(subcommands=[]) + reveal_type(hc) # N: Revealed type is "Tuple[builtins.list[Any], fallback=__main__.HelpCommand@7]" [builtins fixtures/list.pyi] [out] diff --git a/test-data/unit/check-newsemanal.test b/test-data/unit/check-newsemanal.test index ff8d346e74a1..f4d3b9df760e 100644 --- a/test-data/unit/check-newsemanal.test +++ b/test-data/unit/check-newsemanal.test @@ -434,13 +434,14 @@ def main() -> None: x # E: Name "x" is not defined [case testNewAnalyzerCyclicDefinitions] -# flags: --disable-recursive-aliases --disable-error-code used-before-def +# flags: --disable-error-code used-before-def gx = gy # E: Cannot resolve name "gy" (possible cyclic definition) gy = gx def main() -> None: class C: def meth(self) -> None: - lx = ly # E: Cannot resolve name "ly" (possible cyclic definition) + lx = ly # E: Cannot resolve name "ly" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope ly = lx [case testNewAnalyzerCyclicDefinitionCrossModule] @@ -1495,22 +1496,25 @@ reveal_type(x[0][0]) # N: Revealed type is "__main__.C" [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyDirectBase] -# flags: --disable-recursive-aliases --disable-error-code used-before-def +# flags: --disable-error-code used-before-def from typing import List -x: B -B = List[C] -class C(B): pass +def test() -> None: + x: B + B = List[C] + class C(B): pass -reveal_type(x) -reveal_type(x[0][0]) + reveal_type(x) + reveal_type(x[0][0]) [builtins fixtures/list.pyi] [out] -main:4: error: Cannot resolve name "B" (possible cyclic definition) main:5: error: Cannot resolve name "B" (possible 
cyclic definition) -main:5: error: Cannot resolve name "C" (possible cyclic definition) -main:8: note: Revealed type is "Any" +main:5: note: Recursive types are not allowed at function scope +main:6: error: Cannot resolve name "B" (possible cyclic definition) +main:6: note: Recursive types are not allowed at function scope +main:6: error: Cannot resolve name "C" (possible cyclic definition) main:9: note: Revealed type is "Any" +main:10: note: Revealed type is "Any" [case testNewAnalyzerAliasToNotReadyTwoDeferralsFunction] # flags: --disable-error-code used-before-def @@ -1530,25 +1534,21 @@ reveal_type(f) # N: Revealed type is "def (x: builtins.list[a.C]) -> builtins.l [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyDirectBaseFunction] -# flags: --disable-recursive-aliases --disable-error-code used-before-def +# flags: --disable-error-code used-before-def import a [file a.py] from typing import List from b import D def f(x: B) -> List[B]: ... -B = List[C] # E +B = List[C] class C(B): pass [file b.py] from a import f class D: ... 
-reveal_type(f) # N +reveal_type(f) # N: Revealed type is "def (x: builtins.list[a.C]) -> builtins.list[builtins.list[a.C]]" [builtins fixtures/list.pyi] -[out] -tmp/b.py:3: note: Revealed type is "def (x: builtins.list[Any]) -> builtins.list[builtins.list[Any]]" -tmp/a.py:5: error: Cannot resolve name "B" (possible cyclic definition) -tmp/a.py:5: error: Cannot resolve name "C" (possible cyclic definition) [case testNewAnalyzerAliasToNotReadyMixed] from typing import List, Union @@ -2118,25 +2118,29 @@ class B(List[C]): [builtins fixtures/list.pyi] [case testNewAnalyzerNewTypeForwardClassAliasDirect] -# flags: --disable-recursive-aliases --disable-error-code used-before-def +# flags: --disable-error-code used-before-def from typing import NewType, List -x: D -reveal_type(x[0][0]) +def test() -> None: + x: D + reveal_type(x[0][0]) -D = List[C] -C = NewType('C', 'B') + D = List[C] + C = NewType('C', 'B') -class B(D): - pass + class B(D): + pass [builtins fixtures/list.pyi] [out] -main:4: error: Cannot resolve name "D" (possible cyclic definition) -main:5: note: Revealed type is "Any" -main:7: error: Cannot resolve name "D" (possible cyclic definition) -main:7: error: Cannot resolve name "C" (possible cyclic definition) -main:8: error: Argument 2 to NewType(...) must be a valid type -main:8: error: Cannot resolve name "B" (possible cyclic definition) +main:5: error: Cannot resolve name "D" (possible cyclic definition) +main:5: note: Recursive types are not allowed at function scope +main:6: note: Revealed type is "Any" +main:8: error: Cannot resolve name "D" (possible cyclic definition) +main:8: note: Recursive types are not allowed at function scope +main:8: error: Cannot resolve name "C" (possible cyclic definition) +main:9: error: Argument 2 to NewType(...) must be a valid type +main:9: error: Cannot resolve name "B" (possible cyclic definition) +main:9: note: Recursive types are not allowed at function scope -- Copied from check-classes.test (tricky corner cases). 
[case testNewAnalyzerNoCrashForwardRefToBrokenDoubleNewTypeClass] @@ -2154,22 +2158,24 @@ class C: [builtins fixtures/dict.pyi] [case testNewAnalyzerForwardTypeAliasInBase] -# flags: --disable-recursive-aliases from typing import List, Generic, TypeVar, NamedTuple T = TypeVar('T') -class C(A, B): # E: Cannot resolve name "A" (possible cyclic definition) - pass -class G(Generic[T]): pass -A = G[C] # E: Cannot resolve name "A" (possible cyclic definition) -class B(NamedTuple): - x: int +def test() -> None: + class C(A, B): # E: Cannot resolve name "A" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope + pass + class G(Generic[T]): pass + A = G[C] # E: Cannot resolve name "A" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope + class B(NamedTuple): + x: int -y: C -reveal_type(y.x) # N: Revealed type is "builtins.int" -reveal_type(y[0]) # N: Revealed type is "builtins.int" -x: A -reveal_type(x) # N: Revealed type is "__main__.G[Tuple[builtins.int, fallback=__main__.C]]" + y: C + reveal_type(y.x) # N: Revealed type is "builtins.int" + reveal_type(y[0]) # N: Revealed type is "builtins.int" + x: A + reveal_type(x) # N: Revealed type is "__main__.G@7[Tuple[builtins.int, fallback=__main__.C@5]]" [builtins fixtures/list.pyi] [case testNewAnalyzerDuplicateTypeVar] @@ -2584,9 +2590,9 @@ import n def __getattr__(x): pass [case testNewAnalyzerReportLoopInMRO2] -# flags: --disable-recursive-aliases def f() -> None: - class A(A): ... # E: Cannot resolve name "A" (possible cyclic definition) + class A(A): ... 
# E: Cannot resolve name "A" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope [case testNewAnalyzerUnsupportedBaseClassInsideFunction] class C: diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test index 3ca0c5ef0a4b..46f5ff07f1ac 100644 --- a/test-data/unit/check-type-aliases.test +++ b/test-data/unit/check-type-aliases.test @@ -197,30 +197,35 @@ Alias = Tuple[int, T] [out] [case testRecursiveAliasesErrors1] -# flags: --disable-recursive-aliases -# Recursive aliases are not supported yet. from typing import Type, Callable, Union -A = Union[A, int] # E: Cannot resolve name "A" (possible cyclic definition) -B = Callable[[B], int] # E: Cannot resolve name "B" (possible cyclic definition) -C = Type[C] # E: Cannot resolve name "C" (possible cyclic definition) +def test() -> None: + A = Union[A, int] # E: Cannot resolve name "A" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope + B = Callable[[B], int] # E: Cannot resolve name "B" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope + C = Type[C] # E: Cannot resolve name "C" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope [case testRecursiveAliasesErrors2] -# flags: --disable-recursive-aliases --disable-error-code=used-before-def -# Recursive aliases are not supported yet. 
+# flags: --disable-error-code=used-before-def from typing import Type, Callable, Union -A = Union[B, int] -B = Callable[[C], int] -C = Type[A] -x: A -reveal_type(x) +def test() -> None: + A = Union[B, int] + B = Callable[[C], int] + C = Type[A] + x: A + reveal_type(x) [out] main:5: error: Cannot resolve name "A" (possible cyclic definition) +main:5: note: Recursive types are not allowed at function scope main:5: error: Cannot resolve name "B" (possible cyclic definition) main:6: error: Cannot resolve name "B" (possible cyclic definition) +main:6: note: Recursive types are not allowed at function scope main:6: error: Cannot resolve name "C" (possible cyclic definition) main:7: error: Cannot resolve name "C" (possible cyclic definition) +main:7: note: Recursive types are not allowed at function scope main:9: note: Revealed type is "Union[Any, builtins.int]" [case testDoubleForwardAlias] @@ -245,13 +250,16 @@ reveal_type(x[0].x) # N: Revealed type is "builtins.str" [out] [case testJSONAliasApproximation] -# flags: --disable-recursive-aliases from typing import List, Union, Dict -x: JSON # E: Cannot resolve name "JSON" (possible cyclic definition) -JSON = Union[int, str, List[JSON], Dict[str, JSON]] # E: Cannot resolve name "JSON" (possible cyclic definition) -reveal_type(x) # N: Revealed type is "Any" -if isinstance(x, list): - reveal_type(x) # N: Revealed type is "builtins.list[Any]" + +def test() -> None: + x: JSON # E: Cannot resolve name "JSON" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope + JSON = Union[int, str, List[JSON], Dict[str, JSON]] # E: Cannot resolve name "JSON" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope + reveal_type(x) # N: Revealed type is "Any" + if isinstance(x, list): + reveal_type(x) # N: Revealed type is "builtins.list[Any]" [builtins fixtures/isinstancelist.pyi] [out] diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test 
index 0e1d800e0468..088b52db0473 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -1443,34 +1443,34 @@ reveal_type(x['a']['b']) # N: Revealed type is "builtins.int" [builtins fixtures/dict.pyi] [case testSelfRecursiveTypedDictInheriting] - from mypy_extensions import TypedDict -# flags: --disable-recursive-aliases -class MovieBase(TypedDict): - name: str - year: int -class Movie(MovieBase): - director: 'Movie' # E: Cannot resolve name "Movie" (possible cyclic definition) +def test() -> None: + class MovieBase(TypedDict): + name: str + year: int -m: Movie -reveal_type(m['director']['name']) # N: Revealed type is "Any" + class Movie(MovieBase): + director: 'Movie' # E: Cannot resolve name "Movie" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope + m: Movie + reveal_type(m['director']['name']) # N: Revealed type is "Any" [builtins fixtures/dict.pyi] -[out] [case testSubclassOfRecursiveTypedDict] -# flags: --disable-recursive-aliases from typing import List from mypy_extensions import TypedDict -class Command(TypedDict): - subcommands: List['Command'] # E: Cannot resolve name "Command" (possible cyclic definition) +def test() -> None: + class Command(TypedDict): + subcommands: List['Command'] # E: Cannot resolve name "Command" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope -class HelpCommand(Command): - pass + class HelpCommand(Command): + pass -hc = HelpCommand(subcommands=[]) -reveal_type(hc) # N: Revealed type is "TypedDict('__main__.HelpCommand', {'subcommands': builtins.list[Any]})" + hc = HelpCommand(subcommands=[]) + reveal_type(hc) # N: Revealed type is "TypedDict('__main__.HelpCommand@8', {'subcommands': builtins.list[Any]})" [builtins fixtures/list.pyi] [out] diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test index f6fd27e59e4d..d79ab14184c6 100644 --- a/test-data/unit/check-unions.test +++ 
b/test-data/unit/check-unions.test @@ -1003,9 +1003,11 @@ def takes_int(arg: int) -> None: pass takes_int(x) # E: Argument 1 to "takes_int" has incompatible type "Union[ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[int], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[object], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[float], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[str], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[Any], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[bytes]]"; expected "int" [case testRecursiveForwardReferenceInUnion] -# flags: --disable-recursive-aliases from typing import List, Union -MYTYPE = List[Union[str, "MYTYPE"]] # E: Cannot resolve name "MYTYPE" (possible cyclic definition) + +def test() -> None: + MYTYPE = List[Union[str, "MYTYPE"]] # E: Cannot resolve name "MYTYPE" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope [builtins fixtures/list.pyi] [case testNonStrictOptional] diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test index 42f0ee8a9ec6..cf5e3c438fac 100644 --- a/test-data/unit/cmdline.test +++ b/test-data/unit/cmdline.test @@ -1472,14 +1472,6 @@ note: A user-defined top-level module with name "typing" is not supported Failed to find builtin module mypy_extensions, perhaps typeshed is broken? 
== Return code: 2 -[case testRecursiveAliasesFlagDeprecated] -# cmd: mypy --enable-recursive-aliases a.py -[file a.py] -pass -[out] -Warning: --enable-recursive-aliases is deprecated; recursive types are enabled by default -== Return code: 0 - [case testNotesOnlyResultInExitSuccess] # cmd: mypy a.py [file a.py] From 93d4cb0a2ef1723ce92f39ae61fe6a0c010eb90b Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sat, 28 Oct 2023 14:15:28 +0100 Subject: [PATCH 126/144] Enable new type inference by default (#16345) Fixes https://github.com/python/mypy/issues/15906 I am adding `--old-type-inference` so people can disable the flag if they have issues (for few releases). IIRC there will be some fallback in `mypy_primer`, but last time I checked it was all correct. Also I don't remember if we need to update some tests, but we will see. --- mypy/checker.py | 6 +++--- mypy/checkexpr.py | 12 ++++++++---- mypy/main.py | 14 ++++++++++++-- mypy/options.py | 6 ++++-- mypy_self_check.ini | 1 - test-data/unit/check-generics.test | 9 +++++++-- test-data/unit/check-inference-context.test | 19 +++++++++++++++++++ test-data/unit/cmdline.test | 8 ++++++++ 8 files changed, 61 insertions(+), 14 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index e68dc4178962..fd633b209438 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -4043,11 +4043,11 @@ def is_valid_defaultdict_partial_value_type(self, t: ProperType) -> bool: return True if len(t.args) == 1: arg = get_proper_type(t.args[0]) - if self.options.new_type_inference: - allowed = isinstance(arg, (UninhabitedType, NoneType)) - else: + if self.options.old_type_inference: # Allow leaked TypeVars for legacy inference logic. 
allowed = isinstance(arg, (UninhabitedType, NoneType, TypeVarType)) + else: + allowed = isinstance(arg, (UninhabitedType, NoneType)) if allowed: return True return False diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index ddcaa6ee30c9..9ece4680f59e 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -343,7 +343,7 @@ def __init__( # on whether current expression is a callee, to give better error messages # related to type context. self.is_callee = False - type_state.infer_polymorphic = self.chk.options.new_type_inference + type_state.infer_polymorphic = not self.chk.options.old_type_inference def reset(self) -> None: self.resolved_type = {} @@ -2082,7 +2082,7 @@ def infer_function_type_arguments( elif not first_arg or not is_subtype(self.named_type("builtins.str"), first_arg): self.chk.fail(message_registry.KEYWORD_ARGUMENT_REQUIRES_STR_KEY_TYPE, context) - if self.chk.options.new_type_inference and any( + if not self.chk.options.old_type_inference and any( a is None or isinstance(get_proper_type(a), UninhabitedType) or set(get_type_vars(a)) & set(callee_type.variables) @@ -2181,7 +2181,11 @@ def infer_function_type_arguments_pass2( lambda a: self.accept(args[a]), ) - arg_types = self.infer_arg_types_in_context(callee_type, args, arg_kinds, formal_to_actual) + # Same as during first pass, disable type errors (we still have partial context). + with self.msg.filter_errors(): + arg_types = self.infer_arg_types_in_context( + callee_type, args, arg_kinds, formal_to_actual + ) inferred_args, _ = infer_function_type_arguments( callee_type, @@ -5230,7 +5234,7 @@ def infer_lambda_type_using_context( # they must be considered as indeterminate. We use ErasedType since it # does not affect type inference results (it is for purposes like this # only). 
- if self.chk.options.new_type_inference: + if not self.chk.options.old_type_inference: # With new type inference we can preserve argument types even if they # are generic, since new inference algorithm can handle constraints # like S <: T (we still erase return type since it's ultimately unknown). diff --git a/mypy/main.py b/mypy/main.py index 718eb5a7c0c1..43ab761072ca 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -995,9 +995,13 @@ def add_invertible_flag( help="Use a custom typing module", ) internals_group.add_argument( - "--new-type-inference", + "--old-type-inference", action="store_true", - help="Enable new experimental type inference algorithm", + help="Disable new experimental type inference algorithm", + ) + # Deprecated reverse variant of the above. + internals_group.add_argument( + "--new-type-inference", action="store_true", help=argparse.SUPPRESS ) parser.add_argument( "--enable-incomplete-feature", @@ -1383,6 +1387,12 @@ def set_strict_flags() -> None: if options.logical_deps: options.cache_fine_grained = True + if options.new_type_inference: + print( + "Warning: --new-type-inference flag is deprecated;" + " new type inference algorithm is already enabled by default" + ) + if options.strict_concatenate and not strict_option_set: print("Warning: --strict-concatenate is deprecated; use --extra-checks instead") diff --git a/mypy/options.py b/mypy/options.py index 3447b5dfb1f6..31d5d584f897 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -62,7 +62,7 @@ class BuildType: | { "platform", "bazel", - "new_type_inference", + "old_type_inference", "plugins", "disable_bytearray_promotion", "disable_memoryview_promotion", @@ -360,7 +360,9 @@ def __init__(self) -> None: # skip most errors after this many messages have been reported. # -1 means unlimited. self.many_errors_threshold = defaults.MANY_ERRORS_THRESHOLD - # Enable new experimental type inference algorithm. + # Disable new experimental type inference algorithm. 
+ self.old_type_inference = False + # Deprecated reverse version of the above, do not use. self.new_type_inference = False # Export line-level, limited, fine-grained dependency information in cache data # (undocumented feature). diff --git a/mypy_self_check.ini b/mypy_self_check.ini index 093926d4c415..7f1f9689a757 100644 --- a/mypy_self_check.ini +++ b/mypy_self_check.ini @@ -8,7 +8,6 @@ always_false = MYPYC plugins = mypy.plugins.proper_plugin python_version = 3.8 exclude = mypy/typeshed/|mypyc/test-data/|mypyc/lib-rt/ -new_type_inference = True enable_error_code = ignore-without-code,redundant-expr enable_incomplete_feature = PreciseTupleTypes show_error_code_links = True diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index 0781451e07ce..ef3f359e4989 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -2724,7 +2724,7 @@ def f(x: Callable[[G[T]], int]) -> T: ... class G(Generic[T]): def g(self, x: S) -> Union[S, T]: ... -f(lambda x: x.g(0)) # E: Incompatible return value type (got "Union[int, T]", expected "int") +reveal_type(f(lambda x: x.g(0))) # N: Revealed type is "builtins.int" [case testDictStarInference] class B: ... @@ -3059,6 +3059,10 @@ def dec5(f: Callable[[int], T]) -> Callable[[int], List[T]]: return [f(x)] * x return g +I = TypeVar("I", bound=int) +def dec4_bound(f: Callable[[I], List[T]]) -> Callable[[I], T]: + ... 
+ reveal_type(dec1(lambda x: x)) # N: Revealed type is "def [T] (T`3) -> builtins.list[T`3]" reveal_type(dec2(lambda x: x)) # N: Revealed type is "def [S] (S`4) -> builtins.list[S`4]" reveal_type(dec3(lambda x: x[0])) # N: Revealed type is "def [S] (S`6) -> S`6" @@ -3066,7 +3070,8 @@ reveal_type(dec4(lambda x: [x])) # N: Revealed type is "def [S] (S`9) -> S`9" reveal_type(dec1(lambda x: 1)) # N: Revealed type is "def (builtins.int) -> builtins.list[builtins.int]" reveal_type(dec5(lambda x: x)) # N: Revealed type is "def (builtins.int) -> builtins.list[builtins.int]" reveal_type(dec3(lambda x: x)) # N: Revealed type is "def [S] (S`16) -> builtins.list[S`16]" -dec4(lambda x: x) # E: Incompatible return value type (got "S", expected "List[object]") +reveal_type(dec4(lambda x: x)) # N: Revealed type is "def [T] (builtins.list[T`19]) -> T`19" +dec4_bound(lambda x: x) # E: Value of type variable "I" of "dec4_bound" cannot be "List[T]" [builtins fixtures/list.pyi] [case testInferenceAgainstGenericParamSpecBasicInList] diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test index 773a9ffd8274..a933acbf7f32 100644 --- a/test-data/unit/check-inference-context.test +++ b/test-data/unit/check-inference-context.test @@ -1305,6 +1305,25 @@ def g(l: List[C], x: str) -> Optional[C]: return f(l, lambda c: reveal_type(c).x) # N: Revealed type is "__main__.C" [builtins fixtures/list.pyi] +[case testPartialTypeContextWithTwoLambdas] +from typing import Any, Generic, TypeVar, Callable + +def int_to_any(x: int) -> Any: ... +def any_to_int(x: Any) -> int: ... +def any_to_str(x: Any) -> str: ... + +T = TypeVar("T") +class W(Generic[T]): + def __init__( + self, serialize: Callable[[T], Any], deserialize: Callable[[Any], T] + ) -> None: + ... 
+reveal_type(W(lambda x: int_to_any(x), lambda x: any_to_int(x))) # N: Revealed type is "__main__.W[builtins.int]" +W( + lambda x: int_to_any(x), # E: Argument 1 to "int_to_any" has incompatible type "str"; expected "int" + lambda x: any_to_str(x) +) + [case testWideOuterContextEmpty] from typing import List, TypeVar diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test index cf5e3c438fac..91242eb62fcf 100644 --- a/test-data/unit/cmdline.test +++ b/test-data/unit/cmdline.test @@ -1472,6 +1472,14 @@ note: A user-defined top-level module with name "typing" is not supported Failed to find builtin module mypy_extensions, perhaps typeshed is broken? == Return code: 2 +[case testNewTypeInferenceFlagDeprecated] +# cmd: mypy --new-type-inference a.py +[file a.py] +pass +[out] +Warning: --new-type-inference flag is deprecated; new type inference algorithm is already enabled by default +== Return code: 0 + [case testNotesOnlyResultInExitSuccess] # cmd: mypy a.py [file a.py] From 6c7faf3af1c442c0802998cbf384f73b79d67478 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sat, 28 Oct 2023 14:19:17 +0100 Subject: [PATCH 127/144] Skip expensive repr() in logging call when not needed (#16350) We were spending quite a lot of time in this function when running tests, based on profiling. 
--- mypy/build.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mypy/build.py b/mypy/build.py index 1385021aac48..605368a6dc51 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -255,7 +255,8 @@ def _build( stdout=stdout, stderr=stderr, ) - manager.trace(repr(options)) + if manager.verbosity() >= 2: + manager.trace(repr(options)) reset_global_state() try: From f33c9a3b97f8226eb0156d50be7885ad96815f7c Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sat, 28 Oct 2023 14:26:05 +0100 Subject: [PATCH 128/144] Some final touches for variadic types support (#16334) I decided to go again over various parts of variadic types implementation to double-check nothing is missing, checked interaction with various "advanced" features (dataclasses, protocols, self-types, match statement, etc.), added some more tests (including incremental), and `grep`ed for potentially unhandled cases (and did found few crashes). This mostly touches only variadic types but one thing goes beyond, the fix for self-types upper bound, I think it is correct and should be safe. If there are no objections, next PR will flip the switch. 
--------- Co-authored-by: Shantanu <12621235+hauntsaninja@users.noreply.github.com> --- mypy/applytype.py | 7 + mypy/checker.py | 39 +++--- mypy/checkexpr.py | 7 +- mypy/checkpattern.py | 111 ++++++++++++--- mypy/constraints.py | 19 ++- mypy/erasetype.py | 4 +- mypy/join.py | 43 +++++- mypy/maptype.py | 3 + mypy/meet.py | 21 ++- mypy/semanal_shared.py | 20 ++- mypy/subtypes.py | 22 ++- mypy/typeops.py | 3 +- mypy/types_utils.py | 4 +- mypy/typevars.py | 19 ++- test-data/unit/check-incremental.test | 43 ++++++ test-data/unit/check-python310.test | 117 ++++++++++++++++ test-data/unit/check-selftype.test | 20 +++ test-data/unit/check-typevar-tuple.test | 174 ++++++++++++++++++++++++ test-data/unit/fine-grained.test | 122 +++++++++++++++++ 19 files changed, 726 insertions(+), 72 deletions(-) diff --git a/mypy/applytype.py b/mypy/applytype.py index 884be287e33d..c7da67d6140b 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -3,6 +3,7 @@ from typing import Callable, Sequence import mypy.subtypes +from mypy.erasetype import erase_typevars from mypy.expandtype import expand_type from mypy.nodes import Context from mypy.types import ( @@ -62,6 +63,11 @@ def get_target_type( report_incompatible_typevar_value(callable, type, tvar.name, context) else: upper_bound = tvar.upper_bound + if tvar.name == "Self": + # Internally constructed Self-types contain class type variables in upper bound, + # so we need to erase them to avoid false positives. This is safe because we do + # not support type variables in upper bounds of user defined types. + upper_bound = erase_typevars(upper_bound) if not mypy.subtypes.is_subtype(type, upper_bound): if skip_unsatisfied: return None @@ -121,6 +127,7 @@ def apply_generic_arguments( # Apply arguments to argument types. var_arg = callable.var_arg() if var_arg is not None and isinstance(var_arg.typ, UnpackType): + # Same as for ParamSpec, callable with variadic types needs to be expanded as a whole. 
callable = expand_type(callable, id_to_type) assert isinstance(callable, CallableType) return callable.copy_modified(variables=[tv for tv in tvars if tv.id not in id_to_type]) diff --git a/mypy/checker.py b/mypy/checker.py index fd633b209438..62ba642256bf 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -1852,7 +1852,6 @@ def expand_typevars( if defn.info: # Class type variables tvars += defn.info.defn.type_vars or [] - # TODO(PEP612): audit for paramspec for tvar in tvars: if isinstance(tvar, TypeVarType) and tvar.values: subst.append([(tvar.id, value) for value in tvar.values]) @@ -2538,6 +2537,9 @@ def check_protocol_variance(self, defn: ClassDef) -> None: object_type = Instance(info.mro[-1], []) tvars = info.defn.type_vars for i, tvar in enumerate(tvars): + if not isinstance(tvar, TypeVarType): + # Variance of TypeVarTuple and ParamSpec is underspecified by PEPs. + continue up_args: list[Type] = [ object_type if i == j else AnyType(TypeOfAny.special_form) for j, _ in enumerate(tvars) @@ -2554,7 +2556,7 @@ def check_protocol_variance(self, defn: ClassDef) -> None: expected = CONTRAVARIANT else: expected = INVARIANT - if isinstance(tvar, TypeVarType) and expected != tvar.variance: + if expected != tvar.variance: self.msg.bad_proto_variance(tvar.variance, tvar.name, expected, defn) def check_multiple_inheritance(self, typ: TypeInfo) -> None: @@ -6695,19 +6697,6 @@ def check_possible_missing_await( return self.msg.possible_missing_await(context, code) - def contains_none(self, t: Type) -> bool: - t = get_proper_type(t) - return ( - isinstance(t, NoneType) - or (isinstance(t, UnionType) and any(self.contains_none(ut) for ut in t.items)) - or (isinstance(t, TupleType) and any(self.contains_none(tt) for tt in t.items)) - or ( - isinstance(t, Instance) - and bool(t.args) - and any(self.contains_none(it) for it in t.args) - ) - ) - def named_type(self, name: str) -> Instance: """Return an instance type with given name and implicit Any type args. 
@@ -7471,10 +7460,22 @@ def builtin_item_type(tp: Type) -> Type | None: return None if not isinstance(get_proper_type(tp.args[0]), AnyType): return tp.args[0] - elif isinstance(tp, TupleType) and all( - not isinstance(it, AnyType) for it in get_proper_types(tp.items) - ): - return make_simplified_union(tp.items) # this type is not externally visible + elif isinstance(tp, TupleType): + normalized_items = [] + for it in tp.items: + # This use case is probably rare, but not handling unpacks here can cause crashes. + if isinstance(it, UnpackType): + unpacked = get_proper_type(it.type) + if isinstance(unpacked, TypeVarTupleType): + unpacked = get_proper_type(unpacked.upper_bound) + assert ( + isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple" + ) + normalized_items.append(unpacked.args[0]) + else: + normalized_items.append(it) + if all(not isinstance(it, AnyType) for it in get_proper_types(normalized_items)): + return make_simplified_union(normalized_items) # this type is not externally visible elif isinstance(tp, TypedDictType): # TypedDict always has non-optional string keys. Find the key type from the Mapping # base class. 
diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 9ece4680f59e..df6000050986 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -410,7 +410,7 @@ def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type: result = self.alias_type_in_runtime_context( node, ctx=e, alias_definition=e.is_alias_rvalue or lvalue ) - elif isinstance(node, (TypeVarExpr, ParamSpecExpr)): + elif isinstance(node, (TypeVarExpr, ParamSpecExpr, TypeVarTupleExpr)): result = self.object_type() else: if isinstance(node, PlaceholderNode): @@ -3316,6 +3316,7 @@ def infer_literal_expr_type(self, value: LiteralValue, fallback_name: str) -> Ty def concat_tuples(self, left: TupleType, right: TupleType) -> TupleType: """Concatenate two fixed length tuples.""" + assert not (find_unpack_in_list(left.items) and find_unpack_in_list(right.items)) return TupleType( items=left.items + right.items, fallback=self.named_type("builtins.tuple") ) @@ -6507,8 +6508,8 @@ def merge_typevars_in_callables_by_name( for tv in target.variables: name = tv.fullname if name not in unique_typevars: - # TODO(PEP612): fix for ParamSpecType - if isinstance(tv, ParamSpecType): + # TODO: support ParamSpecType and TypeVarTuple. 
+ if isinstance(tv, (ParamSpecType, TypeVarTupleType)): continue assert isinstance(tv, TypeVarType) unique_typevars[name] = tv diff --git a/mypy/checkpattern.py b/mypy/checkpattern.py index 3f9a99b21530..c0061f1c3e72 100644 --- a/mypy/checkpattern.py +++ b/mypy/checkpattern.py @@ -45,9 +45,13 @@ Type, TypedDictType, TypeOfAny, + TypeVarTupleType, UninhabitedType, UnionType, + UnpackType, + find_unpack_in_list, get_proper_type, + split_with_prefix_and_suffix, ) from mypy.typevars import fill_typevars from mypy.visitor import PatternVisitor @@ -239,13 +243,29 @@ def visit_sequence_pattern(self, o: SequencePattern) -> PatternType: # # get inner types of original type # + unpack_index = None if isinstance(current_type, TupleType): inner_types = current_type.items - size_diff = len(inner_types) - required_patterns - if size_diff < 0: - return self.early_non_match() - elif size_diff > 0 and star_position is None: - return self.early_non_match() + unpack_index = find_unpack_in_list(inner_types) + if unpack_index is None: + size_diff = len(inner_types) - required_patterns + if size_diff < 0: + return self.early_non_match() + elif size_diff > 0 and star_position is None: + return self.early_non_match() + else: + normalized_inner_types = [] + for it in inner_types: + # Unfortunately, it is not possible to "split" the TypeVarTuple + # into individual items, so we just use its upper bound for the whole + # analysis instead. 
+ if isinstance(it, UnpackType) and isinstance(it.type, TypeVarTupleType): + it = UnpackType(it.type.upper_bound) + normalized_inner_types.append(it) + inner_types = normalized_inner_types + current_type = current_type.copy_modified(items=normalized_inner_types) + if len(inner_types) - 1 > required_patterns and star_position is None: + return self.early_non_match() else: inner_type = self.get_sequence_type(current_type, o) if inner_type is None: @@ -270,10 +290,10 @@ def visit_sequence_pattern(self, o: SequencePattern) -> PatternType: self.update_type_map(captures, type_map) new_inner_types = self.expand_starred_pattern_types( - contracted_new_inner_types, star_position, len(inner_types) + contracted_new_inner_types, star_position, len(inner_types), unpack_index is not None ) rest_inner_types = self.expand_starred_pattern_types( - contracted_rest_inner_types, star_position, len(inner_types) + contracted_rest_inner_types, star_position, len(inner_types), unpack_index is not None ) # @@ -281,7 +301,7 @@ def visit_sequence_pattern(self, o: SequencePattern) -> PatternType: # new_type: Type rest_type: Type = current_type - if isinstance(current_type, TupleType): + if isinstance(current_type, TupleType) and unpack_index is None: narrowed_inner_types = [] inner_rest_types = [] for inner_type, new_inner_type in zip(inner_types, new_inner_types): @@ -301,6 +321,14 @@ def visit_sequence_pattern(self, o: SequencePattern) -> PatternType: if all(is_uninhabited(typ) for typ in inner_rest_types): # All subpatterns always match, so we can apply negative narrowing rest_type = TupleType(rest_inner_types, current_type.partial_fallback) + elif isinstance(current_type, TupleType): + # For variadic tuples it is too tricky to match individual items like for fixed + # tuples, so we instead try to narrow the entire type. + # TODO: use more precise narrowing when possible (e.g. for identical shapes). 
+ new_tuple_type = TupleType(new_inner_types, current_type.partial_fallback) + new_type, rest_type = self.chk.conditional_types_with_intersection( + new_tuple_type, [get_type_range(current_type)], o, default=new_tuple_type + ) else: new_inner_type = UninhabitedType() for typ in new_inner_types: @@ -345,17 +373,45 @@ def contract_starred_pattern_types( If star_pos in None the types are returned unchanged. """ - if star_pos is None: - return types - new_types = types[:star_pos] - star_length = len(types) - num_patterns - new_types.append(make_simplified_union(types[star_pos : star_pos + star_length])) - new_types += types[star_pos + star_length :] - - return new_types + unpack_index = find_unpack_in_list(types) + if unpack_index is not None: + # Variadic tuples require "re-shaping" to match the requested pattern. + unpack = types[unpack_index] + assert isinstance(unpack, UnpackType) + unpacked = get_proper_type(unpack.type) + # This should be guaranteed by the normalization in the caller. + assert isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple" + if star_pos is None: + missing = num_patterns - len(types) + 1 + new_types = types[:unpack_index] + new_types += [unpacked.args[0]] * missing + new_types += types[unpack_index + 1 :] + return new_types + prefix, middle, suffix = split_with_prefix_and_suffix( + tuple([UnpackType(unpacked) if isinstance(t, UnpackType) else t for t in types]), + star_pos, + num_patterns - star_pos, + ) + new_middle = [] + for m in middle: + # The existing code expects the star item type, rather than the type of + # the whole tuple "slice". 
+ if isinstance(m, UnpackType): + new_middle.append(unpacked.args[0]) + else: + new_middle.append(m) + return list(prefix) + [make_simplified_union(new_middle)] + list(suffix) + else: + if star_pos is None: + return types + new_types = types[:star_pos] + star_length = len(types) - num_patterns + new_types.append(make_simplified_union(types[star_pos : star_pos + star_length])) + new_types += types[star_pos + star_length :] + return new_types def expand_starred_pattern_types( - self, types: list[Type], star_pos: int | None, num_types: int + self, types: list[Type], star_pos: int | None, num_types: int, original_unpack: bool ) -> list[Type]: """Undoes the contraction done by contract_starred_pattern_types. @@ -364,6 +420,17 @@ def expand_starred_pattern_types( """ if star_pos is None: return types + if original_unpack: + # In the case where original tuple type has an unpack item, it is not practical + # to coerce pattern type back to the original shape (and may not even be possible), + # so we only restore the type of the star item. 
+ res = [] + for i, t in enumerate(types): + if i != star_pos: + res.append(t) + else: + res.append(UnpackType(self.chk.named_generic_type("builtins.tuple", [t]))) + return res new_types = types[:star_pos] star_length = num_types - len(types) + 1 new_types += [types[star_pos]] * star_length @@ -459,7 +526,15 @@ def visit_class_pattern(self, o: ClassPattern) -> PatternType: return self.early_non_match() if isinstance(type_info, TypeInfo): any_type = AnyType(TypeOfAny.implementation_artifact) - typ: Type = Instance(type_info, [any_type] * len(type_info.defn.type_vars)) + args: list[Type] = [] + for tv in type_info.defn.type_vars: + if isinstance(tv, TypeVarTupleType): + args.append( + UnpackType(self.chk.named_generic_type("builtins.tuple", [any_type])) + ) + else: + args.append(any_type) + typ: Type = Instance(type_info, args) elif isinstance(type_info, TypeAlias): typ = type_info.target elif ( diff --git a/mypy/constraints.py b/mypy/constraints.py index 6f611736a72a..49e542a49e56 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -28,6 +28,7 @@ Instance, LiteralType, NoneType, + NormalizedCallableType, Overloaded, Parameters, ParamSpecType, @@ -1388,7 +1389,7 @@ def find_matching_overload_items( return res -def get_tuple_fallback_from_unpack(unpack: UnpackType) -> TypeInfo | None: +def get_tuple_fallback_from_unpack(unpack: UnpackType) -> TypeInfo: """Get builtins.tuple type from available types to construct homogeneous tuples.""" tp = get_proper_type(unpack.type) if isinstance(tp, Instance) and tp.type.fullname == "builtins.tuple": @@ -1399,10 +1400,10 @@ def get_tuple_fallback_from_unpack(unpack: UnpackType) -> TypeInfo | None: for base in tp.partial_fallback.type.mro: if base.fullname == "builtins.tuple": return base - return None + assert False, "Invalid unpack type" -def repack_callable_args(callable: CallableType, tuple_type: TypeInfo | None) -> list[Type]: +def repack_callable_args(callable: CallableType, tuple_type: TypeInfo) -> list[Type]: 
"""Present callable with star unpack in a normalized form. Since positional arguments cannot follow star argument, they are packed in a suffix, @@ -1417,12 +1418,8 @@ def repack_callable_args(callable: CallableType, tuple_type: TypeInfo | None) -> star_type = callable.arg_types[star_index] suffix_types = [] if not isinstance(star_type, UnpackType): - if tuple_type is not None: - # Re-normalize *args: X -> *args: *tuple[X, ...] - star_type = UnpackType(Instance(tuple_type, [star_type])) - else: - # This is unfortunate, something like tuple[Any, ...] would be better. - star_type = UnpackType(AnyType(TypeOfAny.from_error)) + # Re-normalize *args: X -> *args: *tuple[X, ...] + star_type = UnpackType(Instance(tuple_type, [star_type])) else: tp = get_proper_type(star_type.type) if isinstance(tp, TupleType): @@ -1544,7 +1541,9 @@ def infer_directed_arg_constraints(left: Type, right: Type, direction: int) -> l def infer_callable_arguments_constraints( - template: CallableType | Parameters, actual: CallableType | Parameters, direction: int + template: NormalizedCallableType | Parameters, + actual: NormalizedCallableType | Parameters, + direction: int, ) -> list[Constraint]: """Infer constraints between argument types of two callables. diff --git a/mypy/erasetype.py b/mypy/erasetype.py index 7231ede66c65..b41eefcd4821 100644 --- a/mypy/erasetype.py +++ b/mypy/erasetype.py @@ -100,7 +100,9 @@ def visit_parameters(self, t: Parameters) -> ProperType: raise RuntimeError("Parameters should have been bound to a class") def visit_type_var_tuple(self, t: TypeVarTupleType) -> ProperType: - return AnyType(TypeOfAny.special_form) + # Likely, we can never get here because of aggressive erasure of types that + # can contain this, but better still return a valid replacement. 
+ return t.tuple_fallback.copy_modified(args=[AnyType(TypeOfAny.special_form)]) def visit_unpack_type(self, t: UnpackType) -> ProperType: return AnyType(TypeOfAny.special_form) diff --git a/mypy/join.py b/mypy/join.py index 2e2939f9fbc8..d33cbd98726d 100644 --- a/mypy/join.py +++ b/mypy/join.py @@ -36,6 +36,7 @@ TypedDictType, TypeOfAny, TypeType, + TypeVarLikeType, TypeVarTupleType, TypeVarType, TypeVisitor, @@ -715,11 +716,9 @@ def is_similar_callables(t: CallableType, s: CallableType) -> bool: def join_similar_callables(t: CallableType, s: CallableType) -> CallableType: - from mypy.meet import meet_types - arg_types: list[Type] = [] for i in range(len(t.arg_types)): - arg_types.append(meet_types(t.arg_types[i], s.arg_types[i])) + arg_types.append(safe_meet(t.arg_types[i], s.arg_types[i])) # TODO in combine_similar_callables also applies here (names and kinds; user metaclasses) # The fallback type can be either 'function', 'type', or some user-provided metaclass. # The result should always use 'function' as a fallback if either operands are using it. @@ -736,10 +735,42 @@ def join_similar_callables(t: CallableType, s: CallableType) -> CallableType: ) +def safe_join(t: Type, s: Type) -> Type: + # This is a temporary solution to prevent crashes in combine_similar_callables() etc., + # until relevant TODOs on handling arg_kinds will be addressed there. + if not isinstance(t, UnpackType) and not isinstance(s, UnpackType): + return join_types(t, s) + if isinstance(t, UnpackType) and isinstance(s, UnpackType): + return UnpackType(join_types(t.type, s.type)) + return object_or_any_from_type(get_proper_type(t)) + + +def safe_meet(t: Type, s: Type) -> Type: + # Similar to above but for meet_types(). 
+ from mypy.meet import meet_types + + if not isinstance(t, UnpackType) and not isinstance(s, UnpackType): + return meet_types(t, s) + if isinstance(t, UnpackType) and isinstance(s, UnpackType): + unpacked = get_proper_type(t.type) + if isinstance(unpacked, TypeVarTupleType): + fallback_type = unpacked.tuple_fallback.type + elif isinstance(unpacked, TupleType): + fallback_type = unpacked.partial_fallback.type + else: + assert isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple" + fallback_type = unpacked.type + res = meet_types(t.type, s.type) + if isinstance(res, UninhabitedType): + res = Instance(fallback_type, [res]) + return UnpackType(res) + return UninhabitedType() + + def combine_similar_callables(t: CallableType, s: CallableType) -> CallableType: arg_types: list[Type] = [] for i in range(len(t.arg_types)): - arg_types.append(join_types(t.arg_types[i], s.arg_types[i])) + arg_types.append(safe_join(t.arg_types[i], s.arg_types[i])) # TODO kinds and argument names # TODO what should happen if one fallback is 'type' and the other is a user-provided metaclass? # The fallback type can be either 'function', 'type', or some user-provided metaclass. 
@@ -806,7 +837,7 @@ def object_or_any_from_type(typ: ProperType) -> ProperType: return object_from_instance(typ.partial_fallback) elif isinstance(typ, TypeType): return object_or_any_from_type(typ.item) - elif isinstance(typ, TypeVarType) and isinstance(typ.upper_bound, ProperType): + elif isinstance(typ, TypeVarLikeType) and isinstance(typ.upper_bound, ProperType): return object_or_any_from_type(typ.upper_bound) elif isinstance(typ, UnionType): for item in typ.items: @@ -814,6 +845,8 @@ def object_or_any_from_type(typ: ProperType) -> ProperType: candidate = object_or_any_from_type(item) if isinstance(candidate, Instance): return candidate + elif isinstance(typ, UnpackType): + object_or_any_from_type(get_proper_type(typ.type)) return AnyType(TypeOfAny.implementation_artifact) diff --git a/mypy/maptype.py b/mypy/maptype.py index 0d54a83127df..59ecb2bc9993 100644 --- a/mypy/maptype.py +++ b/mypy/maptype.py @@ -31,6 +31,9 @@ def map_instance_to_supertype(instance: Instance, superclass: TypeInfo) -> Insta import mypy.typeops return mypy.typeops.tuple_fallback(tuple_type) + elif isinstance(tuple_type, Instance): + # This can happen after normalizing variadic tuples. + return tuple_type if not superclass.type_vars: # Fast path: `superclass` has no type variables to map to. diff --git a/mypy/meet.py b/mypy/meet.py index fa9bd6a83743..d2fb16808425 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -869,16 +869,17 @@ def meet_tuples(self, s: TupleType, t: TupleType) -> list[Type] | None: return None if s_unpack_index is not None and t_unpack_index is not None: # The only simple case we can handle if both tuples are variadic - # is when they are purely variadic. Other cases are tricky because + # is when their structure fully matches. Other cases are tricky because # a variadic item is effectively a union of tuples of all length, thus # potentially causing overlap between a suffix in `s` and a prefix # in `t` (see how this is handled in is_subtype() for details). 
# TODO: handle more cases (like when both prefix/suffix are shorter in s or t). - if s.length() == 1 and t.length() == 1: - s_unpack = s.items[0] + if s.length() == t.length() and s_unpack_index == t_unpack_index: + unpack_index = s_unpack_index + s_unpack = s.items[unpack_index] assert isinstance(s_unpack, UnpackType) s_unpacked = get_proper_type(s_unpack.type) - t_unpack = t.items[0] + t_unpack = t.items[unpack_index] assert isinstance(t_unpack, UnpackType) t_unpacked = get_proper_type(t_unpack.type) if not (isinstance(s_unpacked, Instance) and isinstance(t_unpacked, Instance)): @@ -886,7 +887,13 @@ def meet_tuples(self, s: TupleType, t: TupleType) -> list[Type] | None: meet = self.meet(s_unpacked, t_unpacked) if not isinstance(meet, Instance): return None - return [UnpackType(meet)] + m_prefix: list[Type] = [] + for si, ti in zip(s.items[:unpack_index], t.items[:unpack_index]): + m_prefix.append(meet_types(si, ti)) + m_suffix: list[Type] = [] + for si, ti in zip(s.items[unpack_index + 1 :], t.items[unpack_index + 1 :]): + m_suffix.append(meet_types(si, ti)) + return m_prefix + [UnpackType(meet)] + m_suffix return None if s_unpack_index is not None: variadic = s @@ -1006,11 +1013,11 @@ def default(self, typ: Type) -> ProperType: def meet_similar_callables(t: CallableType, s: CallableType) -> CallableType: - from mypy.join import join_types + from mypy.join import safe_join arg_types: list[Type] = [] for i in range(len(t.arg_types)): - arg_types.append(join_types(t.arg_types[i], s.arg_types[i])) + arg_types.append(safe_join(t.arg_types[i], s.arg_types[i])) # TODO in combine_similar_callables also applies here (names and kinds) # The fallback type can be either 'function' or 'type'. The result should have 'function' as # fallback only if both operands have it as 'function'. 
diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py index 425e5906926a..e8edfe65c8d4 100644 --- a/mypy/semanal_shared.py +++ b/mypy/semanal_shared.py @@ -45,6 +45,8 @@ TypeOfAny, TypeVarId, TypeVarLikeType, + TypeVarTupleType, + UnpackType, get_proper_type, ) @@ -286,7 +288,23 @@ def calculate_tuple_fallback(typ: TupleType) -> None: """ fallback = typ.partial_fallback assert fallback.type.fullname == "builtins.tuple" - fallback.args = (join.join_type_list(list(typ.items)),) + fallback.args[1:] + items = [] + for item in typ.items: + # TODO: this duplicates some logic in typeops.tuple_fallback(). + if isinstance(item, UnpackType): + unpacked_type = get_proper_type(item.type) + if isinstance(unpacked_type, TypeVarTupleType): + unpacked_type = get_proper_type(unpacked_type.upper_bound) + if ( + isinstance(unpacked_type, Instance) + and unpacked_type.type.fullname == "builtins.tuple" + ): + items.append(unpacked_type.args[0]) + else: + raise NotImplementedError + else: + items.append(item) + fallback.args = (join.join_type_list(items),) class _NamedTypeCallback(Protocol): diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 383e6eddd317..6d129683c3f5 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -734,9 +734,13 @@ def visit_tuple_type(self, left: TupleType) -> bool: for li in left.items: if isinstance(li, UnpackType): unpack = get_proper_type(li.type) - if isinstance(unpack, Instance): - assert unpack.type.fullname == "builtins.tuple" - li = unpack.args[0] + if isinstance(unpack, TypeVarTupleType): + unpack = get_proper_type(unpack.upper_bound) + assert ( + isinstance(unpack, Instance) + and unpack.type.fullname == "builtins.tuple" + ) + li = unpack.args[0] if not self._is_subtype(li, iter_type): return False return True @@ -1578,6 +1582,18 @@ def are_parameters_compatible( return True trivial_suffix = is_trivial_suffix(right) and not is_proper_subtype + if ( + right.arg_kinds == [ARG_STAR] + and isinstance(get_proper_type(right.arg_types[0]), 
AnyType) + and not is_proper_subtype + ): + # Similar to how (*Any, **Any) is considered a supertype of all callables, we consider + # (*Any) a supertype of all callables with positional arguments. This is needed in + # particular because we often refuse to try type inference if actual type is not + # a subtype of erased template type. + if all(k.is_positional() for k in left.arg_kinds) and ignore_pos_arg_names: + return True + # Match up corresponding arguments and check them for compatibility. In # every pair (argL, argR) of corresponding arguments from L and R, argL must # be "more general" than argR if L is to be a subtype of R. diff --git a/mypy/typeops.py b/mypy/typeops.py index dff43775fe3d..2eb3b284e729 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -39,6 +39,7 @@ Instance, LiteralType, NoneType, + NormalizedCallableType, Overloaded, Parameters, ParamSpecType, @@ -364,7 +365,7 @@ def erase_to_bound(t: Type) -> Type: def callable_corresponding_argument( - typ: CallableType | Parameters, model: FormalArgument + typ: NormalizedCallableType | Parameters, model: FormalArgument ) -> FormalArgument | None: """Return the argument a function that corresponds to `model`""" diff --git a/mypy/types_utils.py b/mypy/types_utils.py index f289ac3e9ed1..1cd56eae5835 100644 --- a/mypy/types_utils.py +++ b/mypy/types_utils.py @@ -144,8 +144,7 @@ def store_argument_type( elif isinstance(arg_type, UnpackType): unpacked_type = get_proper_type(arg_type.type) if isinstance(unpacked_type, TupleType): - # Instead of using Tuple[Unpack[Tuple[...]]], just use - # Tuple[...] + # Instead of using Tuple[Unpack[Tuple[...]]], just use Tuple[...] arg_type = unpacked_type elif ( isinstance(unpacked_type, Instance) @@ -153,6 +152,7 @@ def store_argument_type( ): arg_type = unpacked_type else: + # TODO: verify that we can only have a TypeVarTuple here. 
arg_type = TupleType( [arg_type], fallback=named_type("builtins.tuple", [named_type("builtins.object", [])]), diff --git a/mypy/typevars.py b/mypy/typevars.py index 027a8e3f7fc5..3d74a40c303f 100644 --- a/mypy/typevars.py +++ b/mypy/typevars.py @@ -6,6 +6,7 @@ AnyType, Instance, ParamSpecType, + ProperType, TupleType, Type, TypeOfAny, @@ -55,6 +56,7 @@ def fill_typevars(typ: TypeInfo) -> Instance | TupleType: ) tvs.append(tv) inst = Instance(typ, tvs) + # TODO: do we need to also handle typeddict_type here and below? if typ.tuple_type is None: return inst return typ.tuple_type.copy_modified(fallback=inst) @@ -62,10 +64,23 @@ def fill_typevars(typ: TypeInfo) -> Instance | TupleType: def fill_typevars_with_any(typ: TypeInfo) -> Instance | TupleType: """Apply a correct number of Any's as type arguments to a type.""" - inst = Instance(typ, [AnyType(TypeOfAny.special_form)] * len(typ.defn.type_vars)) + args: list[Type] = [] + for tv in typ.defn.type_vars: + # Valid erasure for *Ts is *tuple[Any, ...], not just Any. + if isinstance(tv, TypeVarTupleType): + args.append( + UnpackType(tv.tuple_fallback.copy_modified(args=[AnyType(TypeOfAny.special_form)])) + ) + else: + args.append(AnyType(TypeOfAny.special_form)) + inst = Instance(typ, args) if typ.tuple_type is None: return inst - return typ.tuple_type.copy_modified(fallback=inst) + erased_tuple_type = erase_typevars(typ.tuple_type, {tv.id for tv in typ.defn.type_vars}) + assert isinstance(erased_tuple_type, ProperType) + if isinstance(erased_tuple_type, TupleType): + return typ.tuple_type.copy_modified(fallback=inst) + return inst def has_no_typevars(typ: Type) -> bool: diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index f2625b869c19..eb7a795f99c0 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -6454,6 +6454,49 @@ class C(Generic[P]): def __init__(self, fn: Callable[P, int]) -> None: ... 
[builtins fixtures/dict.pyi] +[case testVariadicClassIncrementalUpdateRegularToVariadic] +from typing import Any +from lib import C + +x: C[int, str] + +[file lib.py] +from typing import Generic, TypeVar + +T = TypeVar("T") +S = TypeVar("S") +class C(Generic[T, S]): ... + +[file lib.py.2] +from typing import Generic +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class C(Generic[Unpack[Ts]]): ... +[builtins fixtures/tuple.pyi] + +[case testVariadicClassIncrementalUpdateVariadicToRegular] +from typing import Any +from lib import C + +x: C[int, str, int] + +[file lib.py] +from typing import Generic +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class C(Generic[Unpack[Ts]]): ... +[file lib.py.2] +from typing import Generic, TypeVar + +T = TypeVar("T") +S = TypeVar("S") +class C(Generic[T, S]): ... +[builtins fixtures/tuple.pyi] +[out2] +main:4: error: "C" expects 2 type arguments, but 3 given + [case testVariadicTupleIncrementalUpdateNoCrash] import m [file m.py] diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test index 640e64c78d5f..d3cdf3af849d 100644 --- a/test-data/unit/check-python310.test +++ b/test-data/unit/check-python310.test @@ -700,6 +700,21 @@ match m: reveal_type(m) # N: Revealed type is "__main__.A[Any]" reveal_type(i) # N: Revealed type is "Any" +[case testMatchClassPatternCaptureVariadicGeneric] +from typing import Generic, Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple('Ts') +class A(Generic[Unpack[Ts]]): + a: Tuple[Unpack[Ts]] + +m: object +match m: + case A(a=i): + reveal_type(m) # N: Revealed type is "__main__.A[Unpack[builtins.tuple[Any, ...]]]" + reveal_type(i) # N: Revealed type is "builtins.tuple[Any, ...]" +[builtins fixtures/tuple.pyi] + [case testMatchClassPatternCaptureGenericAlreadyKnown] from typing import Generic, TypeVar @@ -2026,3 +2041,105 @@ def f4(e: int | str | bytes) -> int: return 0 [builtins 
fixtures/primitives.pyi] + +[case testMatchSequencePatternVariadicTupleNotTooShort] +from typing import Tuple +from typing_extensions import Unpack + +fm1: Tuple[int, int, Unpack[Tuple[str, ...]], int] +match fm1: + case [fa1, fb1, fc1]: + reveal_type(fa1) # N: Revealed type is "builtins.int" + reveal_type(fb1) # N: Revealed type is "builtins.int" + reveal_type(fc1) # N: Revealed type is "builtins.int" + +fm2: Tuple[int, int, Unpack[Tuple[str, ...]], int] +match fm2: + case [fa2, fb2]: + reveal_type(fa2) + reveal_type(fb2) + +fm3: Tuple[int, int, Unpack[Tuple[str, ...]], int] +match fm3: + case [fa3, fb3, fc3, fd3, fe3]: + reveal_type(fa3) # N: Revealed type is "builtins.int" + reveal_type(fb3) # N: Revealed type is "builtins.int" + reveal_type(fc3) # N: Revealed type is "builtins.str" + reveal_type(fd3) # N: Revealed type is "builtins.str" + reveal_type(fe3) # N: Revealed type is "builtins.int" + +m1: Tuple[int, Unpack[Tuple[str, ...]], int] +match m1: + case [a1, *b1, c1]: + reveal_type(a1) # N: Revealed type is "builtins.int" + reveal_type(b1) # N: Revealed type is "builtins.list[builtins.str]" + reveal_type(c1) # N: Revealed type is "builtins.int" + +m2: Tuple[int, Unpack[Tuple[str, ...]], int] +match m2: + case [a2, b2, *c2, d2, e2]: + reveal_type(a2) # N: Revealed type is "builtins.int" + reveal_type(b2) # N: Revealed type is "builtins.str" + reveal_type(c2) # N: Revealed type is "builtins.list[builtins.str]" + reveal_type(d2) # N: Revealed type is "builtins.str" + reveal_type(e2) # N: Revealed type is "builtins.int" + +m3: Tuple[int, int, Unpack[Tuple[str, ...]], int, int] +match m3: + case [a3, *b3, c3]: + reveal_type(a3) # N: Revealed type is "builtins.int" + reveal_type(b3) # N: Revealed type is "builtins.list[Union[builtins.int, builtins.str]]" + reveal_type(c3) # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] + +[case testMatchSequencePatternTypeVarTupleNotTooShort] +from typing import Tuple +from typing_extensions import Unpack, 
TypeVarTuple + +Ts = TypeVarTuple("Ts") +def test(xs: Tuple[Unpack[Ts]]) -> None: + fm1: Tuple[int, int, Unpack[Ts], int] + match fm1: + case [fa1, fb1, fc1]: + reveal_type(fa1) # N: Revealed type is "builtins.int" + reveal_type(fb1) # N: Revealed type is "builtins.int" + reveal_type(fc1) # N: Revealed type is "builtins.int" + + fm2: Tuple[int, int, Unpack[Ts], int] + match fm2: + case [fa2, fb2]: + reveal_type(fa2) + reveal_type(fb2) + + fm3: Tuple[int, int, Unpack[Ts], int] + match fm3: + case [fa3, fb3, fc3, fd3, fe3]: + reveal_type(fa3) # N: Revealed type is "builtins.int" + reveal_type(fb3) # N: Revealed type is "builtins.int" + reveal_type(fc3) # N: Revealed type is "builtins.object" + reveal_type(fd3) # N: Revealed type is "builtins.object" + reveal_type(fe3) # N: Revealed type is "builtins.int" + + m1: Tuple[int, Unpack[Ts], int] + match m1: + case [a1, *b1, c1]: + reveal_type(a1) # N: Revealed type is "builtins.int" + reveal_type(b1) # N: Revealed type is "builtins.list[builtins.object]" + reveal_type(c1) # N: Revealed type is "builtins.int" + + m2: Tuple[int, Unpack[Ts], int] + match m2: + case [a2, b2, *c2, d2, e2]: + reveal_type(a2) # N: Revealed type is "builtins.int" + reveal_type(b2) # N: Revealed type is "builtins.object" + reveal_type(c2) # N: Revealed type is "builtins.list[builtins.object]" + reveal_type(d2) # N: Revealed type is "builtins.object" + reveal_type(e2) # N: Revealed type is "builtins.int" + + m3: Tuple[int, int, Unpack[Ts], int, int] + match m3: + case [a3, *b3, c3]: + reveal_type(a3) # N: Revealed type is "builtins.int" + reveal_type(b3) # N: Revealed type is "builtins.list[builtins.object]" + reveal_type(c3) # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index bf7a928ff51d..29abe9cb025b 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -2036,3 +2036,23 @@ class Ben(Object): foo_method = 
cls.MY_MAP["foo"] return foo_method(Foo()) [builtins fixtures/isinstancelist.pyi] + +[case testSelfTypeOnGenericClassObjectNewStyleBound] +from typing import Generic, TypeVar, Self + +T = TypeVar("T") +S = TypeVar("S") +class B(Generic[T, S]): + def copy(self) -> Self: ... + +b: B[int, str] +reveal_type(B.copy(b)) # N: Revealed type is "__main__.B[builtins.int, builtins.str]" + +class C(B[T, S]): ... + +c: C[int, str] +reveal_type(C.copy(c)) # N: Revealed type is "__main__.C[builtins.int, builtins.str]" + +B.copy(42) # E: Value of type variable "Self" of "copy" of "B" cannot be "int" +C.copy(42) # E: Value of type variable "Self" of "copy" of "B" cannot be "int" +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index 1a2573898170..7b8a22313b36 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -1911,6 +1911,180 @@ z = C[int]() # E: Bad number of arguments, expected: at least 2, given: 1 reveal_type(z) # N: Revealed type is "__main__.C[Any, Unpack[builtins.tuple[Any, ...]], Any]" [builtins fixtures/tuple.pyi] +[case testVariadicTupleTupleSubclassPrefixSuffix] +from typing import Tuple +from typing_extensions import Unpack + +i: int + +class A(Tuple[int, Unpack[Tuple[int, ...]]]): ... +a: A +reveal_type(a[i]) # N: Revealed type is "builtins.int" + +class B(Tuple[Unpack[Tuple[int, ...]], int]): ... +b: B +reveal_type(b[i]) # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] + +[case testVariadicClassSubclassInit] +from typing import Tuple, Generic, TypeVar +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class B(Generic[Unpack[Ts]]): + def __init__(self, x: Tuple[Unpack[Ts]], *args: Unpack[Ts]) -> None: ... +reveal_type(B) # N: Revealed type is "def [Ts] (x: Tuple[Unpack[Ts`1]], *args: Unpack[Ts`1]) -> __main__.B[Unpack[Ts`1]]" + +T = TypeVar("T") +S = TypeVar("S") +class C(B[T, S]): ... 
+reveal_type(C) # N: Revealed type is "def [T, S] (x: Tuple[T`1, S`2], T`1, S`2) -> __main__.C[T`1, S`2]" +[builtins fixtures/tuple.pyi] + +[case testVariadicClassGenericSelf] +from typing import Tuple, Generic, TypeVar +from typing_extensions import TypeVarTuple, Unpack + +T = TypeVar("T") +S = TypeVar("S") +Ts = TypeVarTuple("Ts") +class B(Generic[Unpack[Ts]]): + def copy(self: T) -> T: ... + def on_pair(self: B[T, S]) -> Tuple[T, S]: ... + +b1: B[int] +reveal_type(b1.on_pair()) # E: Invalid self argument "B[int]" to attribute function "on_pair" with type "Callable[[B[T, S]], Tuple[T, S]]" \ + # N: Revealed type is "Tuple[Never, Never]" +b2: B[int, str] +reveal_type(b2.on_pair()) # N: Revealed type is "Tuple[builtins.int, builtins.str]" +b3: B[int, str, int] +reveal_type(b3.on_pair()) # E: Invalid self argument "B[int, str, int]" to attribute function "on_pair" with type "Callable[[B[T, S]], Tuple[T, S]]" \ + # N: Revealed type is "Tuple[Never, Never]" + +class C(B[T, S]): ... +c: C[int, str] +reveal_type(c.copy()) # N: Revealed type is "__main__.C[builtins.int, builtins.str]" +[builtins fixtures/tuple.pyi] + +[case testVariadicClassNewStyleSelf] +from typing import Generic, TypeVar, Self +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class B(Generic[Unpack[Ts]]): + next: Self + def copy(self) -> Self: + return self.next + +b: B[int, str, int] +reveal_type(b.next) # N: Revealed type is "__main__.B[builtins.int, builtins.str, builtins.int]" +reveal_type(b.copy()) # N: Revealed type is "__main__.B[builtins.int, builtins.str, builtins.int]" +reveal_type(B.copy(b)) # N: Revealed type is "__main__.B[builtins.int, builtins.str, builtins.int]" + +T = TypeVar("T") +S = TypeVar("S") +class C(B[T, S]): ... 
+c: C[int, str] + +reveal_type(c.next) # N: Revealed type is "__main__.C[builtins.int, builtins.str]" +reveal_type(c.copy()) # N: Revealed type is "__main__.C[builtins.int, builtins.str]" +reveal_type(C.copy(c)) # N: Revealed type is "__main__.C[builtins.int, builtins.str]" +[builtins fixtures/tuple.pyi] + +[case testVariadicTupleDataclass] +from dataclasses import dataclass +from typing import Generic, TypeVar, Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") + +@dataclass +class B(Generic[Unpack[Ts]]): + items: Tuple[Unpack[Ts]] + +reveal_type(B) # N: Revealed type is "def [Ts] (items: Tuple[Unpack[Ts`1]]) -> __main__.B[Unpack[Ts`1]]" +b = B((1, "yes")) +reveal_type(b.items) # N: Revealed type is "Tuple[builtins.int, builtins.str]" + +T = TypeVar("T") +S = TypeVar("S") + +@dataclass +class C(B[T, S]): + first: T + second: S + +reveal_type(C) # N: Revealed type is "def [T, S] (items: Tuple[T`1, S`2], first: T`1, second: S`2) -> __main__.C[T`1, S`2]" +c = C((1, "yes"), 2, "no") +reveal_type(c.items) # N: Revealed type is "Tuple[builtins.int, builtins.str]" +reveal_type(c.first) # N: Revealed type is "builtins.int" +reveal_type(c.second) # N: Revealed type is "builtins.str" +[builtins fixtures/dataclasses.pyi] +[typing fixtures/typing-medium.pyi] + +[case testVariadicTupleInProtocol] +from typing import Protocol, Tuple, List +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class P(Protocol[Unpack[Ts]]): + def items(self) -> Tuple[Unpack[Ts]]: ... + +class PC(Protocol[Unpack[Ts]]): + def meth(self, *args: Unpack[Ts]) -> None: ... + +def get_items(x: P[Unpack[Ts]]) -> Tuple[Unpack[Ts]]: ... +def match(x: PC[Unpack[Ts]]) -> Tuple[Unpack[Ts]]: ... + +class Bad: + def items(self) -> List[int]: ... + def meth(self, *, named: int) -> None: ... + +class Good: + def items(self) -> Tuple[int, str]: ... + def meth(self, __x: int, y: str) -> None: ... 
+ +g: Good +reveal_type(get_items(g)) # N: Revealed type is "Tuple[builtins.int, builtins.str]" +reveal_type(match(g)) # N: Revealed type is "Tuple[builtins.int, builtins.str]" + +b: Bad +get_items(b) # E: Argument 1 to "get_items" has incompatible type "Bad"; expected "P[Unpack[Tuple[Never, ...]]]" \ + # N: Following member(s) of "Bad" have conflicts: \ + # N: Expected: \ + # N: def items(self) -> Tuple[Never, ...] \ + # N: Got: \ + # N: def items(self) -> List[int] +match(b) # E: Argument 1 to "match" has incompatible type "Bad"; expected "PC[Unpack[Tuple[Never, ...]]]" \ + # N: Following member(s) of "Bad" have conflicts: \ + # N: Expected: \ + # N: def meth(self, *args: Never) -> None \ + # N: Got: \ + # N: def meth(self, *, named: int) -> None +[builtins fixtures/tuple.pyi] + +[case testVariadicTupleCollectionCheck] +from typing import Tuple, Optional +from typing_extensions import Unpack + +allowed: Tuple[int, Unpack[Tuple[int, ...]]] + +x: Optional[int] +if x in allowed: + reveal_type(x) # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] + +[case testJoinOfVariadicTupleCallablesNoCrash] +from typing import Callable, Tuple + +f: Callable[[int, *Tuple[str, ...], int], None] +g: Callable[[int, *Tuple[str, ...], int], None] +reveal_type([f, g]) # N: Revealed type is "builtins.list[def (builtins.int, *Unpack[Tuple[Unpack[builtins.tuple[builtins.str, ...]], builtins.int]])]" + +h: Callable[[int, *Tuple[str, ...], str], None] +reveal_type([f, h]) # N: Revealed type is "builtins.list[def (builtins.int, *Unpack[Tuple[Unpack[builtins.tuple[builtins.str, ...]], Never]])]" +[builtins fixtures/tuple.pyi] + [case testTypeVarTupleBothUnpacksSimple] from typing import Tuple from typing_extensions import Unpack, TypeVarTuple, TypedDict diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index cb24467cbf41..5dc42bd62d9b 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -9908,6 +9908,128 @@ x = 
0 # Arbitrary change to trigger reprocessing == a.py:3: note: Revealed type is "Tuple[Literal[1]?, Literal['x']?]" +[case testVariadicClassFineUpdateRegularToVariadic] +from typing import Any +from lib import C + +x: C[int, str] + +[file lib.py] +from typing import Generic, TypeVar + +T = TypeVar("T") +S = TypeVar("S") +class C(Generic[T, S]): ... + +[file lib.py.2] +from typing import Generic +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class C(Generic[Unpack[Ts]]): ... +[builtins fixtures/tuple.pyi] +[out] +== + +[case testVariadicClassFineUpdateVariadicToRegular] +from typing import Any +from lib import C + +x: C[int, str, int] + +[file lib.py] +from typing import Generic +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class C(Generic[Unpack[Ts]]): ... +[file lib.py.2] +from typing import Generic, TypeVar + +T = TypeVar("T") +S = TypeVar("S") +class C(Generic[T, S]): ... +[builtins fixtures/tuple.pyi] +[out] +== +main:4: error: "C" expects 2 type arguments, but 3 given + +-- Order of error messages is different, so we repeat the test twice. +[case testVariadicClassFineUpdateValidToInvalidCached-only_when_cache] +from typing import Any +from lib import C + +x: C[int, str] + +[file lib.py] +from typing import Generic +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class C(Generic[Unpack[Ts]]): ... + +[file lib.py.2] +from typing import Generic +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class C(Generic[Ts]): ... +[builtins fixtures/tuple.pyi] +[out] +== +main:4: error: "C" expects no type arguments, but 2 given +lib.py:5: error: Free type variable expected in Generic[...] 
+ +[case testVariadicClassFineUpdateValidToInvalid-only_when_nocache] +from typing import Any +from lib import C + +x: C[int, str] + +[file lib.py] +from typing import Generic +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class C(Generic[Unpack[Ts]]): ... + +[file lib.py.2] +from typing import Generic +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class C(Generic[Ts]): ... +[builtins fixtures/tuple.pyi] +[out] +== +lib.py:5: error: Free type variable expected in Generic[...] +main:4: error: "C" expects no type arguments, but 2 given + +[case testVariadicClassFineUpdateInvalidToValid] +from typing import Any +from lib import C + +x: C[int, str] + +[file lib.py] +from typing import Generic +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class C(Generic[Ts]): ... + +[file lib.py.2] +from typing import Generic +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") +class C(Generic[Unpack[Ts]]): ... +[builtins fixtures/tuple.pyi] +[out] +lib.py:5: error: Free type variable expected in Generic[...] +main:4: error: "C" expects no type arguments, but 2 given +== + [case testUnpackKwargsUpdateFine] import m [file shared.py] From c4ab46e6acdeab8fd503322311e2b934c9622695 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sat, 28 Oct 2023 15:00:13 +0100 Subject: [PATCH 129/144] Cache information about whether file is typeshed file (#16351) We used to check if a file is in typeshed a lot. This seems to speed up self-check by about 2%, and this should also speed up tests a bit. 
--- mypy/checker.py | 4 ++-- mypy/nodes.py | 11 ++++++++++- mypy/semanal.py | 13 ++----------- mypy/semanal_main.py | 2 +- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 62ba642256bf..b2804b25e35c 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -222,7 +222,7 @@ from mypy.types_utils import is_overlapping_none, remove_optional, store_argument_type, strip_type from mypy.typetraverser import TypeTraverserVisitor from mypy.typevars import fill_typevars, fill_typevars_with_any, has_no_typevars -from mypy.util import is_dunder, is_sunder, is_typeshed_file +from mypy.util import is_dunder, is_sunder from mypy.visitor import NodeVisitor T = TypeVar("T") @@ -400,7 +400,7 @@ def __init__( self.pass_num = 0 self.current_node_deferred = False self.is_stub = tree.is_stub - self.is_typeshed_stub = is_typeshed_file(options.abs_custom_typeshed_dir, path) + self.is_typeshed_stub = tree.is_typeshed_file(options) self.inferred_attribute_types = None # If True, process function definitions. If False, don't. This is used diff --git a/mypy/nodes.py b/mypy/nodes.py index 0e5c078d0227..1d7b3e3be84b 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -27,7 +27,7 @@ import mypy.strconv from mypy.options import Options -from mypy.util import short_type +from mypy.util import is_typeshed_file, short_type from mypy.visitor import ExpressionVisitor, NodeVisitor, StatementVisitor if TYPE_CHECKING: @@ -283,6 +283,7 @@ class MypyFile(SymbolNode): "is_partial_stub_package", "plugin_deps", "future_import_flags", + "_is_typeshed_file", ) __match_args__ = ("name", "path", "defs") @@ -319,6 +320,7 @@ class MypyFile(SymbolNode): plugin_deps: dict[str, set[str]] # Future imports defined in this file. Populated during semantic analysis. 
future_import_flags: set[str] + _is_typeshed_file: bool | None def __init__( self, @@ -346,6 +348,7 @@ def __init__( self.is_cache_skeleton = False self.is_partial_stub_package = False self.future_import_flags = set() + self._is_typeshed_file = None def local_definitions(self) -> Iterator[Definition]: """Return all definitions within the module (including nested). @@ -371,6 +374,12 @@ def is_package_init_file(self) -> bool: def is_future_flag_set(self, flag: str) -> bool: return flag in self.future_import_flags + def is_typeshed_file(self, options: Options) -> bool: + # Cache result since this is called a lot + if self._is_typeshed_file is None: + self._is_typeshed_file = is_typeshed_file(options.abs_custom_typeshed_dir, self.path) + return self._is_typeshed_file + def serialize(self) -> JsonDict: return { ".class": "MypyFile", diff --git a/mypy/semanal.py b/mypy/semanal.py index 27491ac695ae..41943e1db8b0 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -283,14 +283,7 @@ ) from mypy.types_utils import is_invalid_recursive_alias, store_argument_type from mypy.typevars import fill_typevars -from mypy.util import ( - correct_relative_import, - is_dunder, - is_typeshed_file, - module_prefix, - unmangle, - unnamed_function, -) +from mypy.util import correct_relative_import, is_dunder, module_prefix, unmangle, unnamed_function from mypy.visitor import NodeVisitor T = TypeVar("T") @@ -777,9 +770,7 @@ def file_context( self.cur_mod_id = file_node.fullname with scope.module_scope(self.cur_mod_id): self._is_stub_file = file_node.path.lower().endswith(".pyi") - self._is_typeshed_stub_file = is_typeshed_file( - options.abs_custom_typeshed_dir, file_node.path - ) + self._is_typeshed_stub_file = file_node.is_typeshed_file(options) self.globals = file_node.names self.tvar_scope = TypeVarLikeScope() diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py index ec09deb0952f..1185a3821553 100644 --- a/mypy/semanal_main.py +++ b/mypy/semanal_main.py @@ -380,7 +380,7 @@ def 
check_type_arguments(graph: Graph, scc: list[str], errors: Errors) -> None: analyzer = TypeArgumentAnalyzer( errors, state.options, - is_typeshed_file(state.options.abs_custom_typeshed_dir, state.path or ""), + state.tree.is_typeshed_file(state.options), state.manager.semantic_analyzer.named_type, ) with state.wrap_context(): From c76132f63de5de4d3f9818d070c1cd26d2209d5a Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sat, 28 Oct 2023 15:00:23 +0100 Subject: [PATCH 130/144] Add fast path for checking self types (#16352) The check was pretty expensive, though usually it's not doing anything non-trivial. Added a fast path for cases where we use the implicit self type, which covers the vast majority of cases. This makes self-check about 4% faster. --- mypy/checker.py | 58 +++++++++++++++++++++++++----------------------- mypy/subtypes.py | 12 ++++++++++ 2 files changed, 42 insertions(+), 28 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index b2804b25e35c..f51ba746ea75 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -1199,13 +1199,14 @@ def check_func_def( # Push return type. self.return_types.append(typ.ret_type) + with self.scope.push_function(defn): + # We temporary push the definition to get the self type as + # visible from *inside* of this function/method. + ref_type: Type | None = self.scope.active_self_type() + # Store argument types. for i in range(len(typ.arg_types)): arg_type = typ.arg_types[i] - with self.scope.push_function(defn): - # We temporary push the definition to get the self type as - # visible from *inside* of this function/method. - ref_type: Type | None = self.scope.active_self_type() if ( isinstance(defn, FuncDef) and ref_type is not None @@ -1215,30 +1216,31 @@ def check_func_def( ): if defn.is_class or defn.name == "__new__": ref_type = mypy.types.TypeType.make_normalized(ref_type) - # This level of erasure matches the one in checkmember.check_self_arg(), - # better keep these two checks consistent. 
- erased = get_proper_type(erase_typevars(erase_to_bound(arg_type))) - if not is_subtype(ref_type, erased, ignore_type_params=True): - if ( - isinstance(erased, Instance) - and erased.type.is_protocol - or isinstance(erased, TypeType) - and isinstance(erased.item, Instance) - and erased.item.type.is_protocol - ): - # We allow the explicit self-type to be not a supertype of - # the current class if it is a protocol. For such cases - # the consistency check will be performed at call sites. - msg = None - elif typ.arg_names[i] in {"self", "cls"}: - msg = message_registry.ERASED_SELF_TYPE_NOT_SUPERTYPE.format( - erased.str_with_options(self.options), - ref_type.str_with_options(self.options), - ) - else: - msg = message_registry.MISSING_OR_INVALID_SELF_TYPE - if msg: - self.fail(msg, defn) + if not is_same_type(arg_type, ref_type): + # This level of erasure matches the one in checkmember.check_self_arg(), + # better keep these two checks consistent. + erased = get_proper_type(erase_typevars(erase_to_bound(arg_type))) + if not is_subtype(ref_type, erased, ignore_type_params=True): + if ( + isinstance(erased, Instance) + and erased.type.is_protocol + or isinstance(erased, TypeType) + and isinstance(erased.item, Instance) + and erased.item.type.is_protocol + ): + # We allow the explicit self-type to be not a supertype of + # the current class if it is a protocol. For such cases + # the consistency check will be performed at call sites. 
+ msg = None + elif typ.arg_names[i] in {"self", "cls"}: + msg = message_registry.ERASED_SELF_TYPE_NOT_SUPERTYPE.format( + erased.str_with_options(self.options), + ref_type.str_with_options(self.options), + ) + else: + msg = message_registry.MISSING_OR_INVALID_SELF_TYPE + if msg: + self.fail(msg, defn) elif isinstance(arg_type, TypeVarType): # Refuse covariant parameter type variables # TODO: check recursively for inner type variables diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 6d129683c3f5..7e37751b1c15 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -258,6 +258,18 @@ def is_same_type( This means types may have different representation (e.g. an alias, or a non-simplified union) but are semantically exchangeable in all contexts. """ + # First, use fast path for some common types. This is performance-critical. + if ( + type(a) is Instance + and type(b) is Instance + and a.type == b.type + and len(a.args) == len(b.args) + and a.last_known_value is b.last_known_value + ): + return all(is_same_type(x, y) for x, y in zip(a.args, b.args)) + elif isinstance(a, TypeVarType) and isinstance(b, TypeVarType) and a.id == b.id: + return True + # Note that using ignore_promotions=True (default) makes types like int and int64 # considered not the same type (which is the case at runtime). 
# Also Union[bool, int] (if it wasn't simplified before) will be different From 2aa2443107534715a650dbe78474e7d91cc9df20 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 28 Oct 2023 09:04:58 -0700 Subject: [PATCH 131/144] Avoid importing from setuptools._distutils (#16348) Fixes #16318, as requested by setuptools maintainer --- mypyc/build.py | 31 ++++++++++++++----------------- 1 file changed, 14 insertions(+), 17 deletions(-) diff --git a/mypyc/build.py b/mypyc/build.py index 9889577d4add..0af8908e14d0 100644 --- a/mypyc/build.py +++ b/mypyc/build.py @@ -40,8 +40,16 @@ from mypyc.namegen import exported_name from mypyc.options import CompilerOptions -if sys.version_info < (3, 12): - if TYPE_CHECKING: +try: + # Import setuptools so that it monkey-patch overrides distutils + import setuptools +except ImportError: + pass + +if TYPE_CHECKING: + if sys.version_info >= (3, 12): + from setuptools import Extension + else: from distutils.core import Extension as _distutils_Extension from typing_extensions import TypeAlias @@ -49,22 +57,11 @@ Extension: TypeAlias = Union[_setuptools_Extension, _distutils_Extension] - try: - # Import setuptools so that it monkey-patch overrides distutils - import setuptools - except ImportError: - pass - from distutils import ccompiler, sysconfig +if sys.version_info >= (3, 12): + # From setuptools' monkeypatch + from distutils import ccompiler, sysconfig # type: ignore[import-not-found] else: - import setuptools - from setuptools import Extension - from setuptools._distutils import ( - ccompiler as _ccompiler, # type: ignore[attr-defined] - sysconfig as _sysconfig, # type: ignore[attr-defined] - ) - - ccompiler = _ccompiler - sysconfig = _sysconfig + from distutils import ccompiler, sysconfig def get_extension() -> type[Extension]: From 65a068ed21c4563590062ad3fbd9e58fe0e7968d Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sun, 29 Oct 2023 06:45:24 +0000 Subject: [PATCH 132/144] 
Speed up type argument checking (#16353) The upper bound is usually `object`, so add a fast path and skip a potentially slow subtype check if that's the case. Also make type annotations more precise. This seems to at least speed up type checker tests, by 1-2% or so. This also potentially speeds up self-check a bit, though probably by less than 1%. --- mypy/semanal_typeargs.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py index a25bab8de054..15ea15d612c0 100644 --- a/mypy/semanal_typeargs.py +++ b/mypy/semanal_typeargs.py @@ -7,7 +7,7 @@ from __future__ import annotations -from typing import Callable, Sequence +from typing import Callable from mypy import errorcodes as codes, message_registry from mypy.errorcodes import ErrorCode @@ -88,7 +88,7 @@ def visit_type_alias_type(self, t: TypeAliasType) -> None: return self.seen_aliases.add(t) assert t.alias is not None, f"Unfixed type alias {t.type_ref}" - is_error = self.validate_args(t.alias.name, t.args, t.alias.alias_tvars, t) + is_error = self.validate_args(t.alias.name, tuple(t.args), t.alias.alias_tvars, t) if not is_error: # If there was already an error for the alias itself, there is no point in checking # the expansion, most likely it will result in the same kind of error. 
@@ -131,7 +131,7 @@ def visit_instance(self, t: Instance) -> None: t.args = unpacked.args def validate_args( - self, name: str, args: Sequence[Type], type_vars: list[TypeVarLikeType], ctx: Context + self, name: str, args: tuple[Type, ...], type_vars: list[TypeVarLikeType], ctx: Context ) -> bool: if any(isinstance(v, TypeVarTupleType) for v in type_vars): prefix = next(i for (i, v) in enumerate(type_vars) if isinstance(v, TypeVarTupleType)) @@ -140,7 +140,7 @@ def validate_args( start, middle, end = split_with_prefix_and_suffix( tuple(args), prefix, len(type_vars) - prefix - 1 ) - args = list(start) + [TupleType(list(middle), tvt.tuple_fallback)] + list(end) + args = start + (TupleType(list(middle), tvt.tuple_fallback),) + end is_error = False for (i, arg), tvar in zip(enumerate(args), type_vars): @@ -174,7 +174,14 @@ def validate_args( arg_values = [arg] if self.check_type_var_values(name, arg_values, tvar.name, tvar.values, ctx): is_error = True - if not is_subtype(arg, tvar.upper_bound): + # Check against upper bound. Since it's object the vast majority of the time, + # add fast path to avoid a potentially slow subtype check. + upper_bound = tvar.upper_bound + object_upper_bound = ( + type(upper_bound) is Instance + and upper_bound.type.fullname == "builtins.object" + ) + if not object_upper_bound and not is_subtype(arg, upper_bound): if self.in_type_alias_expr and isinstance(arg, TypeVarType): # Type aliases are allowed to use unconstrained type variables # error will be checked at substitution point. 
@@ -184,7 +191,7 @@ def validate_args( message_registry.INVALID_TYPEVAR_ARG_BOUND.format( format_type(arg, self.options), name, - format_type(tvar.upper_bound, self.options), + format_type(upper_bound, self.options), ), ctx, code=codes.TYPE_VAR, From cf045d924d6688f5f4d0c3402f38d30bc81db299 Mon Sep 17 00:00:00 2001 From: dinaldoap <38653153+dinaldoap@users.noreply.github.com> Date: Mon, 30 Oct 2023 02:06:19 -0300 Subject: [PATCH 133/144] doc: remove duplicate word (#16365) This PR removes one of the duplicate **in** in the sentence "This option is only useful in in the absence of `__init__.py`" in the file `docs/source/command_line.rst`. --- docs/source/command_line.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst index 4e954c7c2ccb..5db118334519 100644 --- a/docs/source/command_line.rst +++ b/docs/source/command_line.rst @@ -133,7 +133,7 @@ imports. This flag tells mypy that top-level packages will be based in either the current directory, or a member of the ``MYPYPATH`` environment variable or - :confval:`mypy_path` config option. This option is only useful in + :confval:`mypy_path` config option. This option is only useful in the absence of `__init__.py`. See :ref:`Mapping file paths to modules ` for details. From b8c748a77a27b27599b9c2b4097427e055f4c16c Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 30 Oct 2023 11:07:20 +0000 Subject: [PATCH 134/144] Fix incremental crash on TypedDict in method (#16364) Fixes https://github.com/python/mypy/issues/16336 All the story with `@`-names is a mess. FWIW I just copied the logic from named tuples, where it works. So although it is a mess, it will be now be a consistent mess, with full parity between `NamedTuple` and `TypedDict`. 
--- mypy/semanal.py | 7 ++++--- mypy/semanal_typeddict.py | 2 ++ test-data/unit/check-incremental.test | 22 +++++++++++++++++++++- 3 files changed, 27 insertions(+), 4 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index 41943e1db8b0..bd24c48ed24f 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -1745,7 +1745,7 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> bool: if info is None: self.mark_incomplete(defn.name, defn) else: - self.prepare_class_def(defn, info) + self.prepare_class_def(defn, info, custom_names=True) return True return False @@ -2099,8 +2099,9 @@ def prepare_class_def( # Preserve name from previous fine-grained incremental run. global_name = defn.info.name defn.fullname = defn.info._fullname - if defn.info.is_named_tuple: - # Named tuple nested within a class is stored in the class symbol table. + if defn.info.is_named_tuple or defn.info.typeddict_type: + # Named tuples and Typed dicts nested within a class are stored + # in the class symbol table. 
self.add_symbol_skip_local(global_name, defn.info) else: self.globals[global_name] = SymbolTableNode(GDEF, defn.info) diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py index 51424d8800d2..e9aaee55879a 100644 --- a/mypy/semanal_typeddict.py +++ b/mypy/semanal_typeddict.py @@ -101,6 +101,8 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> tuple[bool, TypeInfo | N fields, types, statements, required_keys = self.analyze_typeddict_classdef_fields(defn) if fields is None: return True, None # Defer + if self.api.is_func_scope() and "@" not in defn.name: + defn.name += "@" + str(defn.line) info = self.build_typeddict_typeinfo( defn.name, fields, types, required_keys, defn.line, existing_info ) diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index eb7a795f99c0..806a585bff39 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -5135,7 +5135,6 @@ tmp/b.py:4: error: First argument to namedtuple() should be "NT", not "BadName" tmp/b.py:4: error: First argument to namedtuple() should be "NT", not "BadName" [case testNewAnalyzerIncrementalMethodNamedTuple] - import a [file a.py] from b import C @@ -6540,3 +6539,24 @@ from typing_extensions import TypedDict def test() -> None: Counts = TypedDict("Counts", {k: int for k in "abc"}) # type: ignore [builtins fixtures/dict.pyi] + +[case testNoIncrementalCrashOnTypedDictMethod] +import a +[file a.py] +from b import C +x: C +[file a.py.2] +from b import C +x: C +reveal_type(x.h) +[file b.py] +from typing_extensions import TypedDict +class C: + def __init__(self) -> None: + self.h: Hidden + class Hidden(TypedDict): + x: int +[builtins fixtures/dict.pyi] +[out] +[out2] +tmp/a.py:3: note: Revealed type is "TypedDict('b.C.Hidden@5', {'x': builtins.int})" From 4e30e896486b774cdecaef6d3521a585b8acf8bc Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 30 Oct 2023 11:55:21 +0000 Subject: [PATCH 135/144] Fix dmypy inspect for 
namespace packages (#16357) Fixes https://github.com/python/mypy/issues/15781 The fix is to switch to already resolved paths instead of relying on `crawl_up()`. This should be more robust w.r.t. various special cases. I also tweak the tests slightly to show full file names, to have a more consistent output. --- mypy/dmypy_server.py | 4 +- mypy/inspections.py | 16 ++--- mypy/test/testfinegrained.py | 2 +- test-data/unit/daemon.test | 18 +++++- test-data/unit/fine-grained-inspect.test | 80 ++++++++++++------------ 5 files changed, 65 insertions(+), 55 deletions(-) diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index 9cc0888fc208..0db349b5bf82 100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -461,6 +461,7 @@ def initialize_fine_grained( messages = result.errors self.fine_grained_manager = FineGrainedBuildManager(result) + original_sources_len = len(sources) if self.following_imports(): sources = find_all_sources_in_build(self.fine_grained_manager.graph, sources) self.update_sources(sources) @@ -525,7 +526,8 @@ def initialize_fine_grained( __, n_notes, __ = count_stats(messages) status = 1 if messages and n_notes < len(messages) else 0 - messages = self.pretty_messages(messages, len(sources), is_tty, terminal_width) + # We use explicit sources length to match the logic in non-incremental mode. 
+ messages = self.pretty_messages(messages, original_sources_len, is_tty, terminal_width) return {"out": "".join(s + "\n" for s in messages), "err": "", "status": status} def fine_grained_increment( diff --git a/mypy/inspections.py b/mypy/inspections.py index cb695a80eef2..45e981a24af2 100644 --- a/mypy/inspections.py +++ b/mypy/inspections.py @@ -6,7 +6,6 @@ from typing import Callable from mypy.build import State -from mypy.find_sources import InvalidSourceList, SourceFinder from mypy.messages import format_type from mypy.modulefinder import PYTHON_EXTENSIONS from mypy.nodes import ( @@ -206,9 +205,6 @@ def __init__( force_reload: bool = False, ) -> None: self.fg_manager = fg_manager - self.finder = SourceFinder( - self.fg_manager.manager.fscache, self.fg_manager.manager.options - ) self.verbosity = verbosity self.limit = limit self.include_span = include_span @@ -561,16 +557,14 @@ def find_module(self, file: str) -> tuple[State | None, dict[str, object]]: if not any(file.endswith(ext) for ext in PYTHON_EXTENSIONS): return None, {"error": "Source file is not a Python file"} - try: - module, _ = self.finder.crawl_up(os.path.normpath(file)) - except InvalidSourceList: - return None, {"error": "Invalid source file name: " + file} - - state = self.fg_manager.graph.get(module) + # We are using a bit slower but robust way to find a module by path, + # to be sure that namespace packages are handled properly. 
+ abs_path = os.path.abspath(file) + state = next((s for s in self.fg_manager.graph.values() if s.abspath == abs_path), None) self.module = state return ( state, - {"out": f"Unknown module: {module}", "err": "", "status": 1} if state is None else {}, + {"out": f"Unknown module: {file}", "err": "", "status": 1} if state is None else {}, ) def run_inspection( diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py index ba0526d32558..c517c54286d7 100644 --- a/mypy/test/testfinegrained.py +++ b/mypy/test/testfinegrained.py @@ -352,7 +352,7 @@ def maybe_inspect(self, step: int, server: Server, src: str) -> list[str]: ) val = res["error"] if "error" in res else res["out"] + res["err"] output.extend(val.strip().split("\n")) - return normalize_messages(output) + return output def get_suggest(self, program_text: str, incremental_step: int) -> list[tuple[str, str]]: step_bit = "1?" if incremental_step == 1 else str(incremental_step) diff --git a/test-data/unit/daemon.test b/test-data/unit/daemon.test index 18a03a92207d..ca0cd90911b9 100644 --- a/test-data/unit/daemon.test +++ b/test-data/unit/daemon.test @@ -379,7 +379,7 @@ $ dmypy inspect foo.pyc:1:1:2:2 Source file is not a Python file == Return code: 2 $ dmypy inspect bar/baz.py:1:1:2:2 -Unknown module: baz +Unknown module: bar/baz.py == Return code: 1 $ dmypy inspect foo.py:3:1:1:1 "end_line" must not be before "line" @@ -434,7 +434,7 @@ $ dmypy inspect foo.pyc:1:2 Source file is not a Python file == Return code: 2 $ dmypy inspect bar/baz.py:1:2 -Unknown module: baz +Unknown module: bar/baz.py == Return code: 1 $ dmypy inspect foo.py:7:5 --include-span 7:5:7:5 -> "int" @@ -571,3 +571,17 @@ class A: x: int class B: x: int + +[case testDaemonInspectSelectCorrectFile] +$ dmypy run test.py --export-types +Daemon started +Success: no issues found in 1 source file +$ dmypy inspect demo/test.py:1:1 +"int" +$ dmypy inspect test.py:1:1 +"str" +[file test.py] +b: str +from demo.test import a +[file demo/test.py] 
+a: int diff --git a/test-data/unit/fine-grained-inspect.test b/test-data/unit/fine-grained-inspect.test index 2c575ec365b1..f8ce35585c10 100644 --- a/test-data/unit/fine-grained-inspect.test +++ b/test-data/unit/fine-grained-inspect.test @@ -1,8 +1,8 @@ [case testInspectTypeBasic] -# inspect2: --include-kind foo.py:10:13 -# inspect2: --show=type --include-kind foo.py:10:13 -# inspect2: --include-span -vv foo.py:12:5 -# inspect2: --include-span --include-kind foo.py:12:5:12:9 +# inspect2: --include-kind tmp/foo.py:10:13 +# inspect2: --show=type --include-kind tmp/foo.py:10:13 +# inspect2: --include-span -vv tmp/foo.py:12:5 +# inspect2: --include-span --include-kind tmp/foo.py:12:5:12:9 import foo [file foo.py] from typing import TypeVar, Generic @@ -29,10 +29,10 @@ MemberExpr -> "T" CallExpr:12:5:12:9 -> "C[int]" [case testInspectAttrsBasic] -# inspect2: --show=attrs foo.py:6:1 -# inspect2: --show=attrs foo.py:7:1 -# inspect2: --show=attrs foo.py:10:1 -# inspect2: --show=attrs --include-object-attrs foo.py:10:1 +# inspect2: --show=attrs tmp/foo.py:6:1 +# inspect2: --show=attrs tmp/foo.py:7:1 +# inspect2: --show=attrs tmp/foo.py:10:1 +# inspect2: --show=attrs --include-object-attrs tmp/foo.py:10:1 import foo [file foo.py] from bar import Meta @@ -56,12 +56,12 @@ class Meta(type): {"function": ["__name__"], "object": ["__init__"]} [case testInspectDefBasic] -# inspect2: --show=definition foo.py:5:5 -# inspect2: --show=definition --include-kind foo.py:6:3 -# inspect2: --show=definition --include-span foo.py:7:5 -# inspect2: --show=definition foo.py:8:1:8:4 -# inspect2: --show=definition foo.py:8:6:8:8 -# inspect2: --show=definition foo.py:9:3 +# inspect2: --show=definition tmp/foo.py:5:5 +# inspect2: --show=definition --include-kind tmp/foo.py:6:3 +# inspect2: --show=definition --include-span tmp/foo.py:7:5 +# inspect2: --show=definition tmp/foo.py:8:1:8:4 +# inspect2: --show=definition tmp/foo.py:8:6:8:8 +# inspect2: --show=definition tmp/foo.py:9:3 import foo [file 
foo.py] from bar import var, test, A @@ -95,18 +95,18 @@ def foo(x: Union[int, str]) -> None: [builtins fixtures/classmethod.pyi] [out] == -bar.py:4:0:meth +tmp/bar.py:4:0:meth MemberExpr -> tmp/bar.py:2:5:x 7:1:7:5 -> tmp/bar.py:6:9:y -bar.py:9:1:test -bar.py:8:1:var -baz.py:3:2:foo +tmp/bar.py:9:1:test +tmp/bar.py:8:1:var +tmp/baz.py:3:2:foo [case testInspectFallbackAttributes] -# inspect2: --show=attrs --include-object-attrs foo.py:5:1 -# inspect2: --show=attrs foo.py:8:1 -# inspect2: --show=attrs --include-kind foo.py:10:1 -# inspect2: --show=attrs --include-kind --include-object-attrs foo.py:10:1 +# inspect2: --show=attrs --include-object-attrs tmp/foo.py:5:1 +# inspect2: --show=attrs tmp/foo.py:8:1 +# inspect2: --show=attrs --include-kind tmp/foo.py:10:1 +# inspect2: --show=attrs --include-kind --include-object-attrs tmp/foo.py:10:1 import foo [file foo.py] class B: ... @@ -128,7 +128,7 @@ NameExpr -> {} NameExpr -> {"object": ["__eq__", "__init__", "__ne__"]} [case testInspectTypeVarBoundAttrs] -# inspect2: --show=attrs foo.py:8:13 +# inspect2: --show=attrs tmp/foo.py:8:13 import foo [file foo.py] from typing import TypeVar @@ -144,10 +144,10 @@ def foo(arg: T) -> T: {"C": ["x"]} [case testInspectTypeVarValuesAttrs] -# inspect2: --show=attrs --force-reload foo.py:13:13 -# inspect2: --show=attrs --force-reload --union-attrs foo.py:13:13 -# inspect2: --show=attrs foo.py:16:5 -# inspect2: --show=attrs --union-attrs foo.py:16:5 +# inspect2: --show=attrs --force-reload tmp/foo.py:13:13 +# inspect2: --show=attrs --force-reload --union-attrs tmp/foo.py:13:13 +# inspect2: --show=attrs tmp/foo.py:16:5 +# inspect2: --show=attrs --union-attrs tmp/foo.py:16:5 import foo [file foo.py] from typing import TypeVar, Generic @@ -174,8 +174,8 @@ class C(Generic[T]): {"A": ["x", "z"], "B": ["y", "z"]} [case testInspectTypeVarBoundDef] -# inspect2: --show=definition foo.py:9:13 -# inspect2: --show=definition foo.py:8:9 +# inspect2: --show=definition tmp/foo.py:9:13 +# inspect2: 
--show=definition tmp/foo.py:8:9 import foo [file foo.py] from typing import TypeVar @@ -189,13 +189,13 @@ def foo(arg: T) -> T: return arg [out] == -foo.py:7:9:arg -foo.py:4:5:x +tmp/foo.py:7:9:arg +tmp/foo.py:4:5:x [case testInspectTypeVarValuesDef] -# inspect2: --show=definition --force-reload foo.py:13:9 -# inspect2: --show=definition --force-reload foo.py:14:13 -# inspect2: --show=definition foo.py:18:7 +# inspect2: --show=definition --force-reload tmp/foo.py:13:9 +# inspect2: --show=definition --force-reload tmp/foo.py:14:13 +# inspect2: --show=definition tmp/foo.py:18:7 import foo [file foo.py] from typing import TypeVar, Generic @@ -218,12 +218,12 @@ class C(Generic[T]): x.z [out] == -foo.py:5:5:z, tmp/foo.py:9:5:z -foo.py:12:9:arg -foo.py:5:5:z, tmp/foo.py:9:5:z +tmp/foo.py:5:5:z, tmp/foo.py:9:5:z +tmp/foo.py:12:9:arg +tmp/foo.py:5:5:z, tmp/foo.py:9:5:z [case testInspectModuleAttrs] -# inspect2: --show=attrs foo.py:2:1 +# inspect2: --show=attrs tmp/foo.py:2:1 import foo [file foo.py] from pack import bar @@ -239,7 +239,7 @@ class C: ... {"": ["C", "__annotations__", "__doc__", "__file__", "__name__", "__package__", "bar", "x"], "ModuleType": ["__file__", "__getattr__"]} [case testInspectModuleDef] -# inspect2: --show=definition --include-kind foo.py:2:1 +# inspect2: --show=definition --include-kind tmp/foo.py:2:1 import foo [file foo.py] from pack import bar @@ -255,7 +255,7 @@ NameExpr -> tmp/pack/bar.py:1:1:bar MemberExpr -> tmp/pack/bar.py:3:5:x [case testInspectFunctionArgDef] -# inspect2: --show=definition --include-span foo.py:4:13 +# inspect2: --show=definition --include-span tmp/foo.py:4:13 # TODO: for now all arguments have line/column set to function definition. 
import foo [file foo.py] From b064a5c183b53a84d895bb8e3c36a3a74e24be9c Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 30 Oct 2023 11:57:42 +0000 Subject: [PATCH 136/144] Fix dmypy inspect on Windows (#16355) Fixes https://github.com/python/mypy/issues/15780 --- mypy/inspections.py | 24 ++++++++++++++++-------- mypy/test/testutil.py | 5 +++++ test-data/unit/daemon.test | 3 +++ 3 files changed, 24 insertions(+), 8 deletions(-) diff --git a/mypy/inspections.py b/mypy/inspections.py index 45e981a24af2..3e660a0bd7a6 100644 --- a/mypy/inspections.py +++ b/mypy/inspections.py @@ -215,13 +215,6 @@ def __init__( # Module for which inspection was requested. self.module: State | None = None - def parse_location(self, location: str) -> tuple[str, list[int]]: - if location.count(":") not in [2, 4]: - raise ValueError("Format should be file:line:column[:end_line:end_column]") - parts = location.split(":") - module, *rest = parts - return module, [int(p) for p in rest] - def reload_module(self, state: State) -> None: """Reload given module while temporary exporting types.""" old = self.fg_manager.manager.options.export_types @@ -575,7 +568,7 @@ def run_inspection( This can be re-used by various simple inspections. """ try: - file, pos = self.parse_location(location) + file, pos = parse_location(location) except ValueError as err: return {"error": str(err)} @@ -617,3 +610,18 @@ def get_definition(self, location: str) -> dict[str, object]: result["out"] = f"No name or member expressions at {location}" result["status"] = 1 return result + + +def parse_location(location: str) -> tuple[str, list[int]]: + if location.count(":") < 2: + raise ValueError("Format should be file:line:column[:end_line:end_column]") + parts = location.rsplit(":", maxsplit=2) + start, *rest = parts + # Note: we must allow drive prefix like `C:` on Windows. 
+ if start.count(":") < 2: + return start, [int(p) for p in rest] + parts = start.rsplit(":", maxsplit=2) + start, *start_rest = parts + if start.count(":") < 2: + return start, [int(p) for p in start_rest + rest] + raise ValueError("Format should be file:line:column[:end_line:end_column]") diff --git a/mypy/test/testutil.py b/mypy/test/testutil.py index 89184b11a826..571e4d0b11f2 100644 --- a/mypy/test/testutil.py +++ b/mypy/test/testutil.py @@ -3,6 +3,7 @@ import os from unittest import TestCase, mock +from mypy.inspections import parse_location from mypy.util import get_terminal_width @@ -15,3 +16,7 @@ def test_get_terminal_size_in_pty_defaults_to_80(self) -> None: with mock.patch.object(os, "get_terminal_size", return_value=ret): with mock.patch.dict(os.environ, values=mock_environ, clear=True): assert get_terminal_width() == 80 + + def test_parse_location_windows(self) -> None: + assert parse_location(r"C:\test.py:1:1") == (r"C:\test.py", [1, 1]) + assert parse_location(r"C:\test.py:1:1:1:1") == (r"C:\test.py", [1, 1, 1, 1]) diff --git a/test-data/unit/daemon.test b/test-data/unit/daemon.test index ca0cd90911b9..77367eb02bfe 100644 --- a/test-data/unit/daemon.test +++ b/test-data/unit/daemon.test @@ -372,6 +372,9 @@ foo.py:3: error: Incompatible types in assignment (expression has type "str", va $ dmypy inspect foo:1 Format should be file:line:column[:end_line:end_column] == Return code: 2 +$ dmypy inspect foo:1:2:3 +Source file is not a Python file +== Return code: 2 $ dmypy inspect foo.py:1:2:a:b invalid literal for int() with base 10: 'a' == Return code: 2 From ad0e183b0df7cc3dd94d9e1cd6f5710859beda96 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Mon, 30 Oct 2023 12:14:00 +0000 Subject: [PATCH 137/144] Enable Unpack/TypeVarTuple support (#16354) Fixes https://github.com/python/mypy/issues/12280 Fixes https://github.com/python/mypy/issues/14697 In this PR: * Enable `TypeVarTuple` and `Unpack` features. 
* Delete the old blanket `--enable-incomplete-features` flag that was deprecated a year ago. * Switch couple corner cases to `PreciseTupleTypes` feature. * Add the draft docs about the new feature. * Handle a previously unhandled case where variadic tuple appears in string formatting (discovered on mypy self-check, where `PreciseTupleTypes` is already enabled). --------- Co-authored-by: Jelle Zijlstra --- docs/source/command_line.rst | 52 +++++++++++++++++++++++++ mypy/checkexpr.py | 8 ++-- mypy/checkstrformat.py | 19 +++++++++ mypy/main.py | 17 +++----- mypy/options.py | 6 +-- mypy/semanal.py | 5 +-- mypy/test/testcheck.py | 3 -- mypy/test/testfinegrained.py | 3 +- mypy/test/testsemanal.py | 3 +- mypy/test/testtransform.py | 2 - mypy/typeanal.py | 4 +- test-data/unit/check-flags.test | 12 ------ test-data/unit/check-tuples.test | 16 ++++++++ test-data/unit/check-typevar-tuple.test | 3 ++ test-data/unit/cmdline.test | 18 +++++---- 15 files changed, 116 insertions(+), 55 deletions(-) diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst index 5db118334519..a810c35cb77f 100644 --- a/docs/source/command_line.rst +++ b/docs/source/command_line.rst @@ -991,6 +991,58 @@ format into the specified directory. library or specify mypy installation with the setuptools extra ``mypy[reports]``. + +Enabling incomplete/experimental features +***************************************** + +.. option:: --enable-incomplete-feature FEATURE + + Some features may require several mypy releases to implement, for example + due to their complexity, potential for backwards incompatibility, or + ambiguous semantics that would benefit from feedback from the community. + You can enable such features for early preview using this flag. Note that + it is not guaranteed that all features will be ultimately enabled by + default. In *rare cases* we may decide to not go ahead with certain + features. 
+ +List of currently incomplete/experimental features: + +* ``PreciseTupleTypes``: this feature will infer more precise tuple types in + various scenarios. Before variadic types were added to the Python type system + by :pep:`646`, it was impossible to express a type like "a tuple with + at least two integers". The best type available was ``tuple[int, ...]``. + Therefore, mypy applied very lenient checking for variable-length tuples. + Now this type can be expressed as ``tuple[int, int, *tuple[int, ...]]``. + For such more precise types (when explicitly *defined* by a user) mypy, + for example, warns about unsafe index access, and generally handles them + in a type-safe manner. However, to avoid problems in existing code, mypy + does not *infer* these precise types when it technically can. Here are + notable examples where ``PreciseTupleTypes`` infers more precise types: + + .. code-block:: python + + numbers: tuple[int, ...] + + more_numbers = (1, *numbers, 1) + reveal_type(more_numbers) + # Without PreciseTupleTypes: tuple[int, ...] + # With PreciseTupleTypes: tuple[int, *tuple[int, ...], int] + + other_numbers = (1, 1) + numbers + reveal_type(other_numbers) + # Without PreciseTupleTypes: tuple[int, ...] + # With PreciseTupleTypes: tuple[int, int, *tuple[int, ...]] + + if len(numbers) > 2: + reveal_type(numbers) + # Without PreciseTupleTypes: tuple[int, ...] + # With PreciseTupleTypes: tuple[int, int, int, *tuple[int, ...]] + else: + reveal_type(numbers) + # Without PreciseTupleTypes: tuple[int, ...] 
+ # With PreciseTupleTypes: tuple[()] | tuple[int] | tuple[int, int] + + Miscellaneous ************* diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index df6000050986..0207c245b1f9 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -97,7 +97,7 @@ YieldExpr, YieldFromExpr, ) -from mypy.options import TYPE_VAR_TUPLE +from mypy.options import PRECISE_TUPLE_TYPES from mypy.plugin import ( FunctionContext, FunctionSigContext, @@ -3377,7 +3377,7 @@ def visit_op_expr(self, e: OpExpr) -> Type: ): return self.concat_tuples(proper_left_type, proper_right_type) elif ( - TYPE_VAR_TUPLE in self.chk.options.enable_incomplete_feature + PRECISE_TUPLE_TYPES in self.chk.options.enable_incomplete_feature and isinstance(proper_right_type, Instance) and self.chk.type_is_iterable(proper_right_type) ): @@ -3411,7 +3411,7 @@ def visit_op_expr(self, e: OpExpr) -> Type: if is_named_instance(proper_right_type, "builtins.dict"): use_reverse = USE_REVERSE_NEVER - if TYPE_VAR_TUPLE in self.chk.options.enable_incomplete_feature: + if PRECISE_TUPLE_TYPES in self.chk.options.enable_incomplete_feature: # Handle tuple[X, ...] + tuple[Y, Z] = tuple[*tuple[X, ...], Y, Z]. if ( e.op == "+" @@ -4988,7 +4988,7 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type: j += len(tt.items) else: if ( - TYPE_VAR_TUPLE in self.chk.options.enable_incomplete_feature + PRECISE_TUPLE_TYPES in self.chk.options.enable_incomplete_feature and not seen_unpack_in_items ): # Handle (x, *y, z), where y is e.g. tuple[Y, ...]. 
diff --git a/mypy/checkstrformat.py b/mypy/checkstrformat.py index eeb9e7633756..39d44e84a9c1 100644 --- a/mypy/checkstrformat.py +++ b/mypy/checkstrformat.py @@ -47,8 +47,11 @@ TupleType, Type, TypeOfAny, + TypeVarTupleType, TypeVarType, UnionType, + UnpackType, + find_unpack_in_list, get_proper_type, get_proper_types, ) @@ -728,6 +731,22 @@ def check_simple_str_interpolation( rep_types: list[Type] = [] if isinstance(rhs_type, TupleType): rep_types = rhs_type.items + unpack_index = find_unpack_in_list(rep_types) + if unpack_index is not None: + # TODO: we should probably warn about potentially short tuple. + # However, without special-casing for tuple(f(i) for in other_tuple) + # this causes false positive on mypy self-check in report.py. + extras = max(0, len(checkers) - len(rep_types) + 1) + unpacked = rep_types[unpack_index] + assert isinstance(unpacked, UnpackType) + unpacked = get_proper_type(unpacked.type) + if isinstance(unpacked, TypeVarTupleType): + unpacked = get_proper_type(unpacked.upper_bound) + assert ( + isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple" + ) + unpack_items = [unpacked.args[0]] * extras + rep_types = rep_types[:unpack_index] + unpack_items + rep_types[unpack_index + 1 :] elif isinstance(rhs_type, AnyType): return elif isinstance(rhs_type, Instance) and rhs_type.type.fullname == "builtins.tuple": diff --git a/mypy/main.py b/mypy/main.py index 43ab761072ca..1aede530c33e 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -22,7 +22,7 @@ from mypy.find_sources import InvalidSourceList, create_source_list from mypy.fscache import FileSystemCache from mypy.modulefinder import BuildSource, FindModuleCache, SearchPaths, get_search_dirs, mypy_path -from mypy.options import INCOMPLETE_FEATURES, BuildType, Options +from mypy.options import COMPLETE_FEATURES, INCOMPLETE_FEATURES, BuildType, Options from mypy.split_namespace import SplitNamespace from mypy.version import __version__ @@ -1151,10 +1151,7 @@ def 
add_invertible_flag( # --debug-serialize will run tree.serialize() even if cache generation is disabled. # Useful for mypy_primer to detect serialize errors earlier. parser.add_argument("--debug-serialize", action="store_true", help=argparse.SUPPRESS) - # This one is deprecated, but we will keep it for few releases. - parser.add_argument( - "--enable-incomplete-features", action="store_true", help=argparse.SUPPRESS - ) + parser.add_argument( "--disable-bytearray-promotion", action="store_true", help=argparse.SUPPRESS ) @@ -1334,14 +1331,10 @@ def set_strict_flags() -> None: # Validate incomplete features. for feature in options.enable_incomplete_feature: - if feature not in INCOMPLETE_FEATURES: + if feature not in INCOMPLETE_FEATURES | COMPLETE_FEATURES: parser.error(f"Unknown incomplete feature: {feature}") - if options.enable_incomplete_features: - print( - "Warning: --enable-incomplete-features is deprecated, use" - " --enable-incomplete-feature=FEATURE instead" - ) - options.enable_incomplete_feature = list(INCOMPLETE_FEATURES) + if feature in COMPLETE_FEATURES: + print(f"Warning: {feature} is already enabled by default") # Compute absolute path for custom typeshed (if present). 
if options.custom_typeshed_dir is not None: diff --git a/mypy/options.py b/mypy/options.py index 31d5d584f897..8bb20dbd4410 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -69,11 +69,12 @@ class BuildType: } ) - {"debug_cache"} -# Features that are currently incomplete/experimental +# Features that are currently (or were recently) incomplete/experimental TYPE_VAR_TUPLE: Final = "TypeVarTuple" UNPACK: Final = "Unpack" PRECISE_TUPLE_TYPES: Final = "PreciseTupleTypes" -INCOMPLETE_FEATURES: Final = frozenset((TYPE_VAR_TUPLE, UNPACK, PRECISE_TUPLE_TYPES)) +INCOMPLETE_FEATURES: Final = frozenset((PRECISE_TUPLE_TYPES,)) +COMPLETE_FEATURES: Final = frozenset((TYPE_VAR_TUPLE, UNPACK)) class Options: @@ -307,7 +308,6 @@ def __init__(self) -> None: self.dump_type_stats = False self.dump_inference_stats = False self.dump_build_stats = False - self.enable_incomplete_features = False # deprecated self.enable_incomplete_feature: list[str] = [] self.timing_stats: str | None = None self.line_checking_stats: str | None = None diff --git a/mypy/semanal.py b/mypy/semanal.py index bd24c48ed24f..6f322af816ea 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -179,7 +179,7 @@ type_aliases_source_versions, typing_extensions_aliases, ) -from mypy.options import TYPE_VAR_TUPLE, Options +from mypy.options import Options from mypy.patterns import ( AsPattern, ClassPattern, @@ -4417,9 +4417,6 @@ def process_typevartuple_declaration(self, s: AssignmentStmt) -> bool: else: self.fail(f'Unexpected keyword argument "{param_name}" for "TypeVarTuple"', s) - if not self.incomplete_feature_enabled(TYPE_VAR_TUPLE, s): - return False - name = self.extract_typevarlike_name(s, call) if name is None: return False diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index 591421465a97..3ad97ced61f2 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -10,7 +10,6 @@ from mypy.build import Graph from mypy.errors import CompileError from mypy.modulefinder import BuildSource, 
FindModuleCache, SearchPaths -from mypy.options import TYPE_VAR_TUPLE, UNPACK from mypy.test.config import test_data_prefix, test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite, FileOperation, module_from_path from mypy.test.helpers import ( @@ -125,8 +124,6 @@ def run_case_once( # Parse options after moving files (in case mypy.ini is being moved). options = parse_options(original_program_text, testcase, incremental_step) options.use_builtins_fixtures = True - if not testcase.name.endswith("_no_incomplete"): - options.enable_incomplete_feature += [TYPE_VAR_TUPLE, UNPACK] options.show_traceback = True # Enable some options automatically based on test file name. diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py index c517c54286d7..953f91a60df7 100644 --- a/mypy/test/testfinegrained.py +++ b/mypy/test/testfinegrained.py @@ -28,7 +28,7 @@ from mypy.errors import CompileError from mypy.find_sources import create_source_list from mypy.modulefinder import BuildSource -from mypy.options import TYPE_VAR_TUPLE, UNPACK, Options +from mypy.options import Options from mypy.server.mergecheck import check_consistency from mypy.server.update import sort_messages_preserving_file_order from mypy.test.config import test_temp_dir @@ -149,7 +149,6 @@ def get_options(self, source: str, testcase: DataDrivenTestCase, build_cache: bo options.use_fine_grained_cache = self.use_cache and not build_cache options.cache_fine_grained = self.use_cache options.local_partial_types = True - options.enable_incomplete_feature = [TYPE_VAR_TUPLE, UNPACK] # Treat empty bodies safely for these test cases. 
options.allow_empty_bodies = not testcase.name.endswith("_no_empty") if re.search("flags:.*--follow-imports", source) is None: diff --git a/mypy/test/testsemanal.py b/mypy/test/testsemanal.py index 3455f41aa20a..cdecc4739168 100644 --- a/mypy/test/testsemanal.py +++ b/mypy/test/testsemanal.py @@ -10,7 +10,7 @@ from mypy.errors import CompileError from mypy.modulefinder import BuildSource from mypy.nodes import TypeInfo -from mypy.options import TYPE_VAR_TUPLE, UNPACK, Options +from mypy.options import Options from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import ( @@ -45,7 +45,6 @@ def get_semanal_options(program_text: str, testcase: DataDrivenTestCase) -> Opti options.semantic_analysis_only = True options.show_traceback = True options.python_version = PYTHON3_VERSION - options.enable_incomplete_feature = [TYPE_VAR_TUPLE, UNPACK] options.force_uppercase_builtins = True return options diff --git a/mypy/test/testtransform.py b/mypy/test/testtransform.py index ba9fe8668fb4..9388dca02c7a 100644 --- a/mypy/test/testtransform.py +++ b/mypy/test/testtransform.py @@ -5,7 +5,6 @@ from mypy import build from mypy.errors import CompileError from mypy.modulefinder import BuildSource -from mypy.options import TYPE_VAR_TUPLE, UNPACK from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, normalize_error_messages, parse_options @@ -38,7 +37,6 @@ def test_transform(testcase: DataDrivenTestCase) -> None: options = parse_options(src, testcase, 1) options.use_builtins_fixtures = True options.semantic_analysis_only = True - options.enable_incomplete_feature = [TYPE_VAR_TUPLE, UNPACK] options.show_traceback = True options.force_uppercase_builtins = True result = build.build( diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 03579404aac9..d238a452e7a9 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ 
-35,7 +35,7 @@ check_arg_names, get_nongen_builtins, ) -from mypy.options import UNPACK, Options +from mypy.options import Options from mypy.plugin import AnalyzeTypeContext, Plugin, TypeAnalyzerPluginInterface from mypy.semanal_shared import SemanticAnalyzerCoreInterface, paramspec_args, paramspec_kwargs from mypy.tvar_scope import TypeVarLikeScope @@ -664,8 +664,6 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Typ # In most contexts, TypeGuard[...] acts as an alias for bool (ignoring its args) return self.named_type("builtins.bool") elif fullname in ("typing.Unpack", "typing_extensions.Unpack"): - if not self.api.incomplete_feature_enabled(UNPACK, t): - return AnyType(TypeOfAny.from_error) if len(t.args) != 1: self.fail("Unpack[...] requires exactly one type argument", t) return AnyType(TypeOfAny.from_error) diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index 546d02a07ad0..04adaca317c1 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -2190,18 +2190,6 @@ x: int = "" # E: Incompatible types in assignment (expression has type "str", v # flags: --hide-error-codes x: int = "" # E: Incompatible types in assignment (expression has type "str", variable has type "int") -[case testTypeVarTupleDisabled_no_incomplete] -from typing_extensions import TypeVarTuple -Ts = TypeVarTuple("Ts") # E: "TypeVarTuple" support is experimental, use --enable-incomplete-feature=TypeVarTuple to enable -[builtins fixtures/tuple.pyi] - -[case testTypeVarTupleEnabled_no_incomplete] -# flags: --enable-incomplete-feature=TypeVarTuple -from typing_extensions import TypeVarTuple -Ts = TypeVarTuple("Ts") # OK -[builtins fixtures/tuple.pyi] - - [case testDisableBytearrayPromotion] # flags: --disable-bytearray-promotion def f(x: bytes) -> None: ... 
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index 7070ead43746..4f468b59fc3f 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -1100,12 +1100,28 @@ reveal_type(b) # N: Revealed type is "Tuple[builtins.int, builtins.int, builtin [case testTupleWithStarExpr2] a = [1] b = (0, *a) +reveal_type(b) # N: Revealed type is "builtins.tuple[builtins.int, ...]" +[builtins fixtures/tuple.pyi] + +[case testTupleWithStarExpr2Precise] +# flags: --enable-incomplete-feature=PreciseTupleTypes +a = [1] +b = (0, *a) reveal_type(b) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.int, ...]]]" [builtins fixtures/tuple.pyi] [case testTupleWithStarExpr3] a = [''] b = (0, *a) +reveal_type(b) # N: Revealed type is "builtins.tuple[builtins.object, ...]" +c = (*a, '') +reveal_type(c) # N: Revealed type is "builtins.tuple[builtins.str, ...]" +[builtins fixtures/tuple.pyi] + +[case testTupleWithStarExpr3Precise] +# flags: --enable-incomplete-feature=PreciseTupleTypes +a = [''] +b = (0, *a) reveal_type(b) # N: Revealed type is "Tuple[builtins.int, Unpack[builtins.tuple[builtins.str, ...]]]" c = (*a, '') reveal_type(c) # N: Revealed type is "Tuple[Unpack[builtins.tuple[builtins.str, ...]], builtins.str]" diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index 7b8a22313b36..a51b535a873c 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -1653,6 +1653,7 @@ def foo(arg: Tuple[int, Unpack[Ts], str]) -> None: [builtins fixtures/tuple.pyi] [case testPackingVariadicTuplesHomogeneous] +# flags: --enable-incomplete-feature=PreciseTupleTypes from typing import Tuple from typing_extensions import Unpack @@ -1689,6 +1690,7 @@ def foo(arg: Tuple[int, Unpack[Ts], str]) -> None: [builtins fixtures/isinstancelist.pyi] [case testVariadicTupleInTupleContext] +# flags: --enable-incomplete-feature=PreciseTupleTypes from 
typing import Tuple, Optional from typing_extensions import TypeVarTuple, Unpack @@ -1701,6 +1703,7 @@ vt2 = 1, *test(), 2 # E: Need type annotation for "vt2" [builtins fixtures/tuple.pyi] [case testVariadicTupleConcatenation] +# flags: --enable-incomplete-feature=PreciseTupleTypes from typing import Tuple from typing_extensions import TypeVarTuple, Unpack diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test index 91242eb62fcf..f286f4781ed5 100644 --- a/test-data/unit/cmdline.test +++ b/test-data/unit/cmdline.test @@ -1421,14 +1421,6 @@ b \d+ b\.c \d+ .* -[case testCmdlineEnableIncompleteFeatures] -# cmd: mypy --enable-incomplete-features a.py -[file a.py] -pass -[out] -Warning: --enable-incomplete-features is deprecated, use --enable-incomplete-feature=FEATURE instead -== Return code: 0 - [case testShadowTypingModuleEarlyLoad] # cmd: mypy dir [file dir/__init__.py] @@ -1585,3 +1577,13 @@ disable_error_code = always_true = MY_VAR, [out] + +[case testTypeVarTupleUnpackEnabled] +# cmd: mypy --enable-incomplete-feature=TypeVarTuple --enable-incomplete-feature=Unpack a.py +[file a.py] +from typing_extensions import TypeVarTuple +Ts = TypeVarTuple("Ts") +[out] +Warning: TypeVarTuple is already enabled by default +Warning: Unpack is already enabled by default +== Return code: 0 From 5624f401b3786ebdbe167c27297ed778cce3faa5 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 30 Oct 2023 14:27:43 +0000 Subject: [PATCH 138/144] Fix daemon crash caused by deleted submodule (#16370) If a submodule has been deleted while using a fine-grained cache, the daemon could crash during fixup, since there could be a symbol table entry in a parent package that would appear to refer to itself. Handle the case by adding a placeholder symbol table entry instead. Eventually the parent package will be reprocessed and the symbol table will be completed. 
--- mypy/fixup.py | 19 +++++++++++++++++-- mypy/nodes.py | 2 ++ test-data/unit/fine-grained.test | 19 +++++++++++++++++++ 3 files changed, 38 insertions(+), 2 deletions(-) diff --git a/mypy/fixup.py b/mypy/fixup.py index 5ffc47120734..02c6ab93f29e 100644 --- a/mypy/fixup.py +++ b/mypy/fixup.py @@ -128,8 +128,23 @@ def visit_symbol_table(self, symtab: SymbolTable, table_fullname: str) -> None: cross_ref, self.modules, raise_on_missing=not self.allow_missing ) if stnode is not None: - assert stnode.node is not None, (table_fullname + "." + key, cross_ref) - value.node = stnode.node + if stnode is value: + # The node seems to refer to itself, which can mean that + # the target is a deleted submodule of the current module, + # and thus lookup falls back to the symbol table of the parent + # package. Here's how this may happen: + # + # pkg/__init__.py: + # from pkg import sub + # + # Now if pkg.sub is deleted, the pkg.sub symbol table entry + # appears to refer to itself. Replace the entry with a + # placeholder to avoid a crash. We can't delete the entry, + # as it would stop dependency propagation. + value.node = Var(key + "@deleted") + else: + assert stnode.node is not None, (table_fullname + "." + key, cross_ref) + value.node = stnode.node elif not self.allow_missing: assert False, f"Could not find cross-ref {cross_ref}" else: diff --git a/mypy/nodes.py b/mypy/nodes.py index 1d7b3e3be84b..d65a23a6b7fe 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -3824,6 +3824,8 @@ def __str__(self) -> str: # Include declared type of variables and functions. 
if self.type is not None: s += f" : {self.type}" + if self.cross_ref: + s += f" cross_ref:{self.cross_ref}" return s def serialize(self, prefix: str, name: str) -> JsonDict: diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 5dc42bd62d9b..165a2089b466 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -10486,3 +10486,22 @@ reveal_type(s) == == b.py:2: note: Revealed type is "builtins.str" + +[case testRenameSubModule] +import a + +[file a.py] +import pkg.sub + +[file pkg/__init__.py] +[file pkg/sub/__init__.py] +from pkg.sub import mod +[file pkg/sub/mod.py] + +[file pkg/sub/__init__.py.2] +from pkg.sub import modb +[delete pkg/sub/mod.py.2] +[file pkg/sub/modb.py.2] + +[out] +== From f68f46351e30644aefd19900ba1634595adc1d09 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 1 Nov 2023 15:33:45 +0000 Subject: [PATCH 139/144] Fix file reloading in dmypy with --export-types (#16359) Fixes https://github.com/python/mypy/issues/15794 Unfortunately, this requires to pass `--export-types` to `dmypy run` if one wants to inspect a file that was previously kicked out of the build. 
--- mypy/dmypy_server.py | 52 +++++++++++++++++++++++++++++++----- mypy/test/testfinegrained.py | 3 ++- test-data/unit/daemon.test | 27 +++++++++++++++++++ 3 files changed, 74 insertions(+), 8 deletions(-) diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index 0db349b5bf82..42236497f275 100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -393,15 +393,21 @@ def cmd_recheck( t1 = time.time() manager = self.fine_grained_manager.manager manager.log(f"fine-grained increment: cmd_recheck: {t1 - t0:.3f}s") - self.options.export_types = export_types + old_export_types = self.options.export_types + self.options.export_types = self.options.export_types or export_types if not self.following_imports(): - messages = self.fine_grained_increment(sources, remove, update) + messages = self.fine_grained_increment( + sources, remove, update, explicit_export_types=export_types + ) else: assert remove is None and update is None - messages = self.fine_grained_increment_follow_imports(sources) + messages = self.fine_grained_increment_follow_imports( + sources, explicit_export_types=export_types + ) res = self.increment_output(messages, sources, is_tty, terminal_width) self.flush_caches() self.update_stats(res) + self.options.export_types = old_export_types return res def check( @@ -412,17 +418,21 @@ def check( If is_tty is True format the output nicely with colors and summary line (unless disabled in self.options). Also pass the terminal_width to formatter. 
""" - self.options.export_types = export_types + old_export_types = self.options.export_types + self.options.export_types = self.options.export_types or export_types if not self.fine_grained_manager: res = self.initialize_fine_grained(sources, is_tty, terminal_width) else: if not self.following_imports(): - messages = self.fine_grained_increment(sources) + messages = self.fine_grained_increment(sources, explicit_export_types=export_types) else: - messages = self.fine_grained_increment_follow_imports(sources) + messages = self.fine_grained_increment_follow_imports( + sources, explicit_export_types=export_types + ) res = self.increment_output(messages, sources, is_tty, terminal_width) self.flush_caches() self.update_stats(res) + self.options.export_types = old_export_types return res def flush_caches(self) -> None: @@ -535,6 +545,7 @@ def fine_grained_increment( sources: list[BuildSource], remove: list[str] | None = None, update: list[str] | None = None, + explicit_export_types: bool = False, ) -> list[str]: """Perform a fine-grained type checking increment. @@ -545,6 +556,8 @@ def fine_grained_increment( sources: sources passed on the command line remove: paths of files that have been removed update: paths of files that have been changed or created + explicit_export_types: --export-type was passed in a check command + (as opposite to being set in dmypy start) """ assert self.fine_grained_manager is not None manager = self.fine_grained_manager.manager @@ -559,6 +572,10 @@ def fine_grained_increment( # Use the remove/update lists to update fswatcher. # This avoids calling stat() for unchanged files. changed, removed = self.update_changed(sources, remove or [], update or []) + if explicit_export_types: + # If --export-types is given, we need to force full re-checking of all + # explicitly passed files, since we need to visit each expression. 
+ add_all_sources_to_changed(sources, changed) changed += self.find_added_suppressed( self.fine_grained_manager.graph, set(), manager.search_paths ) @@ -577,7 +594,9 @@ def fine_grained_increment( self.previous_sources = sources return messages - def fine_grained_increment_follow_imports(self, sources: list[BuildSource]) -> list[str]: + def fine_grained_increment_follow_imports( + self, sources: list[BuildSource], explicit_export_types: bool = False + ) -> list[str]: """Like fine_grained_increment, but follow imports.""" t0 = time.time() @@ -603,6 +622,9 @@ def fine_grained_increment_follow_imports(self, sources: list[BuildSource]) -> l changed, new_files = self.find_reachable_changed_modules( sources, graph, seen, changed_paths ) + if explicit_export_types: + # Same as in fine_grained_increment(). + add_all_sources_to_changed(sources, changed) sources.extend(new_files) # Process changes directly reachable from roots. @@ -1011,6 +1033,22 @@ def find_all_sources_in_build( return result +def add_all_sources_to_changed(sources: list[BuildSource], changed: list[tuple[str, str]]) -> None: + """Add all (explicit) sources to the list changed files in place. + + Use this when re-processing of unchanged files is needed (e.g. for + the purpose of exporting types for inspections). + """ + changed_set = set(changed) + changed.extend( + [ + (bs.module, bs.path) + for bs in sources + if bs.path and (bs.module, bs.path) not in changed_set + ] + ) + + def fix_module_deps(graph: mypy.build.Graph) -> None: """After an incremental update, update module dependencies to reflect the new state. 
diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py index 953f91a60df7..f61a58c425fc 100644 --- a/mypy/test/testfinegrained.py +++ b/mypy/test/testfinegrained.py @@ -149,6 +149,7 @@ def get_options(self, source: str, testcase: DataDrivenTestCase, build_cache: bo options.use_fine_grained_cache = self.use_cache and not build_cache options.cache_fine_grained = self.use_cache options.local_partial_types = True + options.export_types = "inspect" in testcase.file # Treat empty bodies safely for these test cases. options.allow_empty_bodies = not testcase.name.endswith("_no_empty") if re.search("flags:.*--follow-imports", source) is None: @@ -163,7 +164,7 @@ def get_options(self, source: str, testcase: DataDrivenTestCase, build_cache: bo return options def run_check(self, server: Server, sources: list[BuildSource]) -> list[str]: - response = server.check(sources, export_types=True, is_tty=False, terminal_width=-1) + response = server.check(sources, export_types=False, is_tty=False, terminal_width=-1) out = response["out"] or response["err"] assert isinstance(out, str) return out.splitlines() diff --git a/test-data/unit/daemon.test b/test-data/unit/daemon.test index 77367eb02bfe..ca2c969d2f5e 100644 --- a/test-data/unit/daemon.test +++ b/test-data/unit/daemon.test @@ -360,6 +360,33 @@ def bar() -> None: x = foo('abc') # type: str foo(arg='xyz') +[case testDaemonInspectCheck] +$ dmypy start +Daemon started +$ dmypy check foo.py +Success: no issues found in 1 source file +$ dmypy check foo.py --export-types +Success: no issues found in 1 source file +$ dmypy inspect foo.py:1:1 +"int" +[file foo.py] +x = 1 + +[case testDaemonInspectRun] +$ dmypy run test1.py +Daemon started +Success: no issues found in 1 source file +$ dmypy run test2.py +Success: no issues found in 1 source file +$ dmypy run test1.py --export-types +Success: no issues found in 1 source file +$ dmypy inspect test1.py:1:1 +"int" +[file test1.py] +a: int +[file test2.py] +a: str + [case 
testDaemonGetType] $ dmypy start --log-file log.txt -- --follow-imports=error --no-error-summary --python-version 3.8 Daemon started From 681e54cfe1642adddc41c4ff11198b8bc955d5af Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Wed, 1 Nov 2023 10:44:58 +0000 Subject: [PATCH 140/144] Fix crash on unpack call special-casing (#16381) Fixes https://github.com/python/mypy/issues/16380 Fix is quite straightforward, what was an `assert` really needs to be an `if`. --------- Co-authored-by: Jelle Zijlstra --- mypy/checkexpr.py | 38 +++++++++++-------------- test-data/unit/check-typevar-tuple.test | 22 ++++++++++++++ 2 files changed, 38 insertions(+), 22 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 0207c245b1f9..95700a52af02 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2440,34 +2440,28 @@ def check_argument_types( # the suffices to the tuple, e.g. a single actual like # Tuple[Unpack[Ts], int] expanded_tuple = False + actual_kinds = [arg_kinds[a] for a in actuals] if len(actuals) > 1: - first_actual_arg_type = get_proper_type(arg_types[actuals[0]]) + p_actual_type = get_proper_type(arg_types[actuals[0]]) if ( - isinstance(first_actual_arg_type, TupleType) - and len(first_actual_arg_type.items) == 1 - and isinstance(first_actual_arg_type.items[0], UnpackType) + isinstance(p_actual_type, TupleType) + and len(p_actual_type.items) == 1 + and isinstance(p_actual_type.items[0], UnpackType) + and actual_kinds == [nodes.ARG_STAR] + [nodes.ARG_POS] * (len(actuals) - 1) ): - # TODO: use walrus operator - actual_types = [first_actual_arg_type.items[0]] + [ - arg_types[a] for a in actuals[1:] - ] - actual_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * (len(actuals) - 1) - - # If we got here, the callee was previously inferred to have a suffix. 
- assert isinstance(orig_callee_arg_type, UnpackType) - assert isinstance(orig_callee_arg_type.type, ProperType) and isinstance( - orig_callee_arg_type.type, TupleType - ) - assert orig_callee_arg_type.type.items - callee_arg_types = orig_callee_arg_type.type.items - callee_arg_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * ( - len(orig_callee_arg_type.type.items) - 1 - ) - expanded_tuple = True + actual_types = [p_actual_type.items[0]] + [arg_types[a] for a in actuals[1:]] + if isinstance(orig_callee_arg_type, UnpackType): + p_callee_type = get_proper_type(orig_callee_arg_type.type) + if isinstance(p_callee_type, TupleType): + assert p_callee_type.items + callee_arg_types = p_callee_type.items + callee_arg_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * ( + len(p_callee_type.items) - 1 + ) + expanded_tuple = True if not expanded_tuple: actual_types = [arg_types[a] for a in actuals] - actual_kinds = [arg_kinds[a] for a in actuals] if isinstance(orig_callee_arg_type, UnpackType): unpacked_type = get_proper_type(orig_callee_arg_type.type) if isinstance(unpacked_type, TupleType): diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index a51b535a873c..e85863f0ed04 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -2185,3 +2185,25 @@ def test2( # E: Missing named argument "b" return func(*args, **kwargs) [builtins fixtures/tuple.pyi] + +[case testUnpackTupleSpecialCaseNoCrash] +from typing import Tuple, TypeVar +from typing_extensions import Unpack + +T = TypeVar("T") + +def foo(*x: object) -> None: ... +def bar(*x: int) -> None: ... +def baz(*x: T) -> T: ... 
+ +keys: Tuple[Unpack[Tuple[int, ...]]] + +foo(keys, 1) +foo(*keys, 1) + +bar(keys, 1) # E: Argument 1 to "bar" has incompatible type "Tuple[Unpack[Tuple[int, ...]]]"; expected "int" +bar(*keys, 1) # OK + +reveal_type(baz(keys, 1)) # N: Revealed type is "builtins.object" +reveal_type(baz(*keys, 1)) # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] From 8813968abb657113df5edfa207db46b0649c9dce Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Sat, 4 Nov 2023 23:42:22 +0000 Subject: [PATCH 141/144] Fix type narrowing in lambda expressions (#16407) Fixes https://github.com/python/mypy/issues/4297 Fix is straightforward: without properly pushing lambda expression on the stack, the previous fix @JukkaL added for nested functions doesn't work for lambdas (it thinks that we are at global scope). --- mypy/checkexpr.py | 3 ++- test-data/unit/check-inference-context.test | 13 +++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 95700a52af02..056b2f7bd2c6 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -5195,7 +5195,8 @@ def visit_lambda_expr(self, e: LambdaExpr) -> Type: else: # Type context available. self.chk.return_types.append(inferred_type.ret_type) - self.chk.check_func_item(e, type_override=type_override) + with self.chk.tscope.function_scope(e): + self.chk.check_func_item(e, type_override=type_override) if not self.chk.has_type(e.expr()): # TODO: return expression must be accepted before exiting function scope. 
self.accept(e.expr(), allow_none_return=True) diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test index a933acbf7f32..afe6548df2d4 100644 --- a/test-data/unit/check-inference-context.test +++ b/test-data/unit/check-inference-context.test @@ -1482,3 +1482,16 @@ b: Any i = i if isinstance(i, int) else b reveal_type(i) # N: Revealed type is "Union[Any, builtins.int]" [builtins fixtures/isinstance.pyi] + +[case testLambdaInferenceUsesNarrowedTypes] +from typing import Optional, Callable + +def f1(key: Callable[[], str]) -> None: ... +def f2(key: object) -> None: ... + +def g(b: Optional[str]) -> None: + if b: + f1(lambda: reveal_type(b)) # N: Revealed type is "builtins.str" + z: Callable[[], str] = lambda: reveal_type(b) # N: Revealed type is "builtins.str" + f2(lambda: reveal_type(b)) # N: Revealed type is "builtins.str" + lambda: reveal_type(b) # N: Revealed type is "builtins.str" From c22294a80b000ea673e407994ac5111644944486 Mon Sep 17 00:00:00 2001 From: robjhornby Date: Fri, 10 Nov 2023 00:59:16 +0000 Subject: [PATCH 142/144] Handle TypeVarTupleType when checking overload constraints (#16428) Fixes https://github.com/python/mypy/issues/16427 The test case added in the first commit crashes. The second commit addresses the crash - I don't know whether this fix is correct, it just happens to stop the crash but it leads to a code branch which just `continue`s out of a for loop iteration, so it might be bypassing something it shouldn't. I don't completely understand it. 
--------- Co-authored-by: Ivan Levkivskyi --- mypy/constraints.py | 2 +- test-data/unit/check-typevar-tuple.test | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/mypy/constraints.py b/mypy/constraints.py index 49e542a49e56..88ede372e011 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -949,7 +949,7 @@ def visit_instance(self, template: Instance) -> list[Constraint]: for item in actual.items: if isinstance(item, UnpackType): unpacked = get_proper_type(item.type) - if isinstance(unpacked, TypeVarType): + if isinstance(unpacked, TypeVarTupleType): # Cannot infer anything for T from [T, ...] <: *Ts continue assert ( diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index e85863f0ed04..25babf442d21 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -1789,6 +1789,24 @@ def test(a: Container[Any], b: Container[int], c: Container[str]): reveal_type(build(b, c)) # N: Revealed type is "__main__.Array[builtins.int, builtins.str]" [builtins fixtures/tuple.pyi] +[case testTypeVarTupleOverloadArbitraryLength] +from typing import Any, Tuple, TypeVar, TypeVarTuple, Unpack, overload + +T = TypeVar("T") +Ts = TypeVarTuple("Ts") +@overload +def add(self: Tuple[Unpack[Ts]], other: Tuple[T]) -> Tuple[Unpack[Ts], T]: + ... +@overload +def add(self: Tuple[T, ...], other: Tuple[T, ...]) -> Tuple[T, ...]: + ... +def add(self: Any, other: Any) -> Any: + ... 
+def test(a: Tuple[int, str], b: Tuple[bool], c: Tuple[bool, ...]): + reveal_type(add(a, b)) # N: Revealed type is "Tuple[builtins.int, builtins.str, builtins.bool]" + reveal_type(add(b, c)) # N: Revealed type is "builtins.tuple[builtins.bool, ...]" +[builtins fixtures/tuple.pyi] + [case testTypeVarTupleIndexOldStyleNonNormalizedAndNonLiteral] from typing import Any, Tuple from typing_extensions import Unpack From 62bcae2d9bad12c5d3b5dda23dc031e1c7ddf136 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Fri, 10 Nov 2023 11:32:05 +0000 Subject: [PATCH 143/144] Fix handling of tuple type context with unpacks (#16444) Fixes https://github.com/python/mypy/issues/16425 Fix is straightforward. --- mypy/checkexpr.py | 10 +++++----- test-data/unit/check-typevar-tuple.test | 10 ++++++++++ 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 056b2f7bd2c6..c87d1f6cd31c 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -4902,7 +4902,7 @@ def tuple_context_matches(self, expr: TupleExpr, ctx: TupleType) -> bool: return len([e for e in expr.items if not isinstance(e, StarExpr)]) <= len(ctx.items) # For variadic context, the only easy case is when structure matches exactly. # TODO: try using tuple type context in more cases. - if len([e for e in expr.items if not isinstance(e, StarExpr)]) != 1: + if len([e for e in expr.items if isinstance(e, StarExpr)]) != 1: return False expr_star_index = next(i for i, lv in enumerate(expr.items) if isinstance(lv, StarExpr)) return len(expr.items) == len(ctx.items) and ctx_unpack_index == expr_star_index @@ -4941,6 +4941,9 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type: if type_context_items is not None: unpack_in_context = find_unpack_in_list(type_context_items) is not None seen_unpack_in_items = False + allow_precise_tuples = ( + unpack_in_context or PRECISE_TUPLE_TYPES in self.chk.options.enable_incomplete_feature + ) # Infer item types. 
Give up if there's a star expression # that's not a Tuple. @@ -4981,10 +4984,7 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type: # result in an error later, just do something predictable here. j += len(tt.items) else: - if ( - PRECISE_TUPLE_TYPES in self.chk.options.enable_incomplete_feature - and not seen_unpack_in_items - ): + if allow_precise_tuples and not seen_unpack_in_items: # Handle (x, *y, z), where y is e.g. tuple[Y, ...]. if isinstance(tt, Instance) and self.chk.type_is_iterable(tt): item_type = self.chk.iterable_item_type(tt, e) diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index 25babf442d21..487f22699724 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -2225,3 +2225,13 @@ bar(*keys, 1) # OK reveal_type(baz(keys, 1)) # N: Revealed type is "builtins.object" reveal_type(baz(*keys, 1)) # N: Revealed type is "builtins.int" [builtins fixtures/tuple.pyi] + +[case testVariadicTupleContextNoCrash] +from typing import Tuple, Unpack + +x: Tuple[int, Unpack[Tuple[int, ...]]] = () # E: Incompatible types in assignment (expression has type "Tuple[()]", variable has type "Tuple[int, Unpack[Tuple[int, ...]]]") +y: Tuple[int, Unpack[Tuple[int, ...]]] = (1, 2) +z: Tuple[int, Unpack[Tuple[int, ...]]] = (1,) +w: Tuple[int, Unpack[Tuple[int, ...]]] = (1, *[2, 3, 4]) +t: Tuple[int, Unpack[Tuple[int, ...]]] = (1, *(2, 3, 4)) +[builtins fixtures/tuple.pyi] From f6b9972329d5d68f6defc92a10cc4c3bc339c27b Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 10 Nov 2023 12:01:51 +0000 Subject: [PATCH 144/144] Remove +dev from version --- mypy/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/version.py b/mypy/version.py index 7cfc68d6e553..c75c9e67a3e1 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -8,7 +8,7 @@ # - Release versions have the form "1.2.3". # - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440). 
# - Before 1.0 we had the form "0.NNN". -__version__ = "1.7.0+dev" +__version__ = "1.7.0" base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))