From d25f0741e015eb01d360221f0a6565035d3446dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9rome=20Perrin?= Date: Fri, 13 Dec 2019 05:08:56 +0900 Subject: [PATCH 001/117] stubgen: fix return type of annotated __init__ (#8135) When `__init__` had partial type annotations, for parameters but not for the return type, the return type was annotated as Any, which is reported as an error by mypy when checking the generated stubs. Make sure to annotate the return value as None also in the case of existing type annotations. Fixes #8110 --- mypy/stubgen.py | 4 ++-- test-data/unit/stubgen.test | 8 ++++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index 1b59061b237f..b86da770b4d3 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -602,14 +602,14 @@ def visit_func_def(self, o: FuncDef, is_abstract: bool = False) -> None: arg = name + annotation args.append(arg) retname = None - if isinstance(o.unanalyzed_type, CallableType): + if o.name != '__init__' and isinstance(o.unanalyzed_type, CallableType): retname = self.print_annotation(o.unanalyzed_type.ret_type) elif isinstance(o, FuncDef) and (o.is_abstract or o.name in METHODS_WITH_RETURN_VALUE): # Always assume abstract methods return Any unless explicitly annotated. Also # some dunder methods should not have a None return type. retname = self.typing_name('Any') self.add_typing_import("Any") - elif o.name == '__init__' or not has_return_statement(o) and not is_abstract: + elif not has_return_statement(o) and not is_abstract: retname = 'None' retfield = '' if retname is not None: diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index f201e3dcb3f7..6a3adf3d699b 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -163,6 +163,14 @@ class C: class C: x: int = ... +[case testInitTypeAnnotationPreserved] +class C: + def __init__(self, x: str): + pass +[out] +class C: + def __init__(self, x: str) -> None: ... 
+ [case testSelfAssignment] class C: def __init__(self): From 24f562a6c207bfc69c08b536c7a1020dfa0329f9 Mon Sep 17 00:00:00 2001 From: HeShan Date: Fri, 13 Dec 2019 07:01:39 +0800 Subject: [PATCH 002/117] Ignore files passed to dmypy start (#8133) Fixes #8088 --- mypy/dmypy_server.py | 8 +++----- test-data/unit/daemon.test | 7 +++++++ 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index 57aeee52a7b9..4899acd8f0a6 100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -131,11 +131,9 @@ def daemonize(options: Options, def process_start_options(flags: List[str], allow_sources: bool) -> Options: - sources, options = mypy.main.process_options(['-i'] + flags, - require_targets=False, - server_options=True) - if sources and not allow_sources: - sys.exit("dmypy: start/restart does not accept sources") + _, options = mypy.main.process_options( + ['-i'] + flags, require_targets=False, server_options=True + ) if options.report_dirs: sys.exit("dmypy: start/restart cannot generate reports") if options.junit_xml: diff --git a/test-data/unit/daemon.test b/test-data/unit/daemon.test index db985864354d..d7dad66b5ef3 100644 --- a/test-data/unit/daemon.test +++ b/test-data/unit/daemon.test @@ -28,6 +28,13 @@ Daemon stopped [file foo.py] def f(): pass +[case testDaemonIgnoreConfigFiles] +$ dmypy start -- --follow-imports=error +Daemon started +[file mypy.ini] +\[mypy] +files = ./foo.py + [case testDaemonRunRestart] $ dmypy run -- foo.py --follow-imports=error Daemon started From c957ac8d6b448dccd88889fa833aab3ae7be8a5e Mon Sep 17 00:00:00 2001 From: lazytype Date: Fri, 13 Dec 2019 15:19:31 -0500 Subject: [PATCH 003/117] Exclude "__order__" attribute from Enum Union expansion (#8140) --- mypy/checkmember.py | 6 ++++++ mypyc/genops.py | 4 ++-- test-data/unit/check-enum.test | 34 ++++++++++++++++++++++++++++++++++ 3 files changed, 42 insertions(+), 2 deletions(-) diff --git a/mypy/checkmember.py b/mypy/checkmember.py 
index c0f6ce2081f2..d0c705e2d7b3 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -705,6 +705,12 @@ def analyze_class_attribute_access(itype: Instance, check_final_member(name, info, mx.msg, mx.context) if info.is_enum and not (mx.is_lvalue or is_decorated or is_method): + # Skip "_order_" and "__order__", since Enum will remove it + if name in ("_order_", "__order__"): + return mx.msg.has_no_attr( + mx.original_type, itype, name, mx.context, mx.module_symbol_table + ) + enum_literal = LiteralType(name, fallback=itype) # When we analyze enums, the corresponding Instance is always considered to be erased # due to how the signature of Enum.__new__ is `(cls: Type[_T], value: object) -> _T` diff --git a/mypyc/genops.py b/mypyc/genops.py index 61b6ec9e1f0d..6f16b270d694 100644 --- a/mypyc/genops.py +++ b/mypyc/genops.py @@ -1445,8 +1445,8 @@ def add_non_ext_class_attr(self, non_ext: NonExtClassInfo, lvalue: NameExpr, if ( cdef.info.bases and cdef.info.bases[0].type.fullname == 'enum.Enum' - # Skip "_order_", since Enum will remove it - and lvalue.name != '_order_' + # Skip "_order_" and "__order__", since Enum will remove it + and lvalue.name not in ('_order_', '__order__') ): attr_to_cache.append(lvalue) diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test index 5e5eae417d26..81715b3eec54 100644 --- a/test-data/unit/check-enum.test +++ b/test-data/unit/check-enum.test @@ -659,6 +659,40 @@ else: reveal_type(y) # No output here: this branch is unreachable [builtins fixtures/bool.pyi] +[case testEnumReachabilityChecksWithOrdering] +from enum import Enum +from typing_extensions import Literal + +class Foo(Enum): + _order_ = "A B" + A = 1 + B = 2 + +Foo._order_ # E: "Type[Foo]" has no attribute "_order_" + +x: Literal[Foo.A, Foo.B] +if x is Foo.A: + reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' +elif x is Foo.B: + reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.B]' +else: + reveal_type(x) # No output here: 
this branch is unreachable + +class Bar(Enum): + __order__ = "A B" + A = 1 + B = 2 + +Bar.__order__ # E: "Type[Bar]" has no attribute "__order__" + +y: Literal[Bar.A, Bar.B] +if y is Bar.A: + reveal_type(y) # N: Revealed type is 'Literal[__main__.Bar.A]' +elif y is Bar.B: + reveal_type(y) # N: Revealed type is 'Literal[__main__.Bar.B]' +else: + reveal_type(y) # No output here: this branch is unreachable + [case testEnumReachabilityChecksIndirect] from enum import Enum from typing_extensions import Literal, Final From 8a08e78ff58d94d5376d856c75793dd0b05eda24 Mon Sep 17 00:00:00 2001 From: Xuanda Yang Date: Sat, 14 Dec 2019 04:56:07 +0800 Subject: [PATCH 004/117] Fix incorrect type inference when inherited from Any class (#8019) Fixes #8001 --- mypy/checkexpr.py | 8 ++++++++ test-data/unit/check-classes.test | 21 +++++++++++++++++++++ 2 files changed, 29 insertions(+) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 8c05a0dff9a9..35c58478ce1e 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2470,6 +2470,14 @@ def lookup_definer(typ: Instance, attr_name: str) -> Optional[str]: else: return result + # We finish invoking above operators and no early return happens. Therefore, + # we check if either the LHS or the RHS is Instance and fallbacks to Any, + # if so, we also return Any + if ((isinstance(left_type, Instance) and left_type.type.fallback_to_any) or + (isinstance(right_type, Instance) and right_type.type.fallback_to_any)): + any_type = AnyType(TypeOfAny.special_form) + return any_type, any_type + # STEP 4b: # Sometimes, the variants list is empty. In that case, we fall-back to attempting to # call the __op__ method (even though it's missing). 
diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 557e992ef1dd..6681d30df76a 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -6575,3 +6575,24 @@ class C: from typing import Callable class C: x: Callable[[C], int] = lambda x: x.y.g() # E: "C" has no attribute "y" + +[case testOpWithInheritedFromAny] +from typing import Any +C: Any +class D(C): + pass + +class D1(C): + def __add__(self, rhs: float) -> D1: + return self + +reveal_type(0.5 + C) # N: Revealed type is 'Any' + +reveal_type(0.5 + D()) # N: Revealed type is 'Any' +reveal_type(D() + 0.5) # N: Revealed type is 'Any' +reveal_type("str" + D()) # N: Revealed type is 'builtins.str' +reveal_type(D() + "str") # N: Revealed type is 'Any' + + +reveal_type(0.5 + D1()) # N: Revealed type is 'Any' +reveal_type(D1() + 0.5) # N: Revealed type is '__main__.D1' From a918ce82301fc1b81f535715aa849d54f2478974 Mon Sep 17 00:00:00 2001 From: Anthony Sottile Date: Fri, 13 Dec 2019 17:47:09 -0800 Subject: [PATCH 005/117] Fix mypy pretty output within a pty (#8145) Resolves #8144 --- mypy/util.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mypy/util.py b/mypy/util.py index 8f9448bcff4c..84859fa94e70 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -424,9 +424,12 @@ def get_terminal_width() -> int: """Get current terminal width if possible, otherwise return the default one.""" try: cols, _ = os.get_terminal_size() - return cols except OSError: return DEFAULT_COLUMNS + else: + if cols == 0: + return DEFAULT_COLUMNS + return cols def soft_wrap(msg: str, max_len: int, first_offset: int, From da8880e3a309e982acec7a1677203442dafd864e Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 16 Dec 2019 18:09:51 +0000 Subject: [PATCH 006/117] [mypyc] Force type error helper to not be inlined (#8154) On macOS, this made the generated binary about 8% smaller (when using -O3). 
--- mypyc/lib-rt/CPy.h | 1 + 1 file changed, 1 insertion(+) diff --git a/mypyc/lib-rt/CPy.h b/mypyc/lib-rt/CPy.h index 8379486355c8..fd0995149d21 100644 --- a/mypyc/lib-rt/CPy.h +++ b/mypyc/lib-rt/CPy.h @@ -1062,6 +1062,7 @@ static PyObject *CPy_FormatTypeName(PyObject *value) { return output; } +CPy_NOINLINE static void CPy_TypeError(const char *expected, PyObject *value) { PyObject *out = CPy_FormatTypeName(value); if (out) { From 5fd431f8f93b2d51ef102cc41966d5e89767e86d Mon Sep 17 00:00:00 2001 From: lazytype Date: Tue, 17 Dec 2019 13:33:29 -0500 Subject: [PATCH 007/117] Actually exclude "__order__" attribute from Enum Union expansion (#8160) My previous attempt didn't actually work as advertised as it was lacking complete test coverage. It merely handled the case `Bar.__order__ # E: "Type[Bar]" has no attribute "__order__"` This PR adds more tests and implements the desired behavior this time. --- mypy/typeops.py | 3 +++ test-data/unit/check-enum.test | 16 ++++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/mypy/typeops.py b/mypy/typeops.py index c068d4efcd4a..266a0fa0bb88 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -623,6 +623,9 @@ class Status(Enum): for name, symbol in typ.type.names.items(): if not isinstance(symbol.node, Var): continue + # Skip "_order_" and "__order__", since Enum will remove it + if name in ("_order_", "__order__"): + continue new_items.append(LiteralType(name, typ)) # SymbolTables are really just dicts, and dicts are guaranteed to preserve # insertion order only starting with Python 3.7. 
So, we sort these for older diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test index 81715b3eec54..27979af0ee21 100644 --- a/test-data/unit/check-enum.test +++ b/test-data/unit/check-enum.test @@ -693,6 +693,22 @@ elif y is Bar.B: else: reveal_type(y) # No output here: this branch is unreachable +x2: Foo +if x2 is Foo.A: + reveal_type(x2) # N: Revealed type is 'Literal[__main__.Foo.A]' +elif x2 is Foo.B: + reveal_type(x2) # N: Revealed type is 'Literal[__main__.Foo.B]' +else: + reveal_type(x2) # No output here: this branch is unreachable + +y2: Bar +if y2 is Bar.A: + reveal_type(y2) # N: Revealed type is 'Literal[__main__.Bar.A]' +elif y2 is Bar.B: + reveal_type(y2) # N: Revealed type is 'Literal[__main__.Bar.B]' +else: + reveal_type(y2) # No output here: this branch is unreachable + [case testEnumReachabilityChecksIndirect] from enum import Enum from typing_extensions import Literal, Final From 5f164167b874e132b1c72e9c3b9579642f0b911b Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Tue, 17 Dec 2019 23:34:13 +0000 Subject: [PATCH 008/117] Bump version to 0.770+dev (#8163) --- mypy/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/version.py b/mypy/version.py index cb5547279b0d..81a8cfca378b 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -5,7 +5,7 @@ # - Release versions have the form "0.NNN". # - Dev versions have the form "0.NNN+dev" (PLUS sign to conform to PEP 440). # - For 1.0 we'll switch back to 1.2.3 form. 
-__version__ = '0.760+dev' +__version__ = '0.770+dev' base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) From c1cd529181ef4086b03ea14fb22163aeb5b005a4 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 18 Dec 2019 16:45:45 +0000 Subject: [PATCH 009/117] Support type inference for defaultdict() (#8167) This allows inferring type of `x`, for example: ``` from collections import defaultdict x = defaultdict(list) # defaultdict[str, List[int]] x['foo'].append(1) ``` The implemention is not pretty and we have probably reached about the maximum reasonable level of special casing in type inference now. There is a hack to work around the problem with leaking type variable types in nested generics calls (I think). This will break some (likely very rare) use cases. --- mypy/checker.py | 53 ++++++++-- mypy/checkexpr.py | 125 +++++++++++++++++------- mypy/types.py | 7 +- mypyc/test-data/fixtures/ir.py | 3 + test-data/unit/check-inference.test | 94 ++++++++++++++++++ test-data/unit/fixtures/dict.pyi | 1 + test-data/unit/lib-stub/collections.pyi | 12 ++- test-data/unit/python2eval.test | 6 +- 8 files changed, 250 insertions(+), 51 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 9a826cd41496..4b98ed2937ed 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -2813,7 +2813,8 @@ def infer_partial_type(self, name: Var, lvalue: Lvalue, init_type: Type) -> bool partial_type = PartialType(None, name) elif isinstance(init_type, Instance): fullname = init_type.type.fullname - if (isinstance(lvalue, (NameExpr, MemberExpr)) and + is_ref = isinstance(lvalue, RefExpr) + if (is_ref and (fullname == 'builtins.list' or fullname == 'builtins.set' or fullname == 'builtins.dict' or @@ -2821,6 +2822,17 @@ def infer_partial_type(self, name: Var, lvalue: Lvalue, init_type: Type) -> bool all(isinstance(t, (NoneType, UninhabitedType)) for t in get_proper_types(init_type.args))): partial_type = PartialType(init_type.type, name) + 
elif is_ref and fullname == 'collections.defaultdict': + arg0 = get_proper_type(init_type.args[0]) + arg1 = get_proper_type(init_type.args[1]) + if (isinstance(arg0, (NoneType, UninhabitedType)) and + isinstance(arg1, Instance) and + self.is_valid_defaultdict_partial_value_type(arg1)): + # Erase type argument, if one exists (this fills in Anys) + arg1 = self.named_type(arg1.type.fullname) + partial_type = PartialType(init_type.type, name, arg1) + else: + return False else: return False else: @@ -2829,6 +2841,28 @@ def infer_partial_type(self, name: Var, lvalue: Lvalue, init_type: Type) -> bool self.partial_types[-1].map[name] = lvalue return True + def is_valid_defaultdict_partial_value_type(self, t: Instance) -> bool: + """Check if t can be used as the basis for a partial defaultddict value type. + + Examples: + + * t is 'int' --> True + * t is 'list[]' --> True + * t is 'dict[...]' --> False (only generic types with a single type + argument supported) + """ + if len(t.args) == 0: + return True + if len(t.args) == 1: + arg = get_proper_type(t.args[0]) + # TODO: This is too permissive -- we only allow TypeVarType since + # they leak in cases like defaultdict(list) due to a bug. + # This can result in incorrect types being inferred, but only + # in rare cases. + if isinstance(arg, (TypeVarType, UninhabitedType, NoneType)): + return True + return False + def set_inferred_type(self, var: Var, lvalue: Lvalue, type: Type) -> None: """Store inferred variable type. @@ -3018,16 +3052,21 @@ def try_infer_partial_type_from_indexed_assignment( if partial_types is None: return typename = type_type.fullname - if typename == 'builtins.dict' or typename == 'collections.OrderedDict': + if (typename == 'builtins.dict' + or typename == 'collections.OrderedDict' + or typename == 'collections.defaultdict'): # TODO: Don't infer things twice. 
key_type = self.expr_checker.accept(lvalue.index) value_type = self.expr_checker.accept(rvalue) if (is_valid_inferred_type(key_type) and - is_valid_inferred_type(value_type)): - if not self.current_node_deferred: - var.type = self.named_generic_type(typename, - [key_type, value_type]) - del partial_types[var] + is_valid_inferred_type(value_type) and + not self.current_node_deferred and + not (typename == 'collections.defaultdict' and + var.type.value_type is not None and + not is_equivalent(value_type, var.type.value_type))): + var.type = self.named_generic_type(typename, + [key_type, value_type]) + del partial_types[var] def visit_expression_stmt(self, s: ExpressionStmt) -> None: self.expr_checker.accept(s.expr, allow_none_return=True, always_allow_any=True) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 35c58478ce1e..0e5b42abde0a 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -567,42 +567,91 @@ def get_partial_self_var(self, expr: MemberExpr) -> Optional[Var]: } # type: ClassVar[Dict[str, Dict[str, List[str]]]] def try_infer_partial_type(self, e: CallExpr) -> None: - if isinstance(e.callee, MemberExpr) and isinstance(e.callee.expr, RefExpr): - var = e.callee.expr.node - if var is None and isinstance(e.callee.expr, MemberExpr): - var = self.get_partial_self_var(e.callee.expr) - if not isinstance(var, Var): + """Try to make partial type precise from a call.""" + if not isinstance(e.callee, MemberExpr): + return + callee = e.callee + if isinstance(callee.expr, RefExpr): + # Call a method with a RefExpr callee, such as 'x.method(...)'. + ret = self.get_partial_var(callee.expr) + if ret is None: return - partial_types = self.chk.find_partial_types(var) - if partial_types is not None and not self.chk.current_node_deferred: - partial_type = var.type - if (partial_type is None or - not isinstance(partial_type, PartialType) or - partial_type.type is None): - # A partial None type -> can't infer anything. 
- return - typename = partial_type.type.fullname - methodname = e.callee.name - # Sometimes we can infer a full type for a partial List, Dict or Set type. - # TODO: Don't infer argument expression twice. - if (typename in self.item_args and methodname in self.item_args[typename] - and e.arg_kinds == [ARG_POS]): - item_type = self.accept(e.args[0]) - if mypy.checker.is_valid_inferred_type(item_type): - var.type = self.chk.named_generic_type(typename, [item_type]) - del partial_types[var] - elif (typename in self.container_args - and methodname in self.container_args[typename] - and e.arg_kinds == [ARG_POS]): - arg_type = get_proper_type(self.accept(e.args[0])) - if isinstance(arg_type, Instance): - arg_typename = arg_type.type.fullname - if arg_typename in self.container_args[typename][methodname]: - if all(mypy.checker.is_valid_inferred_type(item_type) - for item_type in arg_type.args): - var.type = self.chk.named_generic_type(typename, - list(arg_type.args)) - del partial_types[var] + var, partial_types = ret + typ = self.try_infer_partial_value_type_from_call(e, callee.name, var) + if typ is not None: + var.type = typ + del partial_types[var] + elif isinstance(callee.expr, IndexExpr) and isinstance(callee.expr.base, RefExpr): + # Call 'x[y].method(...)'; may infer type of 'x' if it's a partial defaultdict. + if callee.expr.analyzed is not None: + return # A special form + base = callee.expr.base + index = callee.expr.index + ret = self.get_partial_var(base) + if ret is None: + return + var, partial_types = ret + partial_type = get_partial_instance_type(var.type) + if partial_type is None or partial_type.value_type is None: + return + value_type = self.try_infer_partial_value_type_from_call(e, callee.name, var) + if value_type is not None: + # Infer key type. + key_type = self.accept(index) + if mypy.checker.is_valid_inferred_type(key_type): + # Store inferred partial type. 
+ assert partial_type.type is not None + typename = partial_type.type.fullname + var.type = self.chk.named_generic_type(typename, + [key_type, value_type]) + del partial_types[var] + + def get_partial_var(self, ref: RefExpr) -> Optional[Tuple[Var, Dict[Var, Context]]]: + var = ref.node + if var is None and isinstance(ref, MemberExpr): + var = self.get_partial_self_var(ref) + if not isinstance(var, Var): + return None + partial_types = self.chk.find_partial_types(var) + if partial_types is None: + return None + return var, partial_types + + def try_infer_partial_value_type_from_call( + self, + e: CallExpr, + methodname: str, + var: Var) -> Optional[Instance]: + """Try to make partial type precise from a call such as 'x.append(y)'.""" + if self.chk.current_node_deferred: + return None + partial_type = get_partial_instance_type(var.type) + if partial_type is None: + return None + if partial_type.value_type: + typename = partial_type.value_type.type.fullname + else: + assert partial_type.type is not None + typename = partial_type.type.fullname + # Sometimes we can infer a full type for a partial List, Dict or Set type. + # TODO: Don't infer argument expression twice. 
+ if (typename in self.item_args and methodname in self.item_args[typename] + and e.arg_kinds == [ARG_POS]): + item_type = self.accept(e.args[0]) + if mypy.checker.is_valid_inferred_type(item_type): + return self.chk.named_generic_type(typename, [item_type]) + elif (typename in self.container_args + and methodname in self.container_args[typename] + and e.arg_kinds == [ARG_POS]): + arg_type = get_proper_type(self.accept(e.args[0])) + if isinstance(arg_type, Instance): + arg_typename = arg_type.type.fullname + if arg_typename in self.container_args[typename][methodname]: + if all(mypy.checker.is_valid_inferred_type(item_type) + for item_type in arg_type.args): + return self.chk.named_generic_type(typename, + list(arg_type.args)) + return None def apply_function_plugin(self, callee: CallableType, @@ -4299,3 +4348,9 @@ def is_operator_method(fullname: Optional[str]) -> bool: short_name in nodes.op_methods.values() or short_name in nodes.reverse_op_methods.values() or short_name in nodes.unary_op_methods.values()) + + +def get_partial_instance_type(t: Optional[Type]) -> Optional[PartialType]: + if t is None or not isinstance(t, PartialType) or t.type is None: + return None + return t diff --git a/mypy/types.py b/mypy/types.py index ae678acedb3a..40b8d311d5cd 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -1763,13 +1763,18 @@ class PartialType(ProperType): # None for the 'None' partial type; otherwise a generic class type = None # type: Optional[mypy.nodes.TypeInfo] var = None # type: mypy.nodes.Var + # For partial defaultdict[K, V], the type V (K is unknown). If V is generic, + # the type argument is Any and will be replaced later. 
+ value_type = None # type: Optional[Instance] def __init__(self, type: 'Optional[mypy.nodes.TypeInfo]', - var: 'mypy.nodes.Var') -> None: + var: 'mypy.nodes.Var', + value_type: 'Optional[Instance]' = None) -> None: super().__init__() self.type = type self.var = var + self.value_type = value_type def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_partial_type(self) diff --git a/mypyc/test-data/fixtures/ir.py b/mypyc/test-data/fixtures/ir.py index c7a1b35c7cbe..323800429522 100644 --- a/mypyc/test-data/fixtures/ir.py +++ b/mypyc/test-data/fixtures/ir.py @@ -26,6 +26,9 @@ class ellipsis: pass # Primitive types are special in generated code. class int: + @overload + def __init__(self) -> None: pass + @overload def __init__(self, x: object, base: int = 10) -> None: pass def __add__(self, n: int) -> int: pass def __sub__(self, n: int) -> int: pass diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 91b3a93506f5..19d1554c5ef6 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -2976,3 +2976,97 @@ x: Optional[str] y = filter(None, [x]) reveal_type(y) # N: Revealed type is 'builtins.list[builtins.str*]' [builtins fixtures/list.pyi] + +[case testPartialDefaultDict] +from collections import defaultdict +x = defaultdict(int) +x[''] = 1 +reveal_type(x) # N: Revealed type is 'collections.defaultdict[builtins.str, builtins.int]' + +y = defaultdict(int) # E: Need type annotation for 'y' + +z = defaultdict(int) # E: Need type annotation for 'z' +z[''] = '' +reveal_type(z) # N: Revealed type is 'collections.defaultdict[Any, Any]' +[builtins fixtures/dict.pyi] + +[case testPartialDefaultDictInconsistentValueTypes] +from collections import defaultdict +a = defaultdict(int) # E: Need type annotation for 'a' +a[''] = '' +a[''] = 1 +reveal_type(a) # N: Revealed type is 'collections.defaultdict[builtins.str, builtins.int]' +[builtins fixtures/dict.pyi] + +[case 
testPartialDefaultDictListValue] +# flags: --no-strict-optional +from collections import defaultdict +a = defaultdict(list) +a['x'].append(1) +reveal_type(a) # N: Revealed type is 'collections.defaultdict[builtins.str, builtins.list[builtins.int]]' + +b = defaultdict(lambda: []) +b[1].append('x') +reveal_type(b) # N: Revealed type is 'collections.defaultdict[builtins.int, builtins.list[builtins.str]]' +[builtins fixtures/dict.pyi] + +[case testPartialDefaultDictListValueStrictOptional] +# flags: --strict-optional +from collections import defaultdict +a = defaultdict(list) +a['x'].append(1) +reveal_type(a) # N: Revealed type is 'collections.defaultdict[builtins.str, builtins.list[builtins.int]]' + +b = defaultdict(lambda: []) +b[1].append('x') +reveal_type(b) # N: Revealed type is 'collections.defaultdict[builtins.int, builtins.list[builtins.str]]' +[builtins fixtures/dict.pyi] + +[case testPartialDefaultDictSpecialCases] +from collections import defaultdict +class A: + def f(self) -> None: + self.x = defaultdict(list) + self.x['x'].append(1) + reveal_type(self.x) # N: Revealed type is 'collections.defaultdict[builtins.str, builtins.list[builtins.int]]' + self.y = defaultdict(list) # E: Need type annotation for 'y' + s = self + s.y['x'].append(1) + +x = {} # E: Need type annotation for 'x' (hint: "x: Dict[, ] = ...") +x['x'].append(1) + +y = defaultdict(list) # E: Need type annotation for 'y' +y[[]].append(1) +[builtins fixtures/dict.pyi] + +[case testPartialDefaultDictSpecialCases2] +from collections import defaultdict + +x = defaultdict(lambda: [1]) # E: Need type annotation for 'x' +x[1].append('') # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" +reveal_type(x) # N: Revealed type is 'collections.defaultdict[Any, builtins.list[builtins.int]]' + +xx = defaultdict(lambda: {'x': 1}) # E: Need type annotation for 'xx' +xx[1]['z'] = 3 +reveal_type(xx) # N: Revealed type is 'collections.defaultdict[Any, builtins.dict[builtins.str, 
builtins.int]]' + +y = defaultdict(dict) # E: Need type annotation for 'y' +y['x'][1] = [3] + +z = defaultdict(int) # E: Need type annotation for 'z' +z[1].append('') +reveal_type(z) # N: Revealed type is 'collections.defaultdict[Any, Any]' +[builtins fixtures/dict.pyi] + +[case testPartialDefaultDictSpecialCase3] +from collections import defaultdict + +x = defaultdict(list) +x['a'] = [1, 2, 3] +reveal_type(x) # N: Revealed type is 'collections.defaultdict[builtins.str, builtins.list[builtins.int*]]' + +y = defaultdict(list) # E: Need type annotation for 'y' +y['a'] = [] +reveal_type(y) # N: Revealed type is 'collections.defaultdict[Any, Any]' +[builtins fixtures/dict.pyi] diff --git a/test-data/unit/fixtures/dict.pyi b/test-data/unit/fixtures/dict.pyi index 9e7970b34705..99c950d8fc9f 100644 --- a/test-data/unit/fixtures/dict.pyi +++ b/test-data/unit/fixtures/dict.pyi @@ -42,6 +42,7 @@ class list(Sequence[T]): # needed by some test cases def __iter__(self) -> Iterator[T]: pass def __mul__(self, x: int) -> list[T]: pass def __contains__(self, item: object) -> bool: pass + def append(self, item: T) -> None: pass class tuple(Generic[T]): pass class function: pass diff --git a/test-data/unit/lib-stub/collections.pyi b/test-data/unit/lib-stub/collections.pyi index c93fea198ebf..c5b5ef0504e6 100644 --- a/test-data/unit/lib-stub/collections.pyi +++ b/test-data/unit/lib-stub/collections.pyi @@ -1,4 +1,4 @@ -from typing import Any, Iterable, Union, Optional, Dict, TypeVar +from typing import Any, Iterable, Union, Optional, Dict, TypeVar, overload, Optional, Callable def namedtuple( typename: str, @@ -10,8 +10,10 @@ def namedtuple( defaults: Optional[Iterable[Any]] = ... ) -> Any: ... -K = TypeVar('K') -V = TypeVar('V') +KT = TypeVar('KT') +VT = TypeVar('VT') -class OrderedDict(Dict[K, V]): - def __setitem__(self, k: K, v: V) -> None: ... +class OrderedDict(Dict[KT, VT]): ... 
+ +class defaultdict(Dict[KT, VT]): + def __init__(self, default_factory: Optional[Callable[[], VT]]) -> None: ... diff --git a/test-data/unit/python2eval.test b/test-data/unit/python2eval.test index 2267cadb1a08..93fe668a8b81 100644 --- a/test-data/unit/python2eval.test +++ b/test-data/unit/python2eval.test @@ -420,11 +420,11 @@ if MYPY: x = b'abc' [out] -[case testNestedGenericFailedInference] +[case testDefaultDictInference] from collections import defaultdict def foo() -> None: - x = defaultdict(list) # type: ignore + x = defaultdict(list) x['lol'].append(10) reveal_type(x) [out] -_testNestedGenericFailedInference.py:5: note: Revealed type is 'collections.defaultdict[Any, builtins.list[Any]]' +_testDefaultDictInference.py:5: note: Revealed type is 'collections.defaultdict[builtins.str, builtins.list[builtins.int]]' From 09322614e69be51f5ad67cca46fa42edda319e15 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 18 Dec 2019 17:36:43 +0000 Subject: [PATCH 010/117] Remove dead code (#8168) The `in` operator is handled by `visit_comparison_expr`. --- mypy/checkexpr.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 0e5b42abde0a..4a3da6a5c4a6 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2070,10 +2070,6 @@ def visit_ellipsis(self, e: EllipsisExpr) -> Type: def visit_op_expr(self, e: OpExpr) -> Type: """Type check a binary operator expression.""" - if e.op == 'in': - self.accept(e.right) - self.accept(e.left) - return self.bool_type() if e.op == 'and' or e.op == 'or': return self.check_boolean_op(e, e) if e.op == '*' and isinstance(e.left, ListExpr): From e0281be028f74a7c3e0d912dd891eb8d0854b78a Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 18 Dec 2019 18:32:25 +0000 Subject: [PATCH 011/117] Infer type from "x.extend(y)" where y has type Any (etc.) (#8169) Previously we inferred a type from an argument like `List[Any]` but not from plain `Any`, which was inconsistent. 
--- mypy/checkexpr.py | 3 +++ test-data/unit/check-inference.test | 15 +++++++++++++++ 2 files changed, 18 insertions(+) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 4a3da6a5c4a6..58afc48e3a95 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -651,6 +651,9 @@ def try_infer_partial_value_type_from_call( for item_type in arg_type.args): return self.chk.named_generic_type(typename, list(arg_type.args)) + elif isinstance(arg_type, AnyType): + return self.chk.named_type(typename) + return None def apply_function_plugin(self, diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 19d1554c5ef6..2cd51dd619bf 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -1723,6 +1723,21 @@ class C: self.a = 1 reveal_type(C().a) # N: Revealed type is 'Union[builtins.int, None]' +[case testInferListTypeFromEmptyListAndAny] +def f(): + return [] + +def g() -> None: + x = [] + if bool(): + x = f() + reveal_type(x) # N: Revealed type is 'builtins.list[Any]' + + y = [] + y.extend(f()) + reveal_type(y) # N: Revealed type is 'builtins.list[Any]' +[builtins fixtures/list.pyi] + -- Inferring types of variables first initialized to None (partial types) -- ---------------------------------------------------------------------- From 3db05b27a02a6af4ffbdff1695ca990e0393d1fd Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 18 Dec 2019 19:58:37 +0000 Subject: [PATCH 012/117] Attempt to fix a crash related to partial defaultdict types (#8170) It seems that in some cases `named_type` can fail, so instead I erase the type using `erase_type`. I don't have a simplified repro, but I verified that this fixes a crash in an internal codebase. 
--- mypy/checker.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 4b98ed2937ed..f17ca5240b71 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -63,7 +63,7 @@ from mypy.typevars import fill_typevars, has_no_typevars, fill_typevars_with_any from mypy.semanal import set_callable_name, refers_to_fullname from mypy.mro import calculate_mro -from mypy.erasetype import erase_typevars, remove_instance_last_known_values +from mypy.erasetype import erase_typevars, remove_instance_last_known_values, erase_type from mypy.expandtype import expand_type, expand_type_by_instance from mypy.visitor import NodeVisitor from mypy.join import join_types @@ -2826,10 +2826,9 @@ def infer_partial_type(self, name: Var, lvalue: Lvalue, init_type: Type) -> bool arg0 = get_proper_type(init_type.args[0]) arg1 = get_proper_type(init_type.args[1]) if (isinstance(arg0, (NoneType, UninhabitedType)) and - isinstance(arg1, Instance) and self.is_valid_defaultdict_partial_value_type(arg1)): - # Erase type argument, if one exists (this fills in Anys) - arg1 = self.named_type(arg1.type.fullname) + arg1 = erase_type(arg1) + assert isinstance(arg1, Instance) partial_type = PartialType(init_type.type, name, arg1) else: return False @@ -2841,7 +2840,7 @@ def infer_partial_type(self, name: Var, lvalue: Lvalue, init_type: Type) -> bool self.partial_types[-1].map[name] = lvalue return True - def is_valid_defaultdict_partial_value_type(self, t: Instance) -> bool: + def is_valid_defaultdict_partial_value_type(self, t: ProperType) -> bool: """Check if t can be used as the basis for a partial defaultddict value type. 
Examples: @@ -2851,6 +2850,8 @@ def is_valid_defaultdict_partial_value_type(self, t: Instance) -> bool: * t is 'dict[...]' --> False (only generic types with a single type argument supported) """ + if not isinstance(t, Instance): + return False if len(t.args) == 0: return True if len(t.args) == 1: From 331329c45cd60352f0678cf532b86acfc55ed577 Mon Sep 17 00:00:00 2001 From: "Michael J. Sullivan" Date: Wed, 18 Dec 2019 15:29:01 -0800 Subject: [PATCH 013/117] Fix some daemon crashes involving classes becoming generic (#8157) Fixes #3279. Also fixes another related crash. --- mypy/constraints.py | 24 +++++---- mypy/join.py | 6 ++- mypy/meet.py | 6 ++- test-data/unit/fine-grained.test | 89 ++++++++++++++++++++++++++++++++ 4 files changed, 111 insertions(+), 14 deletions(-) diff --git a/mypy/constraints.py b/mypy/constraints.py index ca254026c310..aa4ce24b65df 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -347,29 +347,33 @@ def visit_instance(self, template: Instance) -> List[Constraint]: template.type.has_base(instance.type.fullname)): mapped = map_instance_to_supertype(template, instance.type) tvars = mapped.type.defn.type_vars - for i in range(len(instance.args)): + # N.B: We use zip instead of indexing because the lengths might have + # mismatches during daemon reprocessing. + for tvar, mapped_arg, instance_arg in zip(tvars, mapped.args, instance.args): # The constraints for generic type parameters depend on variance. # Include constraints from both directions if invariant. 
- if tvars[i].variance != CONTRAVARIANT: + if tvar.variance != CONTRAVARIANT: res.extend(infer_constraints( - mapped.args[i], instance.args[i], self.direction)) - if tvars[i].variance != COVARIANT: + mapped_arg, instance_arg, self.direction)) + if tvar.variance != COVARIANT: res.extend(infer_constraints( - mapped.args[i], instance.args[i], neg_op(self.direction))) + mapped_arg, instance_arg, neg_op(self.direction))) return res elif (self.direction == SUPERTYPE_OF and instance.type.has_base(template.type.fullname)): mapped = map_instance_to_supertype(instance, template.type) tvars = template.type.defn.type_vars - for j in range(len(template.args)): + # N.B: We use zip instead of indexing because the lengths might have + # mismatches during daemon reprocessing. + for tvar, mapped_arg, template_arg in zip(tvars, mapped.args, template.args): # The constraints for generic type parameters depend on variance. # Include constraints from both directions if invariant. - if tvars[j].variance != CONTRAVARIANT: + if tvar.variance != CONTRAVARIANT: res.extend(infer_constraints( - template.args[j], mapped.args[j], self.direction)) - if tvars[j].variance != COVARIANT: + template_arg, mapped_arg, self.direction)) + if tvar.variance != COVARIANT: res.extend(infer_constraints( - template.args[j], mapped.args[j], neg_op(self.direction))) + template_arg, mapped_arg, neg_op(self.direction))) return res if (template.type.is_protocol and self.direction == SUPERTYPE_OF and # We avoid infinite recursion for structural subtypes by checking diff --git a/mypy/join.py b/mypy/join.py index 8989a596b70e..a2513bd36201 100644 --- a/mypy/join.py +++ b/mypy/join.py @@ -345,8 +345,10 @@ def join_instances(t: Instance, s: Instance) -> ProperType: if is_subtype(t, s) or is_subtype(s, t): # Compatible; combine type arguments. 
args = [] # type: List[Type] - for i in range(len(t.args)): - args.append(join_types(t.args[i], s.args[i])) + # N.B: We use zip instead of indexing because the lengths might have + # mismatches during daemon reprocessing. + for ta, sa in zip(t.args, s.args): + args.append(join_types(ta, sa)) return Instance(t.type, args) else: # Incompatible; return trivial result object. diff --git a/mypy/meet.py b/mypy/meet.py index 608faf8f25fe..548278c154da 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -491,8 +491,10 @@ def visit_instance(self, t: Instance) -> ProperType: # Combine type arguments. We could have used join below # equivalently. args = [] # type: List[Type] - for i in range(len(t.args)): - args.append(self.meet(t.args[i], si.args[i])) + # N.B: We use zip instead of indexing because the lengths might have + # mismatches during daemon reprocessing. + for ta, sia in zip(t.args, si.args): + args.append(self.meet(ta, sia)) return Instance(t.type, args) else: if state.strict_optional: diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 11e83f560eee..2dc598661fd9 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -9288,3 +9288,92 @@ class B: self.x = 0 [out] == + +[case testGenericChange1] +import a +[file a.py] +import b +def f() -> b.C: pass +[file b.py] +import a +class C: pass +[file b.py.2] +from typing import TypeVar, Generic, List +import a + +T = TypeVar('T') +class C(Generic[T]): pass + +reveal_type(a.f) +c: C[int] +l = a.f() if True else c +d = a.f() +d = c +c = d + +x: List[C] = [a.f(), a.f()] + +[out] +== +b.py:7: note: Revealed type is 'def () -> b.C[Any]' +[builtins fixtures/list.pyi] + +[case testGenericChange2] +import a +[file a.py] +import b +def f() -> b.C[int]: pass +[file b.py] +from typing import TypeVar, Generic +import a +T = TypeVar('T') +class C(Generic[T]): pass +[file b.py.2] +from typing import List +import a + +class C(): pass + +c: C +l = a.f() if True else c +d = 
a.f() +d = c +c = d + +x: List[C] = [a.f(), a.f()] + +[builtins fixtures/list.pyi] +[out] +== +a.py:2: error: "C" expects no type arguments, but 1 given + +[case testGenericChange3] +import a +[file a.py] +import b +def f() -> b.C[int]: pass +[file b.py] +from typing import TypeVar, Generic +import a +T = TypeVar('T') +class C(Generic[T]): pass +[file b.py.2] +from typing import TypeVar, Generic, List +import a + +T = TypeVar('T') +S = TypeVar('S') +class C(Generic[S, T]): pass + +c: C[int, str] +l = a.f() if True else c +d = a.f() +d = c +c = d + +x: List[C] = [a.f(), a.f()] + +[out] +== +a.py:2: error: "C" expects 2 type arguments, but 1 given +[builtins fixtures/list.pyi] From e7866d0839202234db1f975a68a9284bb59d8544 Mon Sep 17 00:00:00 2001 From: Xuanda Yang Date: Thu, 19 Dec 2019 18:31:01 +0800 Subject: [PATCH 014/117] Fix TypedDict is recognized only when imported directly (#8174) Resolves #8162 --- mypy/checkexpr.py | 2 +- test-data/unit/check-modules.test | 25 +++++++++++++++++++++++++ 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 58afc48e3a95..de7b99d827e7 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -263,7 +263,7 @@ def visit_call_expr(self, e: CallExpr, allow_none_return: bool = False) -> Type: return self.visit_call_expr_inner(e, allow_none_return=allow_none_return) def visit_call_expr_inner(self, e: CallExpr, allow_none_return: bool = False) -> Type: - if isinstance(e.callee, NameExpr) and isinstance(e.callee.node, TypeInfo) and \ + if isinstance(e.callee, RefExpr) and isinstance(e.callee.node, TypeInfo) and \ e.callee.node.typeddict_type is not None: # Use named fallback for better error messages. 
typeddict_type = e.callee.node.typeddict_type.copy_modified( diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index 663750df2c07..526b7b72b298 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -2781,3 +2781,28 @@ aaaab: int aaaba: int aabaa: int [builtins fixtures/module.pyi] + +[case testDirectlyImportTypedDictObjectAtTopLevel] +import foo.bar.custom_dict +from foo import bar +from foo.bar import custom_dict +from foo.bar.custom_dict import CustomDict + +foo.bar.custom_dict.CustomDict(foo="abc", bar="def") +bar.custom_dict.CustomDict(foo="abc", bar="def") +custom_dict.CustomDict(foo="abc", bar="def") +CustomDict(foo="abc", bar="def") + +[file foo/__init__.py] +[file foo/bar/__init__.py] +[file foo/bar/custom_dict.py] +from typing_extensions import TypedDict + +CustomDict = TypedDict( + "CustomDict", + { + "foo": str, + "bar": str, + }, +) +[typing fixtures/typing-full.pyi] From 387a9119a9422a9671fe0f18d3f0f3636aed24f1 Mon Sep 17 00:00:00 2001 From: Jacob Beck Date: Thu, 19 Dec 2019 11:47:01 -0700 Subject: [PATCH 015/117] Fix bugs where overriding init in a dataclass subclass crashed mypy (#8159) Fixes #8015 Fixes #8022 This fixes two bugs related to dataclass `InitVar`s and overriding `__init__` by changing mypy to ignore `InitVar`s in parent classes when the class supplied its own `__init__` and set `(init=False)` on the dataclass. It also fixes a bug with multiple inheritance of dataclasses. Previously dataclasses attempted to look up the current class' `__init__` to find the definition of `InitVar`s, which worked fine as long as the current class was a direct subclass of the parent and didn't override `__init__`. Unfortunately that didn't work when the `InitVar` came from a subclass that was not first in MRO, or the parent had an item in its `__init__` definition that the subclass didn't use. 
Now mypy will look up the `__init__` for the current parent class that is being processed in order to find the appropriate `InitVar` definition. I've added test cases for all the issues. --- mypy/plugins/dataclasses.py | 19 +++-- test-data/unit/check-dataclasses.test | 104 ++++++++++++++++++++++++++ 2 files changed, 118 insertions(+), 5 deletions(-) diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index ed3a0f4c997f..318603734342 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -290,7 +290,6 @@ def collect_attributes(self) -> Optional[List[DataclassAttribute]]: # copy() because we potentially modify all_attrs below and if this code requires debugging # we'll have unmodified attrs laying around. all_attrs = attrs.copy() - init_method = cls.info.get_method('__init__') for info in cls.info.mro[1:-1]: if 'dataclass' not in info.metadata: continue @@ -303,15 +302,17 @@ def collect_attributes(self) -> Optional[List[DataclassAttribute]]: name = data['name'] # type: str if name not in known_attrs: attr = DataclassAttribute.deserialize(info, data) - if attr.is_init_var and isinstance(init_method, FuncDef): + if attr.is_init_var: # InitVars are removed from classes so, in order for them to be inherited # properly, we need to re-inject them into subclasses' sym tables here. # To do that, we look 'em up from the parents' __init__. These variables # are subsequently removed from the sym table at the end of # DataclassTransformer.transform. 
- for arg, arg_name in zip(init_method.arguments, init_method.arg_names): - if arg_name == attr.name: - cls.info.names[attr.name] = SymbolTableNode(MDEF, arg.variable) + superclass_init = info.get_method('__init__') + if isinstance(superclass_init, FuncDef): + attr_node = _get_arg_from_init(superclass_init, attr.name) + if attr_node is not None: + cls.info.names[attr.name] = attr_node known_attrs.add(name) super_attrs.append(attr) @@ -367,6 +368,14 @@ def _freeze(self, attributes: List[DataclassAttribute]) -> None: info.names[var.name] = SymbolTableNode(MDEF, var) +def _get_arg_from_init(init_method: FuncDef, attr_name: str) -> Optional[SymbolTableNode]: + """Given an init method and an attribute name, find the Var in the init method's args.""" + for arg, arg_name in zip(init_method.arguments, init_method.arg_names): + if arg_name == attr_name: + return SymbolTableNode(MDEF, arg.variable) + return None + + def dataclass_class_maker_callback(ctx: ClassDefContext) -> None: """Hooks into the class typechecking process to add support for dataclasses. 
""" diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 2822abec62b4..ccf767ce3c30 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -80,6 +80,28 @@ reveal_type(D) # N: Revealed type is 'def (a: builtins.int, b: builtins.int, c: [builtins fixtures/list.pyi] +[case testDataclassesMultipleInheritance] +from dataclasses import dataclass, field, InitVar +@dataclass +class A: + a: bool + +@dataclass +class B: + b: InitVar[bool] + _b: bool = field(init=False) + + def __post_init__(self, b: bool): + self._b = b + +@dataclass +class C(A, B): + pass + +reveal_type(C) # N: Revealed type is 'def (b: builtins.bool, a: builtins.bool) -> __main__.C' + +[builtins fixtures/bool.pyi] + [case testDataclassesOverriding] # flags: --python-version 3.6 from dataclasses import dataclass @@ -589,6 +611,88 @@ class A: return cls() [builtins fixtures/classmethod.pyi] +[case testDataclassesInitVarOverride] +import dataclasses + +@dataclasses.dataclass +class A: + a: dataclasses.InitVar[int] + _a: int = dataclasses.field(init=False) + + def __post_init__(self, a): + self._a = a + +@dataclasses.dataclass(init=False) +class B(A): + b: dataclasses.InitVar[int] + _b: int = dataclasses.field(init=False) + + def __init__(self, b): + super().__init__(b+1) + self._b = b + +[builtins fixtures/bool.pyi] + +[case testDataclassesInitVarNoOverride] +import dataclasses + +@dataclasses.dataclass +class A: + a: dataclasses.InitVar[int] + _a: int = dataclasses.field(init=False) + + def __post_init__(self, a): + self._a = a + +@dataclasses.dataclass(init=True) +class B(A): + b: dataclasses.InitVar[int] + _b: int = dataclasses.field(init=False) + + def __post_init__(self, a, b): + self._a = a + self._b = b + +B(1, 2) +B(1, 'a') # E: Argument 2 to "B" has incompatible type "str"; expected "int" + +[builtins fixtures/bool.pyi] + +[case testDataclassesInitVarPostInitOverride] +import dataclasses + +@dataclasses.dataclass 
+class A: + a: dataclasses.InitVar[int] + _a: int = dataclasses.field(init=False) + + def __post_init__(self, a: int) -> None: + self._a = a + +@dataclasses.dataclass +class B(A): + b: int = dataclasses.field(init=False) + + def __post_init__(self, a: int) -> None: + super().__post_init__(a) + self.b = a + 1 + +@dataclasses.dataclass(init=False) +class C(B): + c: int + + def __init__(self, a: int, c: int) -> None: + super().__init__(a) + self.c = c + self.b + +A(1) +B(1) +B(1, 2) # E: Too many arguments for "B" +C(1, 2) +C(1, 'a') # E: Argument 2 to "C" has incompatible type "str"; expected "int" + +[builtins fixtures/primitives.pyi] + [case testNoComplainFieldNone] # flags: --python-version 3.6 # flags: --no-strict-optional From ce186f488a8ef4dcdf80b47652f6b29e750bfb70 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 19 Dec 2019 19:00:16 +0000 Subject: [PATCH 016/117] Allow 'in' operations with partial types (#8177) This improves type inference in code like this: ``` d = {} for k in f(): if k not in d: d[k] = 0 else: d[k] += 1 ``` Unfortunately, this change breaks strict equality checking in 'in' operations involving partial types. I'm not 100% sure this is a net improvement. I couldn't find a clean way of getting strict equality working together with this. --- mypy/checkexpr.py | 22 +++++++++++++++++++++- test-data/unit/check-inference.test | 28 ++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+), 1 deletion(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index de7b99d827e7..2f8a5098baa4 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2125,7 +2125,11 @@ def visit_comparison_expr(self, e: ComparisonExpr) -> Type: method_type = None # type: Optional[mypy.types.Type] if operator == 'in' or operator == 'not in': - right_type = self.accept(right) # always validate the right operand + # If the right operand has partial type, look it up without triggering + # a "Need type annotation ..." message, as it would be noise. 
+ right_type = self.find_partial_type_ref_fast_path(right) + if right_type is None: + right_type = self.accept(right) # Validate the right operand # Keep track of whether we get type check errors (these won't be reported, they # are just to verify whether something is valid typing wise). @@ -2206,6 +2210,22 @@ def visit_comparison_expr(self, e: ComparisonExpr) -> Type: assert result is not None return result + def find_partial_type_ref_fast_path(self, expr: Expression) -> Optional[Type]: + """If expression has a partial generic type, return it without additional checks. + + In particular, this does not generate an error about a missing annotation. + + Otherwise, return None. + """ + if not isinstance(expr, RefExpr): + return None + if isinstance(expr.node, Var): + result = self.analyze_var_ref(expr.node, expr) + if isinstance(result, PartialType) and result.type is not None: + self.chk.store_type(expr, self.chk.fixup_partial_type(result)) + return result + return None + def dangerous_comparison(self, left: Type, right: Type, original_container: Optional[Type] = None) -> bool: """Check for dangerous non-overlapping comparisons like 42 == 'no'. 
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 2cd51dd619bf..d672f73b5f15 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -1738,6 +1738,34 @@ def g() -> None: reveal_type(y) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/list.pyi] +[case testInferFromEmptyDictWhenUsingIn] +d = {} +if 'x' in d: + d['x'] = 1 +reveal_type(d) # N: Revealed type is 'builtins.dict[builtins.str, builtins.int]' + +dd = {} +if 'x' not in dd: + dd['x'] = 1 +reveal_type(dd) # N: Revealed type is 'builtins.dict[builtins.str, builtins.int]' +[builtins fixtures/dict.pyi] + +[case testInferFromEmptyDictWhenUsingInSpecialCase] +d = None +if 'x' in d: # E: "None" has no attribute "__iter__" (not iterable) + pass +reveal_type(d) # N: Revealed type is 'None' +[builtins fixtures/dict.pyi] + +[case testInferFromEmptyListWhenUsingInWithStrictEquality] +# flags: --strict-equality +def f() -> None: + a = [] + if 1 in a: # TODO: This should be an error + a.append('x') +[builtins fixtures/list.pyi] +[typing fixtures/typing-full.pyi] + -- Inferring types of variables first initialized to None (partial types) -- ---------------------------------------------------------------------- From 04366e731d0112f82a8c24a2ba44890fc0196296 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 20 Dec 2019 12:25:39 +0000 Subject: [PATCH 017/117] Simplify default test stubs to speed up tests slightly (#8179) This speeds up the set of "quick" tests that I typically run by about 2%. 
--- test-data/unit/check-dataclasses.test | 2 ++ test-data/unit/check-errorcodes.test | 1 + test-data/unit/check-expressions.test | 18 +++++++++++++++++- test-data/unit/fixtures/async_await.pyi | 1 + test-data/unit/fixtures/bool.pyi | 1 + test-data/unit/fixtures/python2.pyi | 1 + test-data/unit/fixtures/typing-full.pyi | 3 +++ test-data/unit/lib-stub/typing.pyi | 7 ------- test-data/unit/typexport-basic.test | 3 ++- 9 files changed, 28 insertions(+), 9 deletions(-) diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index ccf767ce3c30..ed26afed5e6c 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -15,6 +15,7 @@ Person('John', 32) Person('Jonh', 21, None) # E: Too many arguments for "Person" [builtins fixtures/list.pyi] +[typing fixtures/typing-full.pyi] [case testDataclassesCustomInit] # flags: --python-version 3.6 @@ -52,6 +53,7 @@ Person(32, 'John') Person(21, 'Jonh', None) # E: Too many arguments for "Person" [builtins fixtures/list.pyi] +[typing fixtures/typing-full.pyi] [case testDataclassesDeepInheritance] # flags: --python-version 3.6 diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index 3f6fc14171fa..4dfb47fcc51e 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -615,6 +615,7 @@ def g() -> int: '{}'.format(b'abc') # E: On Python 3 '{}'.format(b'abc') produces "b'abc'"; use !r if this is a desired behavior [str-bytes-safe] '%s' % b'abc' # E: On Python 3 '%s' % b'abc' produces "b'abc'"; use %r if this is a desired behavior [str-bytes-safe] [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] [case testErrorCodeIgnoreNamedDefinedNote] x: List[int] # type: ignore[name-defined] diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index 944f1e8a6901..ff8d7cdd4fe8 100644 --- a/test-data/unit/check-expressions.test +++ 
b/test-data/unit/check-expressions.test @@ -1152,6 +1152,7 @@ i, f, s, t = None, None, None, None # type: (int, float, str, Tuple[int]) '%i' % f '%o' % f # E: Incompatible types in string interpolation (expression has type "float", placeholder has type "int") [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] [case testStringInterpolationSAcceptsAnyType] from typing import Any @@ -1179,6 +1180,7 @@ reveal_type('%(key)s' % {'key': xu}) # N: Revealed type is 'builtins.unicode' reveal_type('%r' % xu) # N: Revealed type is 'builtins.str' reveal_type('%s' % xs) # N: Revealed type is 'builtins.str' [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] [case testStringInterpolationCount] '%d %d' % 1 # E: Not enough arguments for format string @@ -1189,12 +1191,14 @@ t = 1, 's' '%s %d' % t # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float, SupportsInt]") '%d' % t # E: Not all arguments converted during string formatting [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] [case testStringInterpolationWithAnyType] from typing import Any a = None # type: Any '%d %d' % a [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] [case testStringInterpolationInvalidPlaceholder] '%W' % 1 # E: Unsupported format character 'W' @@ -1221,6 +1225,7 @@ b'%a' % 3 '%*f' % (4, 3.14) '%*f' % (1.1, 3.14) # E: * wants int [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] [case testStringInterpolationPrecision] '%.2f' % 3.14 @@ -1228,6 +1233,7 @@ b'%a' % 3 '%.*f' % (4, 3.14) '%.*f' % (1.1, 3.14) # E: * wants int [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] [case testStringInterpolationWidthAndPrecision] '%4.2f' % 3.14 @@ -1236,6 +1242,7 @@ b'%a' % 3 '%*.*f' % 3.14 # E: Not enough arguments for format string '%*.*f' % (4, 2, 3.14) [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] [case 
testStringInterpolationFlagsAndLengthModifiers] '%04hd' % 1 @@ -1243,6 +1250,7 @@ b'%a' % 3 '%+*Ld' % (1, 1) '% .*ld' % (1, 1) [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] [case testStringInterpolationDoublePercentage] '%% %d' % 1 @@ -1250,6 +1258,7 @@ b'%a' % 3 '%*%' % 1 '%*% %d' % 1 # E: Not enough arguments for format string [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] [case testStringInterpolationC] '%c' % 1 @@ -1263,14 +1272,15 @@ b'%a' % 3 '%(a)d %(b)s' % {'a': 's', 'b': 1} # E: Incompatible types in string interpolation (expression has type "str", placeholder with key 'a' has type "Union[int, float, SupportsInt]") b'%(x)s' % {b'x': b'data'} [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] [case testStringInterpolationMappingKeys] '%()d' % {'': 2} '%(a)d' % {'a': 1, 'b': 2, 'c': 3} '%(q)d' % {'a': 1, 'b': 2, 'c': 3} # E: Key 'q' not found in mapping '%(a)d %%' % {'a': 1} - [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] [case testStringInterpolationMappingDictTypes] from typing import Any, Dict @@ -1300,6 +1310,7 @@ di = None # type: Dict[int, int] '%(a).1d' % {'a': 1} '%(a)#1.1ld' % {'a': 1} [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] [case testStringInterpolationFloatPrecision] '%.f' % 1.2 @@ -1307,6 +1318,7 @@ di = None # type: Dict[int, int] '%.f' % 'x' '%.3f' % 'x' [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] [out] main:3: error: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float, SupportsFloat]") main:4: error: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float, SupportsFloat]") @@ -1322,6 +1334,7 @@ foo(b'a', b'b') == b'a:b' [case testStringInterpolationStarArgs] x = (1, 2) "%d%d" % (*x,) +[typing fixtures/typing-full.pyi] [case testBytePercentInterpolationSupported] b'%s' % (b'xyz',) @@ -1338,6 
+1351,7 @@ def f(t: Tuple[int, ...]) -> None: '%d %d' % t '%d %d %d' % t [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] [case testStringInterpolationUnionType] from typing import Tuple, Union @@ -1672,6 +1686,7 @@ class Good: x: Union[float, Good] '{:+f}'.format(x) [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] [case testFormatCallSpecialCases] '{:08b}'.format(int('3')) @@ -1683,6 +1698,7 @@ class S: '%d' % S() # This is OK however '{:%}'.format(0.001) [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] -- Lambdas -- ------- diff --git a/test-data/unit/fixtures/async_await.pyi b/test-data/unit/fixtures/async_await.pyi index ed64289c0d4d..96ade881111b 100644 --- a/test-data/unit/fixtures/async_await.pyi +++ b/test-data/unit/fixtures/async_await.pyi @@ -12,6 +12,7 @@ class object: class type: pass class function: pass class int: pass +class float: pass class str: pass class bool(int): pass class dict(typing.Generic[T, U]): pass diff --git a/test-data/unit/fixtures/bool.pyi b/test-data/unit/fixtures/bool.pyi index 07bc461819a0..b4f99451aea6 100644 --- a/test-data/unit/fixtures/bool.pyi +++ b/test-data/unit/fixtures/bool.pyi @@ -12,6 +12,7 @@ class tuple(Generic[T]): pass class function: pass class bool: pass class int: pass +class float: pass class str: pass class unicode: pass class ellipsis: pass diff --git a/test-data/unit/fixtures/python2.pyi b/test-data/unit/fixtures/python2.pyi index 0f5425f6682c..44cb9de9be1d 100644 --- a/test-data/unit/fixtures/python2.pyi +++ b/test-data/unit/fixtures/python2.pyi @@ -11,6 +11,7 @@ class type: class function: pass class int: pass +class float: pass class str: def format(self, *args, **kwars) -> str: ... 
class unicode: diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi index 19ab22257158..4478f0260c4c 100644 --- a/test-data/unit/fixtures/typing-full.pyi +++ b/test-data/unit/fixtures/typing-full.pyi @@ -140,6 +140,9 @@ class MutableMapping(Mapping[T, U], metaclass=ABCMeta): class SupportsInt(Protocol): def __int__(self) -> int: pass +class SupportsFloat(Protocol): + def __float__(self) -> float: pass + class SupportsAbs(Protocol[T_co]): def __abs__(self) -> T_co: pass diff --git a/test-data/unit/lib-stub/typing.pyi b/test-data/unit/lib-stub/typing.pyi index f22720cbb5ef..94324734a647 100644 --- a/test-data/unit/lib-stub/typing.pyi +++ b/test-data/unit/lib-stub/typing.pyi @@ -38,13 +38,6 @@ class Sequence(Iterable[T_co]): class Mapping(Generic[T, T_co]): def __getitem__(self, key: T) -> T_co: pass -class SupportsInt(Protocol): - def __int__(self) -> int: pass - -class SupportsFloat(Protocol): - def __float__(self) -> float: pass - -# This is an unofficial extension. def final(meth: T) -> T: pass TYPE_CHECKING = 1 diff --git a/test-data/unit/typexport-basic.test b/test-data/unit/typexport-basic.test index 7f6a6f0bda2a..8d6e93ac340c 100644 --- a/test-data/unit/typexport-basic.test +++ b/test-data/unit/typexport-basic.test @@ -1151,9 +1151,10 @@ ListExpr(3) : builtins.list[builtins.str] OpExpr(3) : builtins.list[builtins.str] [case testStringFormatting] -## .* +## IntExpr|OpExpr|StrExpr '%d' % 1 [builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] [out] IntExpr(2) : Literal[1]? 
OpExpr(2) : builtins.str From fe8309aaed79a714c3e5267c25ad7812f909259e Mon Sep 17 00:00:00 2001 From: Xuanda Yang Date: Fri, 20 Dec 2019 21:24:18 +0800 Subject: [PATCH 018/117] Flatten TypeAliasType when it is aliased as a Union (#8146) Resolves #8125 The main problem is not about flattening unions inside variants since the following code generates no error ```python from typing import Union T1 = Union[int, float] T2 = Union[Union[Union[int, float], float], Union[float, complex], complex] def foo(a: T2, b: T2) -> T2: return a + b ``` The problem, however, is because when using `TypeAliasType` to alias a Union, the `TypeAliasType` will not get flattened, so this PR fixes this. --- mypy/checkexpr.py | 10 ++++++---- mypy/types.py | 8 ++++++-- test-data/unit/check-unions.test | 15 +++++++++++++++ test-data/unit/fixtures/ops.pyi | 4 ++++ 4 files changed, 31 insertions(+), 6 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 2f8a5098baa4..7edaf7e2ad89 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -19,7 +19,7 @@ PartialType, DeletedType, UninhabitedType, TypeType, TypeOfAny, LiteralType, LiteralValue, is_named_instance, FunctionLike, StarType, is_optional, remove_optional, is_generic_instance, get_proper_type, ProperType, - get_proper_types + get_proper_types, flatten_nested_unions ) from mypy.nodes import ( NameExpr, RefExpr, Var, FuncDef, OverloadedFuncDef, TypeInfo, CallExpr, @@ -2589,7 +2589,9 @@ def check_op(self, method: str, base_type: Type, left_variants = [base_type] base_type = get_proper_type(base_type) if isinstance(base_type, UnionType): - left_variants = [item for item in base_type.relevant_items()] + left_variants = [item for item in + flatten_nested_unions(base_type.relevant_items(), + handle_type_alias_type=True)] right_type = self.accept(arg) # Step 1: We first try leaving the right arguments alone and destructure @@ -2632,8 +2634,8 @@ def check_op(self, method: str, base_type: Type, right_type = get_proper_type(right_type) 
if isinstance(right_type, UnionType): right_variants = [(item, TempNode(item, context=context)) - for item in right_type.relevant_items()] - + for item in flatten_nested_unions(right_type.relevant_items(), + handle_type_alias_type=True)] msg = self.msg.clean_copy() msg.disable_count = 0 all_results = [] diff --git a/mypy/types.py b/mypy/types.py index 40b8d311d5cd..f377753425f0 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -2236,15 +2236,19 @@ def has_type_vars(typ: Type) -> bool: return typ.accept(HasTypeVars()) -def flatten_nested_unions(types: Iterable[Type]) -> List[Type]: +def flatten_nested_unions(types: Iterable[Type], + handle_type_alias_type: bool = False) -> List[Type]: """Flatten nested unions in a type list.""" # This and similar functions on unions can cause infinite recursion # if passed a "pathological" alias like A = Union[int, A] or similar. # TODO: ban such aliases in semantic analyzer. flat_items = [] # type: List[Type] + if handle_type_alias_type: + types = get_proper_types(types) for tp in types: if isinstance(tp, ProperType) and isinstance(tp, UnionType): - flat_items.extend(flatten_nested_unions(tp.items)) + flat_items.extend(flatten_nested_unions(tp.items, + handle_type_alias_type=handle_type_alias_type)) else: flat_items.append(tp) return flat_items diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test index ed2b415e8f99..92e886fee419 100644 --- a/test-data/unit/check-unions.test +++ b/test-data/unit/check-unions.test @@ -1013,3 +1013,18 @@ y: Union[int, Dict[int, int]] = 1 if bool() else {} u: Union[int, List[int]] = [] if bool() else 1 v: Union[int, Dict[int, int]] = {} if bool() else 1 [builtins fixtures/isinstancelist.pyi] + +[case testFlattenTypeAliasWhenAliasedAsUnion] +from typing import Union + +T1 = int +T2 = Union[T1, float] +T3 = Union[T2, complex] +T4 = Union[T3, int] + +def foo(a: T2, b: T2) -> T2: + return a + b + +def bar(a: T4, b: T4) -> T4: # test multi-level alias + return a + b +[builtins 
fixtures/ops.pyi] diff --git a/test-data/unit/fixtures/ops.pyi b/test-data/unit/fixtures/ops.pyi index 34cfb176243e..0c3497b1667f 100644 --- a/test-data/unit/fixtures/ops.pyi +++ b/test-data/unit/fixtures/ops.pyi @@ -64,6 +64,10 @@ class float: def __truediv__(self, x: 'float') -> 'float': pass def __rtruediv__(self, x: 'float') -> 'float': pass +class complex: + def __add__(self, x: complex) -> complex: pass + def __radd__(self, x: complex) -> complex: pass + class BaseException: pass def __print(a1=None, a2=None, a3=None, a4=None): pass From 6972d79875e6e7519e3145cc8640280becfa4abd Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 20 Dec 2019 16:12:47 +0000 Subject: [PATCH 019/117] Support partial type inference with += and |= (#8183) Code like this no longer requires a type annotation: ``` x = [] x += [1, 2] ``` --- mypy/checker.py | 18 ++++++++++++++++-- test-data/unit/check-inference.test | 12 ++++++++++++ test-data/unit/fixtures/set.pyi | 1 + 3 files changed, 29 insertions(+), 2 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index f17ca5240b71..8528bf35248d 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -2042,7 +2042,7 @@ def check_assignment(self, lvalue: Lvalue, rvalue: Expression, infer_lvalue_type self.check_assignment_to_multiple_lvalues(lvalue.items, rvalue, rvalue, infer_lvalue_type) else: - self.try_infer_partial_generic_type_from_assignment(lvalue, rvalue) + self.try_infer_partial_generic_type_from_assignment(lvalue, rvalue, '=') lvalue_type, index_lvalue, inferred = self.check_lvalue(lvalue) # If we're assigning to __getattr__ or similar methods, check that the signature is # valid. 
@@ -2142,11 +2142,21 @@ def check_assignment(self, lvalue: Lvalue, rvalue: Expression, infer_lvalue_type rvalue_type = remove_instance_last_known_values(rvalue_type) self.infer_variable_type(inferred, lvalue, rvalue_type, rvalue) + # (type, operator) tuples for augmented assignments supported with partial types + partial_type_augmented_ops = { + ('builtins.list', '+'), + ('builtins.set', '|'), + } # type: Final + def try_infer_partial_generic_type_from_assignment(self, lvalue: Lvalue, - rvalue: Expression) -> None: + rvalue: Expression, + op: str) -> None: """Try to infer a precise type for partial generic type from assignment. + 'op' is '=' for normal assignment and a binary operator ('+', ...) for + augmented assignment. + Example where this happens: x = [] @@ -2165,6 +2175,9 @@ def try_infer_partial_generic_type_from_assignment(self, assert isinstance(typ, PartialType) if typ.type is None: return + # Return if this is an unsupported augmented assignment. + if op != '=' and (typ.type.fullname, op) not in self.partial_type_augmented_ops: + return # TODO: some logic here duplicates the None partial type counterpart # inlined in check_assignment(), see # 8043. partial_types = self.find_partial_types(var) @@ -3193,6 +3206,7 @@ def visit_while_stmt(self, s: WhileStmt) -> None: def visit_operator_assignment_stmt(self, s: OperatorAssignmentStmt) -> None: """Type check an operator assignment statement, e.g. x += 1.""" + self.try_infer_partial_generic_type_from_assignment(s.lvalue, s.rvalue, s.op) if isinstance(s.lvalue, MemberExpr): # Special case, some additional errors may be given for # assignments to read-only or final attributes. 
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index d672f73b5f15..ec837570e0bd 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -1766,6 +1766,18 @@ def f() -> None: [builtins fixtures/list.pyi] [typing fixtures/typing-full.pyi] +[case testInferListTypeFromInplaceAdd] +a = [] +a += [1] +reveal_type(a) # N: Revealed type is 'builtins.list[builtins.int*]' +[builtins fixtures/list.pyi] + +[case testInferSetTypeFromInplaceOr] +a = set() +a |= {'x'} +reveal_type(a) # N: Revealed type is 'builtins.set[builtins.str*]' +[builtins fixtures/set.pyi] + -- Inferring types of variables first initialized to None (partial types) -- ---------------------------------------------------------------------- diff --git a/test-data/unit/fixtures/set.pyi b/test-data/unit/fixtures/set.pyi index 335b1ad865f3..c2e1f6f75237 100644 --- a/test-data/unit/fixtures/set.pyi +++ b/test-data/unit/fixtures/set.pyi @@ -19,6 +19,7 @@ class ellipsis: pass class set(Iterable[T], Generic[T]): def __iter__(self) -> Iterator[T]: pass def __contains__(self, item: object) -> bool: pass + def __ior__(self, x: Set[T]) -> None: pass def add(self, x: T) -> None: pass def discard(self, x: T) -> None: pass def update(self, x: Set[T]) -> None: pass From 7ef87728ff5e7776c4e2574045a7d5a6f04d4703 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 20 Dec 2019 16:13:12 +0000 Subject: [PATCH 020/117] Further speed up tests (#8188) This adds 3 additional fixtures for typing and simplifies the default stubs for builtins and typing slightly. Previously we had a large number of fixtures for builtins but only two fixtures for typing, which didn't seem very balanced to me. I leave the typing-full.pyi fixture as a catch-all fixture when extra features are needed, so this shouldn't affect developer experience much. Overall this speeds up quick tests by about 6%. 
--- test-data/unit/check-abstract.test | 6 +- test-data/unit/check-async-await.test | 82 ++++++------- test-data/unit/check-class-namedtuple.test | 2 +- test-data/unit/check-classes.test | 13 +- test-data/unit/check-columns.test | 3 +- test-data/unit/check-dataclasses.test | 4 +- test-data/unit/check-default-plugin.test | 4 +- test-data/unit/check-enum.test | 2 +- test-data/unit/check-errorcodes.test | 4 +- test-data/unit/check-expressions.test | 46 +++---- test-data/unit/check-flags.test | 6 +- test-data/unit/check-functions.test | 14 ++- test-data/unit/check-incremental.test | 2 +- test-data/unit/check-inference.test | 5 +- test-data/unit/check-isinstance.test | 4 +- test-data/unit/check-literal.test | 16 +-- test-data/unit/check-modules.test | 1 + test-data/unit/check-newsemanal.test | 1 + test-data/unit/check-optional.test | 4 + test-data/unit/check-overloading.test | 30 ++--- test-data/unit/check-protocols.test | 10 +- test-data/unit/check-python38.test | 1 + test-data/unit/check-serialize.test | 2 +- test-data/unit/check-statements.test | 4 +- test-data/unit/check-tuples.test | 4 +- test-data/unit/check-typeddict.test | 103 ++++++++-------- test-data/unit/check-unions.test | 1 + test-data/unit/check-unreachable-code.test | 13 +- test-data/unit/check-varargs.test | 2 +- test-data/unit/deps-expressions.test | 2 +- test-data/unit/diff.test | 2 +- test-data/unit/fine-grained-modules.test | 2 +- test-data/unit/fine-grained-suggest.test | 1 + test-data/unit/fine-grained.test | 28 +++-- test-data/unit/fixtures/classmethod.pyi | 1 + test-data/unit/fixtures/primitives.pyi | 1 + test-data/unit/fixtures/typing-async.pyi | 120 +++++++++++++++++++ test-data/unit/fixtures/typing-medium.pyi | 69 +++++++++++ test-data/unit/fixtures/typing-typeddict.pyi | 67 +++++++++++ test-data/unit/lib-stub/builtins.pyi | 8 +- test-data/unit/lib-stub/typing.pyi | 12 +- test-data/unit/merge.test | 2 +- test-data/unit/semanal-types.test | 2 +- test-data/unit/typexport-basic.test | 2 +- 44 
files changed, 497 insertions(+), 211 deletions(-) create mode 100644 test-data/unit/fixtures/typing-async.pyi create mode 100644 test-data/unit/fixtures/typing-medium.pyi create mode 100644 test-data/unit/fixtures/typing-typeddict.pyi diff --git a/test-data/unit/check-abstract.test b/test-data/unit/check-abstract.test index 1fcbf8bf9f4e..eb78c287cd71 100644 --- a/test-data/unit/check-abstract.test +++ b/test-data/unit/check-abstract.test @@ -982,11 +982,11 @@ my_concrete_types = { my_abstract_types = { 'A': MyAbstractA, - 'B': MyAbstractB, + 'B': MyAbstractB, } -reveal_type(my_concrete_types) # N: Revealed type is 'builtins.dict[builtins.str*, def () -> __main__.MyAbstractType]' -reveal_type(my_abstract_types) # N: Revealed type is 'builtins.dict[builtins.str*, def () -> __main__.MyAbstractType]' +reveal_type(my_concrete_types) # N: Revealed type is 'builtins.dict[builtins.str*, def () -> __main__.MyAbstractType]' +reveal_type(my_abstract_types) # N: Revealed type is 'builtins.dict[builtins.str*, def () -> __main__.MyAbstractType]' a = my_concrete_types['A']() a.do() diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test index 2a3ce15fdf50..dacdfde9b556 100644 --- a/test-data/unit/check-async-await.test +++ b/test-data/unit/check-async-await.test @@ -6,7 +6,7 @@ async def f() -> int: pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncDefReturn] @@ -14,14 +14,14 @@ async def f() -> int: return 0 reveal_type(f()) # N: Revealed type is 'typing.Coroutine[Any, Any, builtins.int]' [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncDefMissingReturn] # flags: --warn-no-return async def f() -> int: make_this_not_trivial = 1 [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] main:2: error: Missing return statement @@ -31,7 +31,7 @@ 
async def f() -> int: make_this_not_trivial = 1 return [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] main:4: error: Return value expected @@ -42,7 +42,7 @@ async def f() -> int: reveal_type(x) # N: Revealed type is 'builtins.int*' return x [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] [case testAwaitDefaultContext] @@ -53,7 +53,7 @@ async def f(x: T) -> T: y = await f(x) reveal_type(y) return y -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] main:6: note: Revealed type is 'T`-1' @@ -65,7 +65,7 @@ async def f(x: T) -> T: y = await f(x) # type: Any reveal_type(y) return y -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] main:6: note: Revealed type is 'Any' @@ -77,7 +77,7 @@ async def f(x: T) -> T: y = await f(x) # type: int reveal_type(y) return x -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] main:5: error: Argument 1 to "f" has incompatible type "T"; expected "int" main:6: note: Revealed type is 'builtins.int' @@ -91,7 +91,7 @@ def g() -> Generator[int, None, str]: async def f() -> int: x = await g() return x -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] main:7: error: Incompatible types in "await" (actual type "Generator[int, None, str]", expected type "Awaitable[Any]") @@ -103,7 +103,7 @@ def g() -> Iterator[Any]: async def f() -> int: x = await g() return x -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] main:6: error: Incompatible types in "await" (actual type "Iterator[Any]", expected type "Awaitable[Any]") @@ -115,7 +115,7 @@ async def f() -> int: x = await g() return x [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] main:5: error: Incompatible types in "await" (actual type "int", expected type "Awaitable[Any]") @@ 
-127,7 +127,7 @@ async def f() -> str: x = await g() # type: str return x [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] main:5: error: Incompatible types in assignment (expression has type "int", variable has type "str") @@ -139,7 +139,7 @@ async def f() -> str: x = await g() return x [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] main:6: error: Incompatible return value type (got "int", expected "str") @@ -152,7 +152,7 @@ async def f() -> None: async for x in C(): reveal_type(x) # N: Revealed type is 'builtins.int*' [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncForError] @@ -161,7 +161,7 @@ async def f() -> None: async for x in [1]: pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] main:4: error: "List[int]" has no attribute "__aiter__" (not async iterable) @@ -180,7 +180,7 @@ async def f() -> None: async for z in C(): # type: Union[int, str] reveal_type(z) # N: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncForComprehension] # flags: --python-version 3.6 @@ -220,7 +220,7 @@ async def generatorexp(obj: Iterable[int]): reveal_type(lst2) # N: Revealed type is 'typing.AsyncGenerator[builtins.int*, None]' [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncForComprehensionErrors] # flags: --python-version 3.6 @@ -251,7 +251,7 @@ main:19: error: "asyncify[int]" has no attribute "__iter__"; maybe "__aiter__"? main:20: error: "Iterable[int]" has no attribute "__aiter__" (not async iterable) main:21: error: "asyncify[int]" has no attribute "__iter__"; maybe "__aiter__"? 
(not iterable) [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncWith] @@ -262,7 +262,7 @@ async def f() -> None: async with C() as x: reveal_type(x) # N: Revealed type is 'builtins.int*' [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncWithError] @@ -274,7 +274,7 @@ async def f() -> None: async with C() as x: pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] main:6: error: "C" has no attribute "__aenter__"; maybe "__enter__"? main:6: error: "C" has no attribute "__aexit__"; maybe "__exit__"? @@ -288,7 +288,7 @@ async def f() -> None: async with C() as x: # E: Incompatible types in "async with" for "__aenter__" (actual type "int", expected type "Awaitable[Any]") pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncWithErrorBadAenter2] @@ -299,7 +299,7 @@ async def f() -> None: async with C() as x: # E: "None" has no attribute "__await__" pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncWithErrorBadAexit] @@ -310,7 +310,7 @@ async def f() -> None: async with C() as x: # E: Incompatible types in "async with" for "__aexit__" (actual type "int", expected type "Awaitable[Any]") pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncWithErrorBadAexit2] @@ -321,7 +321,7 @@ async def f() -> None: async with C() as x: # E: "None" has no attribute "__await__" pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncWithTypeComments] @@ -338,7 +338,7 @@ async def f() -> None: async with C() as a: # type: int, int # E: Syntax error in type annotation # N: Suggestion: Use 
Tuple[T1, ..., Tn] instead of (T1, ..., Tn) pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testNoYieldInAsyncDef] # flags: --python-version 3.5 @@ -376,7 +376,7 @@ def g() -> Generator[Any, None, str]: x = yield from f() return x [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] main:6: error: "yield from" can't be applied to "Coroutine[Any, Any, str]" @@ -405,7 +405,7 @@ async def main() -> None: async for z in I(): reveal_type(z) # N: Revealed type is 'builtins.int' [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testYieldTypeCheckInDecoratedCoroutine] @@ -421,7 +421,7 @@ def f() -> Generator[int, str, int]: else: return '' # E: Incompatible return value type (got "str", expected "int") [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] -- Async generators (PEP 525), some test cases adapted from the PEP text -- --------------------------------------------------------------------- @@ -452,7 +452,7 @@ async def wrong_return() -> Generator[int, None, None]: # E: The return type of yield 3 [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncGeneratorReturnIterator] # flags: --python-version 3.6 @@ -468,7 +468,7 @@ async def use_gen() -> None: reveal_type(item) # N: Revealed type is 'builtins.int*' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncGeneratorManualIter] # flags: --python-version 3.6 @@ -486,7 +486,7 @@ async def user() -> None: reveal_type(await gen.__anext__()) # N: Revealed type is 'builtins.int*' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncGeneratorAsend] # flags: --python-version 3.6 @@ -507,7 +507,7 @@ 
async def h() -> None: reveal_type(await g.asend('hello')) # N: Revealed type is 'builtins.int*' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncGeneratorAthrow] # flags: --python-version 3.6 @@ -526,7 +526,7 @@ async def h() -> None: reveal_type(await g.athrow(BaseException)) # N: Revealed type is 'builtins.str*' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncGeneratorNoSyncIteration] # flags: --python-version 3.6 @@ -541,7 +541,7 @@ def h() -> None: pass [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] main:9: error: "AsyncGenerator[int, None]" has no attribute "__iter__"; maybe "__aiter__"? (not iterable) @@ -557,7 +557,7 @@ async def gen() -> AsyncGenerator[int, None]: yield from f() # E: 'yield from' in async function [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncGeneratorNoReturnWithValue] # flags: --python-version 3.6 @@ -579,7 +579,7 @@ async def return_f() -> AsyncGenerator[int, None]: return f() # E: 'return' with value in async generator is not allowed [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] -- The full matrix of coroutine compatibility -- ------------------------------------------ @@ -667,7 +667,7 @@ async def decorated_host_coroutine() -> None: x = await other_coroutine() [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] [case testAsyncGenDisallowUntyped] @@ -681,7 +681,7 @@ async def f() -> AsyncGenerator[int, None]: async def g() -> AsyncGenerator[Any, None]: yield 0 [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] [case testAsyncGenDisallowUntypedTriggers] @@ -697,7 +697,7 @@ async def h() -> Any: async def 
g(): # E: Function is missing a return type annotation yield 0 [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] [case testAsyncOverloadedFunction] @@ -712,7 +712,7 @@ async def f(x): reveal_type(f) # N: Revealed type is 'Overload(def (x: builtins.int) -> typing.Coroutine[Any, Any, builtins.int], def (x: builtins.str) -> typing.Coroutine[Any, Any, builtins.str])' [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncForwardRefInBody] @@ -730,4 +730,4 @@ async def g() -> None: reveal_type(f) # N: Revealed type is 'def () -> typing.Coroutine[Any, Any, None]' reveal_type(g) # N: Revealed type is 'Any' [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test index d9e4061e6aac..c0146aedf245 100644 --- a/test-data/unit/check-class-namedtuple.test +++ b/test-data/unit/check-class-namedtuple.test @@ -518,7 +518,7 @@ reveal_type(XMeth(1).asyncdouble()) # N: Revealed type is 'typing.Coroutine[Any reveal_type(XMeth(42).x) # N: Revealed type is 'builtins.int' reveal_type(XRepr(42).__str__()) # N: Revealed type is 'builtins.str' reveal_type(XRepr(1, 2).__add__(XRepr(3))) # N: Revealed type is 'builtins.int' -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testNewNamedTupleOverloading] from typing import NamedTuple, overload diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 6681d30df76a..333ffd7b7ccd 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -1075,7 +1075,7 @@ class A: def g() -> None: "" + 1 # E: Unsupported operand types for + ("str" and "int") "" + 1 # E: Unsupported operand types for + ("str" and "int") -[out] +[builtins fixtures/primitives.pyi] -- Static methods -- -------------- @@ 
-1683,7 +1683,7 @@ b = None # type: B if int(): b = a # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = b -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testDucktypeTransitivityDecorator] from typing import _promote @@ -1697,7 +1697,7 @@ c = None # type: C if int(): c = a # E: Incompatible types in assignment (expression has type "A", variable has type "C") a = c -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] -- Hard coded type promotions @@ -4433,7 +4433,7 @@ def parse_ast(name_dict: NameDict) -> None: pass reveal_type(name_dict['test']) # N: Revealed type is 'Tuple[builtins.bool, fallback=__main__.NameInfo]' [builtins fixtures/isinstancelist.pyi] -[out] +[typing fixtures/typing-medium.pyi] [case testCrashInForwardRefToTypedDictWithIsinstance] from mypy_extensions import TypedDict @@ -4448,7 +4448,7 @@ def parse_ast(name_dict: NameDict) -> None: pass reveal_type(name_dict['']['ast']) # N: Revealed type is 'builtins.bool' [builtins fixtures/isinstancelist.pyi] -[out] +[typing fixtures/typing-medium.pyi] [case testCorrectIsinstanceInForwardRefToNewType] from typing import Dict, NewType @@ -4467,7 +4467,7 @@ def parse_ast(name_dict: NameDict) -> None: x = NameInfo(Base()) # OK x = Base() # E: Incompatible types in assignment (expression has type "Base", variable has type "NameInfo") [builtins fixtures/isinstancelist.pyi] -[out] +[typing fixtures/typing-medium.pyi] [case testNoCrashForwardRefToBrokenDoubleNewType] from typing import Any, Dict, List, NewType @@ -6596,3 +6596,4 @@ reveal_type(D() + "str") # N: Revealed type is 'Any' reveal_type(0.5 + D1()) # N: Revealed type is 'Any' reveal_type(D1() + 0.5) # N: Revealed type is '__main__.D1' +[builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-columns.test b/test-data/unit/check-columns.test index aeb6777d8bd2..4bc70457ac29 100644 --- a/test-data/unit/check-columns.test +++ 
b/test-data/unit/check-columns.test @@ -80,6 +80,7 @@ x = 15 y = 'hello' if int(): x = 2; y = x; y += 1 +[builtins fixtures/primitives.pyi] [out] main:4:16: error: Incompatible types in assignment (expression has type "int", variable has type "str") main:4:24: error: Unsupported operand types for + ("str" and "int") @@ -246,7 +247,7 @@ t: D = {'x': if int(): del t['y'] # E:5: TypedDict "D" has no key 'y' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testColumnSignatureIncompatibleWithSuperType] class A: diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index ed26afed5e6c..783a142339ba 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -15,7 +15,7 @@ Person('John', 32) Person('Jonh', 21, None) # E: Too many arguments for "Person" [builtins fixtures/list.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testDataclassesCustomInit] # flags: --python-version 3.6 @@ -53,7 +53,7 @@ Person(32, 'John') Person(21, 'Jonh', None) # E: Too many arguments for "Person" [builtins fixtures/list.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testDataclassesDeepInheritance] # flags: --python-version 3.6 diff --git a/test-data/unit/check-default-plugin.test b/test-data/unit/check-default-plugin.test index e331ff73a8f1..e479d6b58823 100644 --- a/test-data/unit/check-default-plugin.test +++ b/test-data/unit/check-default-plugin.test @@ -21,7 +21,7 @@ with yield_id(1) as x: f = yield_id def g(x, y): pass f = g # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], Any]", variable has type "Callable[[T], GeneratorContextManager[T]]") -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testContextManagerWithUnspecifiedArguments] from contextlib import contextmanager @@ -30,4 +30,4 @@ from typing import Callable, Iterator 
c: Callable[..., Iterator[int]] reveal_type(c) # N: Revealed type is 'def (*Any, **Any) -> typing.Iterator[builtins.int]' reveal_type(contextmanager(c)) # N: Revealed type is 'def (*Any, **Any) -> contextlib.GeneratorContextManager[builtins.int*]' -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test index 27979af0ee21..241cd1ca049c 100644 --- a/test-data/unit/check-enum.test +++ b/test-data/unit/check-enum.test @@ -336,7 +336,7 @@ U = Enum('U', *['a']) V = Enum('U', **{'a': 1}) W = Enum('W', 'a b') W.c -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [out] main:2: error: Too few arguments for Enum() main:3: error: Enum() expects a string, tuple, list or dict literal as the second argument diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index 4dfb47fcc51e..89e777004551 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -362,7 +362,7 @@ async def asyncf(): # E: Function is missing a return type annotation [no-unty async def asyncf2(x: int): # E: Function is missing a return type annotation [no-untyped-def] return 0 -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testErrorCodeCallUntypedFunction] # flags: --disallow-untyped-calls @@ -615,7 +615,7 @@ def g() -> int: '{}'.format(b'abc') # E: On Python 3 '{}'.format(b'abc') produces "b'abc'"; use !r if this is a desired behavior [str-bytes-safe] '%s' % b'abc' # E: On Python 3 '%s' % b'abc' produces "b'abc'"; use %r if this is a desired behavior [str-bytes-safe] [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testErrorCodeIgnoreNamedDefinedNote] x: List[int] # type: ignore[name-defined] diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index ff8d7cdd4fe8..bb4511228798 100644 --- 
a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -791,7 +791,7 @@ divmod('foo', f) # E: Unsupported operand types for divmod ("str" and "float") divmod('foo', d) # E: Unsupported operand types for divmod ("str" and "Decimal") [builtins fixtures/divmod.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] -- Unary operators @@ -1152,7 +1152,7 @@ i, f, s, t = None, None, None, None # type: (int, float, str, Tuple[int]) '%i' % f '%o' % f # E: Incompatible types in string interpolation (expression has type "float", placeholder has type "int") [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testStringInterpolationSAcceptsAnyType] from typing import Any @@ -1180,7 +1180,7 @@ reveal_type('%(key)s' % {'key': xu}) # N: Revealed type is 'builtins.unicode' reveal_type('%r' % xu) # N: Revealed type is 'builtins.str' reveal_type('%s' % xs) # N: Revealed type is 'builtins.str' [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testStringInterpolationCount] '%d %d' % 1 # E: Not enough arguments for format string @@ -1191,14 +1191,14 @@ t = 1, 's' '%s %d' % t # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float, SupportsInt]") '%d' % t # E: Not all arguments converted during string formatting [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testStringInterpolationWithAnyType] from typing import Any a = None # type: Any '%d %d' % a [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testStringInterpolationInvalidPlaceholder] '%W' % 1 # E: Unsupported format character 'W' @@ -1225,7 +1225,7 @@ b'%a' % 3 '%*f' % (4, 3.14) '%*f' % (1.1, 3.14) # E: * wants int [builtins fixtures/primitives.pyi] -[typing 
fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testStringInterpolationPrecision] '%.2f' % 3.14 @@ -1233,7 +1233,7 @@ b'%a' % 3 '%.*f' % (4, 3.14) '%.*f' % (1.1, 3.14) # E: * wants int [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testStringInterpolationWidthAndPrecision] '%4.2f' % 3.14 @@ -1242,7 +1242,7 @@ b'%a' % 3 '%*.*f' % 3.14 # E: Not enough arguments for format string '%*.*f' % (4, 2, 3.14) [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testStringInterpolationFlagsAndLengthModifiers] '%04hd' % 1 @@ -1250,7 +1250,7 @@ b'%a' % 3 '%+*Ld' % (1, 1) '% .*ld' % (1, 1) [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testStringInterpolationDoublePercentage] '%% %d' % 1 @@ -1258,7 +1258,7 @@ b'%a' % 3 '%*%' % 1 '%*% %d' % 1 # E: Not enough arguments for format string [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testStringInterpolationC] '%c' % 1 @@ -1272,7 +1272,7 @@ b'%a' % 3 '%(a)d %(b)s' % {'a': 's', 'b': 1} # E: Incompatible types in string interpolation (expression has type "str", placeholder with key 'a' has type "Union[int, float, SupportsInt]") b'%(x)s' % {b'x': b'data'} [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testStringInterpolationMappingKeys] '%()d' % {'': 2} @@ -1280,7 +1280,7 @@ b'%(x)s' % {b'x': b'data'} '%(q)d' % {'a': 1, 'b': 2, 'c': 3} # E: Key 'q' not found in mapping '%(a)d %%' % {'a': 1} [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testStringInterpolationMappingDictTypes] from typing import Any, Dict @@ -1310,7 +1310,7 @@ di = None # type: Dict[int, int] '%(a).1d' % {'a': 1} '%(a)#1.1ld' % {'a': 1} [builtins 
fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testStringInterpolationFloatPrecision] '%.f' % 1.2 @@ -1318,7 +1318,7 @@ di = None # type: Dict[int, int] '%.f' % 'x' '%.3f' % 'x' [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [out] main:3: error: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float, SupportsFloat]") main:4: error: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float, SupportsFloat]") @@ -1334,7 +1334,7 @@ foo(b'a', b'b') == b'a:b' [case testStringInterpolationStarArgs] x = (1, 2) "%d%d" % (*x,) -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testBytePercentInterpolationSupported] b'%s' % (b'xyz',) @@ -1351,7 +1351,7 @@ def f(t: Tuple[int, ...]) -> None: '%d %d' % t '%d %d %d' % t [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testStringInterpolationUnionType] from typing import Tuple, Union @@ -1686,7 +1686,7 @@ class Good: x: Union[float, Good] '{:+f}'.format(x) [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testFormatCallSpecialCases] '{:08b}'.format(int('3')) @@ -1698,7 +1698,7 @@ class S: '%d' % S() # This is OK however '{:%}'.format(0.001) [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] -- Lambdas -- ------- @@ -2312,7 +2312,7 @@ d = {**a, **b, 'c': 3} e = {1: 'a', **a} # E: Argument 1 to "update" of "dict" has incompatible type "Dict[str, int]"; expected "Mapping[int, str]" f = {**b} # type: Dict[int, int] # E: List item 0 has incompatible type "Dict[str, int]"; expected "Mapping[int, int]" [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case 
testDictIncompatibleTypeErrorMessage] from typing import Dict, Callable @@ -2429,7 +2429,7 @@ cb: Union[Container[A], Container[B]] # flags: --strict-equality b'abc' in b'abcde' [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testStrictEqualityBytesSpecialUnion] # flags: --strict-equality @@ -2439,20 +2439,20 @@ x: Union[bytes, str] b'abc' in x x in b'abc' [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testStrictEqualityByteArraySpecial] # flags: --strict-equality b'abc' in bytearray(b'abcde') bytearray(b'abc') in b'abcde' # OK on Python 3 [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testBytesVsByteArray_python2] # flags: --strict-equality --py2 b'hi' in bytearray(b'hi') [builtins_py2 fixtures/python2.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testStrictEqualityNoPromotePy3] # flags: --strict-equality diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index a2db76dd9434..7a2e7af8233f 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -59,14 +59,14 @@ async def f(): # E: Function is missing a return type annotation \ # N: Use "-> None" if function does not return a value pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testAsyncUnannotatedArgument] # flags: --disallow-untyped-defs async def f(x) -> None: # E: Function is missing a type annotation for one or more arguments pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testAsyncUnannotatedReturn] # flags: --disallow-untyped-defs @@ -77,7 +77,7 @@ async def f(x: int): # E: Function is missing a return type annotation async def g(x: int) -> Any: pass [builtins 
fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [case testDisallowUntypedDefsUntypedDecorator] # flags: --disallow-untyped-decorators diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index 7f723fbe31a6..07999e630127 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -919,7 +919,7 @@ from typing import no_type_check @no_type_check def foo(x: 'bar', y: {'x': 4}) -> 42: 1 + 'x' -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testNoTypeCheckDecoratorOnMethod2] import typing @@ -931,7 +931,7 @@ def foo(x: 's', y: {'x': 4}) -> 42: @typing.no_type_check def bar() -> None: 1 + 'x' -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testCallingNoTypeCheckFunction] import typing @@ -942,7 +942,7 @@ def foo(x: {1:2}) -> [1]: foo() foo(1, 'b') -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testCallingNoTypeCheckFunction2] import typing @@ -953,7 +953,7 @@ def f() -> None: @typing.no_type_check def foo(x: {1:2}) -> [1]: 1 + 'x' -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testNoTypeCheckDecoratorSemanticError] import typing @@ -961,7 +961,7 @@ import typing @typing.no_type_check def foo(x: {1:2}) -> [1]: x = y -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] -- Forward references to decorated functions @@ -1345,6 +1345,7 @@ else: x = 1 f(1) f('x') # fail +[builtins fixtures/primitives.pyi] [out] main:5: error: Unsupported operand types for + ("str" and "int") main:10: error: Unsupported operand types for + ("int" and "str") @@ -1550,7 +1551,7 @@ from contextlib import contextmanager @contextmanager def f(): yield -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] -- Conditional method definition @@ -1605,6 +1606,7 @@ class A: x = 1 A().f(1) A().f('x') # fail +[builtins 
fixtures/primitives.pyi] [out] main:6: error: Unsupported operand types for + ("str" and "int") main:11: error: Unsupported operand types for + ("int" and "str") diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index 53fa0d190f0c..6439e32b678b 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -4086,7 +4086,7 @@ from typing import Iterable from a import Point p: Point it: Iterable[int] = p # change -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [builtins fixtures/dict.pyi] [out] tmp/b.py:4: error: Incompatible types in assignment (expression has type "Point", variable has type "Iterable[int]") diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index ec837570e0bd..d482e90e2fa4 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -1332,7 +1332,7 @@ if int(): # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant [builtins fixtures/list.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testListWithDucktypeCompatibilityAndTransitivity] from typing import List, _promote @@ -1355,7 +1355,7 @@ if int(): # N: "List" is invariant -- see http://mypy.readthedocs.io/en/latest/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant [builtins fixtures/list.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] -- Inferring type of variable when initialized to an empty collection @@ -2704,6 +2704,7 @@ def bar() -> None: y + '' x + '' # E: Unsupported operand types for + ("int" and "str") y + 0 # E: Unsupported operand types for + ("str" and "int") +[builtins fixtures/primitives.pyi] [case testUnusedTargetNotImport] import d, c, b, a diff --git a/test-data/unit/check-isinstance.test 
b/test-data/unit/check-isinstance.test index 660f1529a379..9c78bb382cc8 100644 --- a/test-data/unit/check-isinstance.test +++ b/test-data/unit/check-isinstance.test @@ -2020,7 +2020,7 @@ if y not in z: reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' else: reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [builtins fixtures/list.pyi] [out] @@ -2077,7 +2077,7 @@ def f() -> None: if x not in td: return reveal_type(x) # N: Revealed type is 'builtins.str' -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [builtins fixtures/dict.pyi] [out] diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test index 429057a02f43..96a08f344c45 100644 --- a/test-data/unit/check-literal.test +++ b/test-data/unit/check-literal.test @@ -44,7 +44,7 @@ x = 43 # E: Incompatible types in assignment (expression has type "Literal[43]" y: Literal[43] y = 43 -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testLiteralParsingPython2] # flags: --python-version 2.7 @@ -1720,7 +1720,7 @@ a *= b # E: Incompatible types in assignment (expression has ty b *= a reveal_type(b) # N: Revealed type is 'builtins.int' -[out] +[builtins fixtures/primitives.pyi] [case testLiteralFallbackInheritedMethodsWorkCorrectly] from typing_extensions import Literal @@ -2185,7 +2185,7 @@ del d[a_key] # E: Key 'a' of TypedDict "Outer" cannot be delete del d[b_key] del d[c_key] # E: TypedDict "Outer" has no key 'c' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [out] [case testLiteralIntelligentIndexingUsingFinal] @@ -2224,7 +2224,7 @@ b[int_key_bad] # E: Tuple index out of range c[str_key_bad] # E: TypedDict "MyDict" has no key 'missing' c.get(str_key_bad, u) # E: TypedDict "MyDict" has no key 'missing' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing 
fixtures/typing-typeddict.pyi] [out] [case testLiteralIntelligentIndexingTupleUnions] @@ -2306,7 +2306,7 @@ del test[good_keys] # E: Key 'a' of TypedDict "Test" cannot be delet del test[bad_keys] # E: Key 'a' of TypedDict "Test" cannot be deleted \ # E: TypedDict "Test" has no key 'bad' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [out] [case testLiteralIntelligentIndexingTypedDictPython2-skip] @@ -2355,7 +2355,7 @@ from mypy_extensions import TypedDict UnicodeDict = TypedDict(b'UnicodeDict', {'key': int}) [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testLiteralIntelligentIndexingMultiTypedDict] from typing import Union @@ -2393,7 +2393,7 @@ reveal_type(x.get(good_keys)) # N: Revealed type is 'Union[__main__.B, __m reveal_type(x.get(good_keys, 3)) # N: Revealed type is 'Union[__main__.B, Literal[3]?, __main__.C]' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] -- -- Interactions with 'Final' @@ -2866,7 +2866,7 @@ expect_2(final_dict["foo"]) # E: Argument 1 to "expect_2" has incompatible type expect_2(final_set_1.pop()) # E: Argument 1 to "expect_2" has incompatible type "int"; expected "Literal[2]" expect_2(final_set_2.pop()) # E: Argument 1 to "expect_2" has incompatible type "int"; expected "Literal[2]" [builtins fixtures/isinstancelist.pyi] -[out] +[typing fixtures/typing-medium.pyi] -- -- Tests for Literals and enums diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index 526b7b72b298..bb14c1c007ba 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -1500,6 +1500,7 @@ if TYPE_CHECKING: def part4_thing(a: int) -> str: pass [builtins fixtures/bool.pyi] +[typing fixtures/typing-medium.pyi] [out] tmp/part3.py:2: note: Revealed type is 'def (a: builtins.int) -> builtins.str' diff --git 
a/test-data/unit/check-newsemanal.test b/test-data/unit/check-newsemanal.test index 1d7723deed21..a51ef3d4d00c 100644 --- a/test-data/unit/check-newsemanal.test +++ b/test-data/unit/check-newsemanal.test @@ -2235,6 +2235,7 @@ class Box: def __init__(self, value: int) -> None: ... [builtins fixtures/classmethod.pyi] +[typing fixtures/typing-medium.pyi] [case testNewAnalyzerCastForward1] from typing import cast diff --git a/test-data/unit/check-optional.test b/test-data/unit/check-optional.test index d1993fdc4ae6..4b18cb59d1a7 100644 --- a/test-data/unit/check-optional.test +++ b/test-data/unit/check-optional.test @@ -409,6 +409,8 @@ from typing import Optional x = None # type: Optional[int] x + 1 +[builtins fixtures/primitives.pyi] + [case testOptionalWhitelistPermitsOtherErrors] # flags: --strict-optional-whitelist import a @@ -423,6 +425,7 @@ from typing import Optional x = None # type: Optional[int] x + 1 1 + "foo" +[builtins fixtures/primitives.pyi] [out] tmp/b.py:4: error: Unsupported operand types for + ("int" and "str") @@ -439,6 +442,7 @@ x + "foo" from typing import Optional x = None # type: Optional[int] x + 1 +[builtins fixtures/primitives.pyi] [out] tmp/a.py:3: error: Unsupported left operand type for + ("None") tmp/a.py:3: note: Left operand is of type "Optional[str]" diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index 0351111c8fa0..ae2ef0c07bd3 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -1082,7 +1082,7 @@ def f(n: A) -> A: f(B()) + 'x' # E: Unsupported left operand type for + ("B") f(A()) + 'x' # E: Unsupported left operand type for + ("A") -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testOverloadingAndIntFloatSubtyping] from foo import * @@ -3538,7 +3538,7 @@ reveal_type(mymap(f2, seq)) # N: Revealed type is 'typing.Iterable[builtins.int reveal_type(mymap(f3, seq)) # N: Revealed type is 'typing.Iterable[builtins.str*]' 
[builtins fixtures/list.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testOverloadsNoneAndTypeVarsWithStrictOptional] # flags: --strict-optional @@ -3563,7 +3563,7 @@ reveal_type(mymap(f2, seq)) # N: Revealed type is 'typing.Iterable[builtins.int reveal_type(mymap(f3, seq)) # N: Revealed type is 'Union[typing.Iterable[builtins.str*], typing.Iterable[builtins.int*]]' [builtins fixtures/list.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testOverloadsAndNoReturnNarrowTypeNoStrictOptional1] # flags: --no-strict-optional @@ -3595,7 +3595,7 @@ def test_narrow_int() -> None: # TODO: maybe we should make mypy report a warning instead? [builtins fixtures/isinstance.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testOverloadsAndNoReturnNarrowTypeWithStrictOptional1] # flags: --strict-optional @@ -3627,7 +3627,7 @@ def test_narrow_int() -> None: # TODO: maybe we should make mypy report a warning instead? 
[builtins fixtures/isinstance.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testOverloadsAndNoReturnNarrowTypeNoStrictOptional2] # flags: --no-strict-optional @@ -3659,7 +3659,7 @@ def test_narrow_none() -> None: reveal_type(c) # Note: branch is now dead, so no type is revealed [builtins fixtures/isinstance.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testOverloadsAndNoReturnNarrowTypeWithStrictOptional2] # flags: --strict-optional @@ -3691,7 +3691,7 @@ def test_narrow_none() -> None: reveal_type(c) # Branch is now dead [builtins fixtures/isinstance.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testOverloadsAndNoReturnNarrowTypeNoStrictOptional3] @@ -3723,7 +3723,7 @@ def test_narrow_none_v2() -> None: reveal_type(c) # Note: branch is now dead, so no type is revealed [builtins fixtures/isinstance.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testOverloadsAndNoReturnNarrowTypeWithStrictOptional3] # flags: --strict-optional @@ -3754,7 +3754,7 @@ def test_narrow_none_v2() -> None: reveal_type(c) # Note: branch is now dead, so no type is revealed [builtins fixtures/isinstance.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testOverloadsAndNoReturnNarrowWhenBlacklistingSubtype] from typing import TypeVar, NoReturn, Union, overload @@ -3784,7 +3784,7 @@ def test() -> None: reveal_type(val2) # Branch now dead [builtins fixtures/isinstance.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testOverloadsAndNoReturnNarrowWhenBlacklistingSubtype2] from typing import TypeVar, NoReturn, Union, overload @@ -3812,7 +3812,7 @@ def test_v2(val: Union[A, B], val2: A) -> None: reveal_type(val2) # Branch now dead [builtins fixtures/isinstance.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testOverloadWithNonGenericDescriptor] from typing 
import overload, Any, Optional, Union @@ -3840,7 +3840,7 @@ reveal_type(MyModel.my_number) # N: Revealed type is '__main__.NumberAttr reveal_type(MyModel.my_number.foo()) # N: Revealed type is 'builtins.str' [builtins fixtures/isinstance.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testOverloadWithNonGenericDescriptorLookalike] from typing import overload, Any, Optional, Union @@ -3883,7 +3883,7 @@ reveal_type(NumberAttribute[MyModel]().__get__(None, MyModel)) # N: Revealed ty reveal_type(NumberAttribute[str]().__get__(None, str)) # N: Revealed type is '__main__.NumberAttribute[builtins.str*]' [builtins fixtures/isinstance.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testOverloadWithGenericDescriptorLookalike] from typing import overload, Any, Optional, TypeVar, Type, Union, Generic @@ -4685,7 +4685,7 @@ def g(x: int) -> str: ... def g(x: str) -> int: ... [builtins fixtures/list.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [out] [case testNestedOverloadsNoCrash] @@ -4860,7 +4860,7 @@ def g(x: Union[int, float]) -> Union[List[int], List[float]]: return floats [builtins fixtures/isinstancelist.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testOverloadsTypesAndUnions] from typing import overload, Type, Union diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index c0dae9412911..02e3b8d4c869 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -521,7 +521,7 @@ reveal_type(close(arg)) # N: Revealed type is 'builtins.int*' reveal_type(close_all([F()])) # N: Revealed type is 'builtins.int*' reveal_type(close_all([arg])) # N: Revealed type is 'builtins.int*' [builtins fixtures/isinstancelist.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testProtocolGenericInference2] from typing import Generic, TypeVar, Protocol @@ 
-786,7 +786,7 @@ t = D[int]() # OK if int(): t = C() # E: Incompatible types in assignment (expression has type "C", variable has type "Traversable") [builtins fixtures/list.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testRecursiveProtocols2] from typing import Protocol, TypeVar @@ -843,7 +843,7 @@ if int(): t = B() # E: Incompatible types in assignment (expression has type "B", variable has type "P1") t = C() # E: Incompatible types in assignment (expression has type "C", variable has type "P1") [builtins fixtures/list.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testMutuallyRecursiveProtocolsTypesWithSubteMismatch] from typing import Protocol, Sequence, List @@ -1776,7 +1776,7 @@ bar((1, 2)) bar(1) # E: Argument 1 to "bar" has incompatible type "int"; expected "Sized" [builtins fixtures/isinstancelist.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testBasicSupportsIntProtocol] from typing import SupportsInt @@ -1792,7 +1792,7 @@ foo(Bar()) foo('no way') # E: Argument 1 to "foo" has incompatible type "str"; expected "SupportsInt" [builtins fixtures/isinstancelist.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] -- Additional tests and corner cases for protocols -- ---------------------------------------------- diff --git a/test-data/unit/check-python38.test b/test-data/unit/check-python38.test index e1507d9a2ed4..fd0e46ebd1f4 100644 --- a/test-data/unit/check-python38.test +++ b/test-data/unit/check-python38.test @@ -94,6 +94,7 @@ def g(x: int): ... 
+ # type: ignore # E: unused 'type: ignore' comment 0 # type: ignore ) # type: ignore # E: unused 'type: ignore' comment +[builtins fixtures/primitives.pyi] [case testIgnoreScopeUnused2] # flags: --warn-unused-ignores diff --git a/test-data/unit/check-serialize.test b/test-data/unit/check-serialize.test index 6f67d222fcf5..7c747534128d 100644 --- a/test-data/unit/check-serialize.test +++ b/test-data/unit/check-serialize.test @@ -378,7 +378,7 @@ class A(metaclass=ABCMeta): def f(self) -> None: pass @abstractproperty def x(self) -> int: return 0 -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [out2] tmp/a.py:2: error: Cannot instantiate abstract class 'A' with abstract attributes 'f' and 'x' tmp/a.py:9: error: Property "x" defined in "A" is read-only diff --git a/test-data/unit/check-statements.test b/test-data/unit/check-statements.test index b8c8022c6734..82d1b45e19a3 100644 --- a/test-data/unit/check-statements.test +++ b/test-data/unit/check-statements.test @@ -1751,7 +1751,7 @@ with cm as g: N = TypedDict('N', {'x': int}) [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [out] [case testForwardRefsInWithStatement] @@ -1764,7 +1764,7 @@ with cm as g: # type: N N = TypedDict('N', {'x': int}) [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [out] [case testGlobalWithoutInitialization] diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index 4058bd509535..646fee10d5cf 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -965,7 +965,7 @@ a = () from typing import Sized a = None # type: Sized a = () -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testTupleWithStarExpr1] @@ -1281,6 +1281,6 @@ t5: Tuple[int, int] = (1, 2, "s", 4) # E: Incompatible types in assignment (exp # long initializer assignment with mismatched pairs t6: Tuple[int, int, int, int, 
int, int, int, int, int, int, int, int] = (1, 2, 3, 4, 5, 6, 7, 8, "str", "str", "str", "str", 1, 1, 1, 1, 1) \ - # E: Incompatible types in assignment (expression has type Tuple[int, int, ... <15 more items>], variable has type Tuple[int, int, ... <10 more items>]) + # E: Incompatible types in assignment (expression has type Tuple[int, int, ... <15 more items>], variable has type Tuple[int, int, ... <10 more items>]) [builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index 96eeb24c9357..e87d20cf61a9 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -8,7 +8,7 @@ reveal_type(p) # N: Revealed type is 'TypedDict('__main__.Point', {'x': builtin # Use values() to check fallback value type. reveal_type(p.values()) # N: Revealed type is 'typing.Iterable[builtins.object*]' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [targets sys, __main__] [case testCanCreateTypedDictInstanceWithDictCall] @@ -19,7 +19,7 @@ reveal_type(p) # N: Revealed type is 'TypedDict('__main__.Point', {'x': builtin # Use values() to check fallback value type. reveal_type(p.values()) # N: Revealed type is 'typing.Iterable[builtins.object*]' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testCanCreateTypedDictInstanceWithDictLiteral] from mypy_extensions import TypedDict @@ -29,7 +29,7 @@ reveal_type(p) # N: Revealed type is 'TypedDict('__main__.Point', {'x': builtin # Use values() to check fallback value type. 
reveal_type(p.values()) # N: Revealed type is 'typing.Iterable[builtins.object*]' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testCanCreateTypedDictInstanceWithNoArguments] from typing import TypeVar, Union @@ -39,7 +39,7 @@ p = EmptyDict() reveal_type(p) # N: Revealed type is 'TypedDict('__main__.EmptyDict', {})' reveal_type(p.values()) # N: Revealed type is 'typing.Iterable[builtins.object*]' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] -- Create Instance (Errors) @@ -392,8 +392,7 @@ f(ll) # E: Argument 1 to "f" has incompatible type "List[TypedDict({'x': int, 'z [builtins fixtures/dict.pyi] [case testTypedDictWithSimpleProtocol] -from typing_extensions import Protocol -from mypy_extensions import TypedDict +from typing_extensions import Protocol, TypedDict class StrObjectMap(Protocol): def __getitem__(self, key: str) -> object: ... @@ -411,17 +410,17 @@ fun(a) fun(b) fun2(a) # Error [builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] [out] -main:18: error: Argument 1 to "fun2" has incompatible type "A"; expected "StrIntMap" -main:18: note: Following member(s) of "A" have conflicts: -main:18: note: Expected: -main:18: note: def __getitem__(self, str) -> int -main:18: note: Got: -main:18: note: def __getitem__(self, str) -> object +main:17: error: Argument 1 to "fun2" has incompatible type "A"; expected "StrIntMap" +main:17: note: Following member(s) of "A" have conflicts: +main:17: note: Expected: +main:17: note: def __getitem__(self, str) -> int +main:17: note: Got: +main:17: note: def __getitem__(self, str) -> object [case testTypedDictWithSimpleProtocolInference] -from typing_extensions import Protocol -from mypy_extensions import TypedDict +from typing_extensions import Protocol, TypedDict from typing import TypeVar T_co = TypeVar('T_co', covariant=True) @@ -440,7 +439,7 @@ b: B reveal_type(fun(a)) # N: Revealed type is 
'builtins.object*' reveal_type(fun(b)) # N: Revealed type is 'builtins.object*' [builtins fixtures/dict.pyi] -[out] +[typing fixtures/typing-typeddict.pyi] -- Join @@ -455,7 +454,7 @@ reveal_type(p1.values()) # N: Revealed type is 'typing.Iterable[builtins.objec reveal_type(p2.values()) # N: Revealed type is 'typing.Iterable[builtins.object*]' reveal_type(joined_points) # N: Revealed type is 'TypedDict({'x': builtins.int, 'y': builtins.int})' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testJoinOfTypedDictRemovesNonequivalentKeys] from mypy_extensions import TypedDict @@ -504,7 +503,7 @@ joined2 = [right, left] reveal_type(joined1) # N: Revealed type is 'builtins.list[typing.Sized*]' reveal_type(joined2) # N: Revealed type is 'builtins.list[typing.Sized*]' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testJoinOfTypedDictWithIncompatibleTypeIsObject] from mypy_extensions import TypedDict @@ -638,7 +637,7 @@ A = TypedDict('A', {'x': int}) a: A reveal_type(f(a)) # N: Revealed type is 'builtins.str*' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] -- TODO: Figure out some way to trigger the ConstraintBuilderVisitor.visit_typeddict_type() path. 
@@ -904,7 +903,7 @@ reveal_type(d.get('x', A())) # N: Revealed type is 'Union[builtins.int, __main__ reveal_type(d.get('x', 1)) # N: Revealed type is 'builtins.int' reveal_type(d.get('y', None)) # N: Revealed type is 'Union[builtins.str, None]' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictGetMethodTypeContext] # flags: --strict-optional @@ -918,7 +917,7 @@ d.get('x', ['x']) # E: List item 0 has incompatible type "str"; expected "int" a = [''] reveal_type(d.get('x', a)) # N: Revealed type is 'Union[builtins.list[builtins.int], builtins.list[builtins.str*]]' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictGetMethodInvalidArgs] from mypy_extensions import TypedDict @@ -938,7 +937,7 @@ s = '' y = d.get(s) reveal_type(y) # N: Revealed type is 'builtins.object*' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictMissingMethod] from mypy_extensions import TypedDict @@ -954,7 +953,7 @@ E = TypedDict('E', {'d': D}) p = E(d=D(x=0, y='')) reveal_type(p.get('d', {'x': 1, 'y': ''})) # N: Revealed type is 'TypedDict('__main__.D', {'x': builtins.int, 'y': builtins.str})' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictGetDefaultParameterStillTypeChecked] from mypy_extensions import TypedDict @@ -962,7 +961,7 @@ TaggedPoint = TypedDict('TaggedPoint', {'type': str, 'x': int, 'y': int}) p = TaggedPoint(type='2d', x=42, y=1337) p.get('x', 1 + 'y') # E: Unsupported operand types for + ("int" and "str") [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictChainedGetWithEmptyDictDefault] # flags: --strict-optional @@ -977,7 +976,7 @@ reveal_type(d.get('x', None)) \ reveal_type(d.get('x', {}).get('a')) # N: Revealed type is 
'Union[builtins.int, None]' reveal_type(d.get('x', {})['a']) # N: Revealed type is 'builtins.int' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] -- Totality (the "total" keyword argument) @@ -1032,7 +1031,7 @@ reveal_type(d['y']) # N: Revealed type is 'builtins.str' reveal_type(d.get('x')) # N: Revealed type is 'builtins.int' reveal_type(d.get('y')) # N: Revealed type is 'builtins.str' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictSubtypingWithTotalFalse] from mypy_extensions import TypedDict @@ -1215,7 +1214,7 @@ a: A reveal_type(f(a)) # N: Revealed type is 'builtins.str' reveal_type(f(1)) # N: Revealed type is 'builtins.int' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictOverloading2] from typing import overload, Iterable @@ -1232,7 +1231,7 @@ def f(x): pass a: A f(a) [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [out] main:13: error: Argument 1 to "f" has incompatible type "A"; expected "Iterable[int]" main:13: note: Following member(s) of "A" have conflicts: @@ -1259,7 +1258,7 @@ f(a) # E: No overload variant of "f" matches argument type "A" \ # N: def f(x: str) -> None \ # N: def f(x: int) -> None [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictOverloading4] from typing import overload @@ -1280,7 +1279,7 @@ reveal_type(f(a)) # N: Revealed type is 'builtins.int' reveal_type(f(1)) # N: Revealed type is 'builtins.str' f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "A" [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictOverloading5] from typing import overload @@ -1303,7 +1302,7 @@ f(a) f(b) f(c) # E: Argument 1 to "f" has incompatible type 
"C"; expected "A" [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictOverloading6] from typing import overload @@ -1323,7 +1322,7 @@ b: B reveal_type(f(a)) # N: Revealed type is 'builtins.int' reveal_type(f(b)) # N: Revealed type is 'builtins.str' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] -- Special cases @@ -1639,7 +1638,7 @@ reveal_type(td['b']) # N: Revealed type is 'Union[builtins.str, builtins.int]' reveal_type(td['c']) # N: Revealed type is 'Union[Any, builtins.int]' \ # E: TypedDict "TDA" has no key 'c' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testPluginUnionsOfTypedDictsNonTotal] from typing import Union @@ -1661,7 +1660,7 @@ reveal_type(td.pop('b')) # N: Revealed type is 'Union[builtins.str, builtins.in reveal_type(td.pop('c')) # E: TypedDict "TDA" has no key 'c' \ # N: Revealed type is 'Union[Any, builtins.int]' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testCanCreateTypedDictWithTypingExtensions] # flags: --python-version 3.6 @@ -1686,7 +1685,7 @@ class Point(TypedDict): p = Point(x=42, y=1337) reveal_type(p) # N: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, 'y': builtins.int})' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictOptionalUpdate] from typing import Union @@ -1712,7 +1711,7 @@ y: Config x == y [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictOverlapWithDictNonOverlapping] # mypy: strict-equality @@ -1727,7 +1726,7 @@ y: Config x == y # E: Non-overlapping equality check (left operand type: "Dict[str, str]", right operand type: "Config") [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing 
fixtures/typing-typeddict.pyi] [case testTypedDictOverlapWithDictNonTotal] # mypy: strict-equality @@ -1742,7 +1741,7 @@ y: Config x == y [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictOverlapWithDictNonTotalNonOverlapping] # mypy: strict-equality @@ -1757,7 +1756,7 @@ y: Config x == y # E: Non-overlapping equality check (left operand type: "Dict[str, str]", right operand type: "Config") [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictOverlapWithDictEmpty] # mypy: strict-equality @@ -1770,7 +1769,7 @@ class Config(TypedDict): x: Config x == {} # E: Non-overlapping equality check (left operand type: "Config", right operand type: "Dict[, ]") [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictOverlapWithDictNonTotalEmpty] # mypy: strict-equality @@ -1783,7 +1782,7 @@ class Config(TypedDict, total=False): x: Config x == {} [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictOverlapWithDictNonStrKey] # mypy: strict-equality @@ -1797,7 +1796,7 @@ x: Config y: Dict[Union[str, int], str] x == y [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictOverlapWithDictOverload] from typing import overload, TypedDict, Dict @@ -1813,7 +1812,7 @@ def func(x: Dict[str, str]) -> str: ... def func(x): pass [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictOverlapWithDictOverloadBad] from typing import overload, TypedDict, Dict @@ -1829,7 +1828,7 @@ def func(x: Dict[str, str]) -> str: ... 
def func(x): pass [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictOverlapWithDictOverloadMappingBad] from typing import overload, TypedDict, Mapping @@ -1845,7 +1844,7 @@ def func(x: Mapping[str, str]) -> str: ... def func(x): pass [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictOverlapWithDictOverloadNonStrKey] from typing import overload, TypedDict, Dict @@ -1861,7 +1860,7 @@ def func(x: Dict[int, str]) -> str: ... def func(x): pass [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictIsInstance] from typing import TypedDict, Union @@ -1881,7 +1880,7 @@ else: assert isinstance(u2, dict) reveal_type(u2) # N: Revealed type is 'TypedDict('__main__.User', {'id': builtins.int, 'name': builtins.str})' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictIsInstanceABCs] from typing import TypedDict, Union, Mapping, Iterable @@ -1923,7 +1922,7 @@ bad2: Literal['bad'] v = {bad2: 2} # E: Extra key 'bad' for TypedDict "Value" [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testCannotUseFinalDecoratorWithTypedDict] from typing import TypedDict @@ -1936,7 +1935,7 @@ class DummyTypedDict(TypedDict): str_val: str [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictDoubleForwardClass] from mypy_extensions import TypedDict @@ -1952,7 +1951,7 @@ foo: Foo reveal_type(foo['bar']) # N: Revealed type is 'builtins.list[Any]' reveal_type(foo['baz']) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictDoubleForwardFunc] from mypy_extensions import TypedDict @@ 
-1966,7 +1965,7 @@ foo: Foo reveal_type(foo['bar']) # N: Revealed type is 'builtins.list[Any]' reveal_type(foo['baz']) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testTypedDictDoubleForwardMixed] from mypy_extensions import TypedDict @@ -1986,7 +1985,7 @@ reveal_type(foo['foo']) # N: Revealed type is 'builtins.int' reveal_type(foo['bar']) # N: Revealed type is 'builtins.list[Any]' reveal_type(foo['baz']) # N: Revealed type is 'builtins.list[Any]' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [case testAssignTypedDictAsAttribute] from typing import TypedDict @@ -1997,4 +1996,4 @@ class A: reveal_type(A().b) # N: Revealed type is 'Any' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test index 92e886fee419..4ebc82568cfb 100644 --- a/test-data/unit/check-unions.test +++ b/test-data/unit/check-unions.test @@ -306,6 +306,7 @@ def f(x: Union[int, str, A]): # E: Unsupported operand types for + ("int" and "object") \ # E: Unsupported operand types for + ("str" and "object") \ # N: Left operand is of type "Union[int, str, A]" +[builtins fixtures/primitives.pyi] [case testNarrowingDownNamedTupleUnion] from typing import NamedTuple, Union diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test index 8bc11e3fe5d3..7eb23fe16d33 100644 --- a/test-data/unit/check-unreachable-code.test +++ b/test-data/unit/check-unreachable-code.test @@ -96,6 +96,7 @@ if typing.TYPE_CHECKING: import pow123 # E else: import xyz753 +[typing fixtures/typing-medium.pyi] [out] main:3: error: Cannot find implementation or library stub for module named 'pow123' main:3: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports @@ -106,6 +107,7 
@@ if TYPE_CHECKING: import pow123 # E else: import xyz753 +[typing fixtures/typing-medium.pyi] [out] main:3: error: Cannot find implementation or library stub for module named 'pow123' main:3: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports @@ -117,6 +119,7 @@ if not typing.TYPE_CHECKING: else: import xyz753 [builtins fixtures/bool.pyi] +[typing fixtures/typing-medium.pyi] [out] main:5: error: Cannot find implementation or library stub for module named 'xyz753' main:5: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports @@ -618,6 +621,7 @@ if typing.TYPE_CHECKING: reveal_type(x) # N: Revealed type is '__main__.B' [builtins fixtures/isinstancelist.pyi] +[typing fixtures/typing-medium.pyi] [case testUnreachableWhenSuperclassIsAny] # flags: --strict-optional @@ -834,6 +838,7 @@ if FOOBAR: else: reveal_type(x) # N: Revealed type is 'builtins.int' [builtins fixtures/ops.pyi] +[typing fixtures/typing-medium.pyi] [case testUnreachableFlagIgnoresSemanticAnalysisExprUnreachable] # flags: --warn-unreachable --always-false FOOBAR @@ -851,6 +856,7 @@ b = (not FOOBAR) or foo() c = 1 if FOOBAR else 2 d = [x for x in lst if FOOBAR] [builtins fixtures/list.pyi] +[typing fixtures/typing-medium.pyi] [case testUnreachableFlagOkWithDeadStatements] # flags: --warn-unreachable @@ -1026,7 +1032,7 @@ def f_no_suppress_5() -> int: return 3 noop() # E: Statement is unreachable -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testUnreachableFlagContextManagersSuppressed] # flags: --warn-unreachable @@ -1072,7 +1078,7 @@ def f_mix() -> int: # E: Missing return statement with DoesNotSuppress(), Suppresses1(), DoesNotSuppress(): return 3 noop() -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testUnreachableFlagContextManagersSuppressedNoStrictOptional] # flags: --warn-unreachable --no-strict-optional @@ -1113,7 +1119,7 @@ def f_suppress() -> int: # E: Missing return 
statement with Suppresses(): return 3 noop() -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testUnreachableFlagContextAsyncManagersNoSuppress] # flags: --warn-unreachable --python-version 3.7 @@ -1276,4 +1282,3 @@ async def f_malformed_2() -> int: noop() # E: Statement is unreachable [typing fixtures/typing-full.pyi] - diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test index 3ec4028a842e..f9072a492587 100644 --- a/test-data/unit/check-varargs.test +++ b/test-data/unit/check-varargs.test @@ -707,7 +707,7 @@ g(d) # E: Argument 1 to "g" has incompatible type "Dict[str, int]"; expected "Di h(c) # E: Argument 1 to "h" has incompatible type "Dict[str, float]"; expected "Dict[str, int]" h(d) [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [case testInvariantListArgNote] from typing import List, Union diff --git a/test-data/unit/deps-expressions.test b/test-data/unit/deps-expressions.test index 127f4388e718..94a44cabbe21 100644 --- a/test-data/unit/deps-expressions.test +++ b/test-data/unit/deps-expressions.test @@ -122,7 +122,7 @@ def f(): pass async def g() -> None: x = await f() [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] -> m.g diff --git a/test-data/unit/diff.test b/test-data/unit/diff.test index 34a5ef263cdd..53bbf55ba2a6 100644 --- a/test-data/unit/diff.test +++ b/test-data/unit/diff.test @@ -475,7 +475,7 @@ def f(x: List[Tuple[int]]) -> Iterator[None]: @contextmanager def g(x: object) -> Iterator[None]: yield -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [builtins fixtures/list.pyi] [out] __main__.g diff --git a/test-data/unit/fine-grained-modules.test b/test-data/unit/fine-grained-modules.test index 26dd7e140b53..7c836aa46227 100644 --- a/test-data/unit/fine-grained-modules.test +++ b/test-data/unit/fine-grained-modules.test @@ -316,7 +316,7 @@ x.py:2: 
error: Argument 1 to "f" has incompatible type "int"; expected "str" 1+'hi' [file p/__init__.py.2] [file p/a.py.3] -'1'+'hi' +1+3 [out] p/a.py:1: error: Unsupported operand types for + ("int" and "str") == diff --git a/test-data/unit/fine-grained-suggest.test b/test-data/unit/fine-grained-suggest.test index 7a115e2f8765..816bd5787a9c 100644 --- a/test-data/unit/fine-grained-suggest.test +++ b/test-data/unit/fine-grained-suggest.test @@ -483,6 +483,7 @@ def test() -> None: bar(starargs) baz(named) quux(default) +[builtins fixtures/primitives.pyi] [out] (Callable[[int, str], int]) -> int (Callable[..., int]) -> int diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 2dc598661fd9..fb77f534b075 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -1706,7 +1706,7 @@ T = List[int] from typing import List T = List[int] # yo [builtins fixtures/list.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [out] == @@ -1825,7 +1825,7 @@ def f() -> Iterator[None]: [file b.py] [delete b.py.2] [file b.py.3] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [builtins fixtures/list.pyi] [triggered] 2: , __main__ @@ -1871,7 +1871,7 @@ def g() -> None: import b b.h(1) pass -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [builtins fixtures/list.pyi] [triggered] 2: , , , a.g @@ -1910,7 +1910,7 @@ import b def f(x: List[int]) -> Iterator[None]: x.append(1) yield -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [builtins fixtures/list.pyi] [out] == @@ -5275,7 +5275,7 @@ from enum import Enum class C(Enum): X = 0 -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [out] == a.py:5: error: "Type[C]" has no attribute "Y" @@ -5371,7 +5371,7 @@ C = Enum('C', 'X Y') from enum import Enum C = Enum('C', 'X') -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [out] == a.py:5: error: "Type[C]" has no 
attribute "Y" @@ -7370,7 +7370,7 @@ async def g() -> str: return '' [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] == main:4: error: Incompatible return value type (got "str", expected "int") @@ -7396,7 +7396,7 @@ class C: return '' [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] == main:4: error: Incompatible return value type (got "str", expected "int") @@ -7431,7 +7431,7 @@ class E: async def __anext__(self) -> object: return 0 [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] == main:6: error: Incompatible return value type (got "str", expected "int") @@ -7470,7 +7470,7 @@ class C: async def __aenter__(self) -> int: pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-async.pyi] [out] == main:5: error: Incompatible return value type (got "str", expected "int") @@ -7492,6 +7492,7 @@ class B: [file b.py.2] class B: x: int +[builtins fixtures/primitives.pyi] [out] main:5: error: Unsupported operand types for + ("str" and "int") == @@ -7510,6 +7511,7 @@ class B: [file b.py.2] class B: x: int +[builtins fixtures/primitives.pyi] [out] main:6: error: Unsupported operand types for + ("str" and "int") == @@ -7539,6 +7541,7 @@ from typing import Callable, TypeVar F = TypeVar('F', bound=Callable) def deco(f: F) -> F: pass +[builtins fixtures/primitives.pyi] [out] main:7: error: Unsupported operand types for + ("str" and "int") == @@ -8388,6 +8391,7 @@ x = 2 [file a.py.3] x = 'no way' [builtins fixtures/bool.pyi] +[typing fixtures/typing-medium.pyi] [out] == == @@ -9207,7 +9211,7 @@ class Data(TypedDict): [delete a.py.2] [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [out] == @@ -9239,7 +9243,7 @@ Data = Tuple[User, File] [delete a.py.2] [builtins fixtures/dict.pyi] -[typing 
fixtures/typing-full.pyi] +[typing fixtures/typing-typeddict.pyi] [out] == diff --git a/test-data/unit/fixtures/classmethod.pyi b/test-data/unit/fixtures/classmethod.pyi index 5aff9f8f1006..03ad803890a3 100644 --- a/test-data/unit/fixtures/classmethod.pyi +++ b/test-data/unit/fixtures/classmethod.pyi @@ -19,6 +19,7 @@ class int: @classmethod def from_bytes(cls, bytes: bytes, byteorder: str) -> int: pass +class float: pass class str: pass class bytes: pass class bool: pass diff --git a/test-data/unit/fixtures/primitives.pyi b/test-data/unit/fixtures/primitives.pyi index 5e3b7ab32f7a..71f59a9c1d8c 100644 --- a/test-data/unit/fixtures/primitives.pyi +++ b/test-data/unit/fixtures/primitives.pyi @@ -16,6 +16,7 @@ class int: # Note: this is a simplification of the actual signature def __init__(self, x: object = ..., base: int = ...) -> None: pass def __add__(self, i: int) -> int: pass + def __rmul__(self, x: int) -> int: pass class float: def __float__(self) -> float: pass class complex: pass diff --git a/test-data/unit/fixtures/typing-async.pyi b/test-data/unit/fixtures/typing-async.pyi new file mode 100644 index 000000000000..76449c2b51ee --- /dev/null +++ b/test-data/unit/fixtures/typing-async.pyi @@ -0,0 +1,120 @@ +# Test stub for typing module, with features for async/await related tests. +# +# Use [typing fixtures/typing-async.pyi] to use this instead of lib-stub/typing.pyi +# in a particular test case. +# +# Many of the definitions have special handling in the type checker, so they +# can just be initialized to anything. 
+ +from abc import abstractmethod, ABCMeta + +cast = 0 +overload = 0 +Any = 0 +Union = 0 +Optional = 0 +TypeVar = 0 +Generic = 0 +Protocol = 0 +Tuple = 0 +Callable = 0 +NamedTuple = 0 +Type = 0 +ClassVar = 0 +Final = 0 +Literal = 0 +NoReturn = 0 + +T = TypeVar('T') +T_co = TypeVar('T_co', covariant=True) +T_contra = TypeVar('T_contra', contravariant=True) +U = TypeVar('U') +V = TypeVar('V') +S = TypeVar('S') + +# Note: definitions below are different from typeshed, variances are declared +# to silence the protocol variance checks. Maybe it is better to use type: ignore? + +class Container(Protocol[T_co]): + @abstractmethod + # Use int because bool isn't in the default test builtins + def __contains__(self, arg: object) -> int: pass + +class Iterable(Protocol[T_co]): + @abstractmethod + def __iter__(self) -> 'Iterator[T_co]': pass + +class Iterator(Iterable[T_co], Protocol): + @abstractmethod + def __next__(self) -> T_co: pass + +class Generator(Iterator[T], Generic[T, U, V]): + @abstractmethod + def send(self, value: U) -> T: pass + + @abstractmethod + def throw(self, typ: Any, val: Any=None, tb: Any=None) -> None: pass + + @abstractmethod + def close(self) -> None: pass + + @abstractmethod + def __iter__(self) -> 'Generator[T, U, V]': pass + +class AsyncGenerator(AsyncIterator[T], Generic[T, U]): + @abstractmethod + def __anext__(self) -> Awaitable[T]: pass + + @abstractmethod + def asend(self, value: U) -> Awaitable[T]: pass + + @abstractmethod + def athrow(self, typ: Any, val: Any=None, tb: Any=None) -> Awaitable[T]: pass + + @abstractmethod + def aclose(self) -> Awaitable[T]: pass + + @abstractmethod + def __aiter__(self) -> 'AsyncGenerator[T, U]': pass + +class Awaitable(Protocol[T]): + @abstractmethod + def __await__(self) -> Generator[Any, Any, T]: pass + +class AwaitableGenerator(Generator[T, U, V], Awaitable[V], Generic[T, U, V, S], metaclass=ABCMeta): + pass + +class Coroutine(Awaitable[V], Generic[T, U, V]): + @abstractmethod + def send(self, value: U) 
-> T: pass + + @abstractmethod + def throw(self, typ: Any, val: Any=None, tb: Any=None) -> None: pass + + @abstractmethod + def close(self) -> None: pass + +class AsyncIterable(Protocol[T]): + @abstractmethod + def __aiter__(self) -> 'AsyncIterator[T]': pass + +class AsyncIterator(AsyncIterable[T], Protocol): + def __aiter__(self) -> 'AsyncIterator[T]': return self + @abstractmethod + def __anext__(self) -> Awaitable[T]: pass + +class Sequence(Iterable[T_co], Container[T_co]): + @abstractmethod + def __getitem__(self, n: Any) -> T_co: pass + +class Mapping(Iterable[T], Generic[T, T_co], metaclass=ABCMeta): + def __getitem__(self, key: T) -> T_co: pass + @overload + def get(self, k: T) -> Optional[T_co]: pass + @overload + def get(self, k: T, default: Union[T_co, V]) -> Union[T_co, V]: pass + +class ContextManager(Generic[T]): + def __enter__(self) -> T: pass + # Use Any because not all the precise types are in the fixtures. + def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> Any: pass diff --git a/test-data/unit/fixtures/typing-medium.pyi b/test-data/unit/fixtures/typing-medium.pyi new file mode 100644 index 000000000000..7717a6bf1749 --- /dev/null +++ b/test-data/unit/fixtures/typing-medium.pyi @@ -0,0 +1,69 @@ +# More complete stub for typing module. +# +# Use [typing fixtures/typing-medium.pyi] to use this instead of lib-stub/typing.pyi +# in a particular test case. +# +# Many of the definitions have special handling in the type checker, so they +# can just be initialized to anything. 
+ +cast = 0 +overload = 0 +Any = 0 +Union = 0 +Optional = 0 +TypeVar = 0 +Generic = 0 +Protocol = 0 +Tuple = 0 +Callable = 0 +_promote = 0 +NamedTuple = 0 +Type = 0 +no_type_check = 0 +ClassVar = 0 +Final = 0 +Literal = 0 +TypedDict = 0 +NoReturn = 0 +NewType = 0 + +T = TypeVar('T') +T_co = TypeVar('T_co', covariant=True) +T_contra = TypeVar('T_contra', contravariant=True) +U = TypeVar('U') +V = TypeVar('V') +S = TypeVar('S') + +# Note: definitions below are different from typeshed, variances are declared +# to silence the protocol variance checks. Maybe it is better to use type: ignore? + +class Sized(Protocol): + def __len__(self) -> int: pass + +class Iterable(Protocol[T_co]): + def __iter__(self) -> 'Iterator[T_co]': pass + +class Iterator(Iterable[T_co], Protocol): + def __next__(self) -> T_co: pass + +class Generator(Iterator[T], Generic[T, U, V]): + def __iter__(self) -> 'Generator[T, U, V]': pass + +class Sequence(Iterable[T_co]): + def __getitem__(self, n: Any) -> T_co: pass + +class Mapping(Iterable[T], Generic[T, T_co]): + def __getitem__(self, key: T) -> T_co: pass + +class SupportsInt(Protocol): + def __int__(self) -> int: pass + +class SupportsFloat(Protocol): + def __float__(self) -> float: pass + +class ContextManager(Generic[T]): + def __enter__(self) -> T: pass + # Use Any because not all the precise types are in the fixtures. + def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> Any: pass + +TYPE_CHECKING = 1 diff --git a/test-data/unit/fixtures/typing-typeddict.pyi b/test-data/unit/fixtures/typing-typeddict.pyi new file mode 100644 index 000000000000..f460a7bfd167 --- /dev/null +++ b/test-data/unit/fixtures/typing-typeddict.pyi @@ -0,0 +1,67 @@ +# Test stub for typing module that includes TypedDict related things. +# +# Use [typing fixtures/typing-typeddict.pyi] to use this instead of lib-stub/typing.pyi +# in a particular test case. 
+# +# Many of the definitions have special handling in the type checker, so they +# can just be initialized to anything. + +from abc import ABCMeta + +cast = 0 +overload = 0 +Any = 0 +Union = 0 +Optional = 0 +TypeVar = 0 +Generic = 0 +Protocol = 0 +Tuple = 0 +Callable = 0 +NamedTuple = 0 +Final = 0 +Literal = 0 +TypedDict = 0 +NoReturn = 0 + +T = TypeVar('T') +T_co = TypeVar('T_co', covariant=True) +V = TypeVar('V') + +# Note: definitions below are different from typeshed, variances are declared +# to silence the protocol variance checks. Maybe it is better to use type: ignore? + +class Sized(Protocol): + def __len__(self) -> int: pass + +class Iterable(Protocol[T_co]): + def __iter__(self) -> 'Iterator[T_co]': pass + +class Iterator(Iterable[T_co], Protocol): + def __next__(self) -> T_co: pass + +class Sequence(Iterable[T_co]): + def __getitem__(self, n: Any) -> T_co: pass + +class Mapping(Iterable[T], Generic[T, T_co], metaclass=ABCMeta): + def __getitem__(self, key: T) -> T_co: pass + @overload + def get(self, k: T) -> Optional[T_co]: pass + @overload + def get(self, k: T, default: Union[T_co, V]) -> Union[T_co, V]: pass + def values(self) -> Iterable[T_co]: pass # Approximate return type + def __len__(self) -> int: ... + def __contains__(self, arg: object) -> int: pass + +# Fallback type for all typed dicts (does not exist at runtime). +class _TypedDict(Mapping[str, object]): + # Needed to make this class non-abstract. It is explicitly declared abstract in + # typeshed, but we don't want to import abc here, as it would slow down the tests. + def __iter__(self) -> Iterator[str]: ... + def copy(self: T) -> T: ... + # Using NoReturn so that only calls using the plugin hook can go through. + def setdefault(self, k: NoReturn, default: object) -> object: ... + # Mypy expects that 'default' has a type variable type. + def pop(self, k: NoReturn, default: T = ...) -> object: ... + def update(self: T, __m: T) -> None: ... + def __delitem__(self, k: NoReturn) -> None: ... 
diff --git a/test-data/unit/lib-stub/builtins.pyi b/test-data/unit/lib-stub/builtins.pyi index 29365d0efd19..de1266e3a284 100644 --- a/test-data/unit/lib-stub/builtins.pyi +++ b/test-data/unit/lib-stub/builtins.pyi @@ -1,3 +1,7 @@ +# DO NOT ADD TO THIS FILE AS IT WILL SLOW DOWN TESTS! +# +# Use [builtins fixtures/...pyi] if you need more features. + from typing import Generic, TypeVar _T = TypeVar('_T') @@ -10,11 +14,9 @@ class type: # These are provided here for convenience. class int: def __add__(self, other: int) -> int: pass - def __rmul__(self, other: int) -> int: pass class float: pass -class str: - def __add__(self, other: 'str') -> 'str': pass +class str: pass class bytes: pass class tuple(Generic[_T]): pass diff --git a/test-data/unit/lib-stub/typing.pyi b/test-data/unit/lib-stub/typing.pyi index 94324734a647..3d403b1845db 100644 --- a/test-data/unit/lib-stub/typing.pyi +++ b/test-data/unit/lib-stub/typing.pyi @@ -1,5 +1,12 @@ # Stub for typing module. Many of the definitions have special handling in # the type checker, so they can just be initialized to anything. +# +# DO NOT ADD TO THIS FILE UNLESS YOU HAVE A GOOD REASON! Additional definitions +# will slow down tests. +# +# Use [typing fixtures/typing-{medium,full,async,...}.pyi] in a test case for +# a more complete stub for typing. If you need to add things, add to one of +# the stubs under fixtures/. 
cast = 0 overload = 0 @@ -35,9 +42,6 @@ class Generator(Iterator[T], Generic[T, U, V]): class Sequence(Iterable[T_co]): def __getitem__(self, n: Any) -> T_co: pass -class Mapping(Generic[T, T_co]): - def __getitem__(self, key: T) -> T_co: pass +class Mapping(Generic[T, T_co]): pass def final(meth: T) -> T: pass - -TYPE_CHECKING = 1 diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test index 279d566ceea7..df621cccbe81 100644 --- a/test-data/unit/merge.test +++ b/test-data/unit/merge.test @@ -1148,7 +1148,7 @@ from typing import Iterator, List, Tuple @contextmanager def f(x: List[Tuple[int]]) -> Iterator[None]: yield -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [builtins fixtures/list.pyi] [out] __main__: diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test index 8f9b2f651862..359eb292746c 100644 --- a/test-data/unit/semanal-types.test +++ b/test-data/unit/semanal-types.test @@ -1371,7 +1371,7 @@ MypyFile:1( from typing import _promote @_promote(str) class S: pass -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [out] MypyFile:1( ImportFrom:1(typing, [_promote]) diff --git a/test-data/unit/typexport-basic.test b/test-data/unit/typexport-basic.test index 8d6e93ac340c..be446e2c80e2 100644 --- a/test-data/unit/typexport-basic.test +++ b/test-data/unit/typexport-basic.test @@ -1154,7 +1154,7 @@ OpExpr(3) : builtins.list[builtins.str] ## IntExpr|OpExpr|StrExpr '%d' % 1 [builtins fixtures/primitives.pyi] -[typing fixtures/typing-full.pyi] +[typing fixtures/typing-medium.pyi] [out] IntExpr(2) : Literal[1]? 
OpExpr(2) : builtins.str From 25c993be1007a09baac5d95c1d2bfce779055ad3 Mon Sep 17 00:00:00 2001 From: Xuanda Yang Date: Sat, 21 Dec 2019 02:59:24 +0800 Subject: [PATCH 021/117] Fix incorrect name lookup for decorated methods (#8175) Resolves #8161 According to comments of `lookup` in `mypy/semanal.py`, when we look up a class attribute, we require that it is defined textually before the reference statement, thus line number is used for comparison. When function has decorators, its line number is determined by the top decorator instead of the `def`. That's why #8161's code fails because on line 8, the `A` in `Type[A]` has the line number of 8 while the `@staticmethod` function `A` has the line number of 7 due to the decorator. Thus we need to properly handle this by introducing the number of decorators when deciding textural precedence. Also overloads needs special handling to be considered "as a unit". --- mypy/semanal.py | 32 +++++++++++++++++- test-data/unit/check-classes.test | 42 ++++++++++++++++++++++++ test-data/unit/fixtures/staticmethod.pyi | 1 + 3 files changed, 74 insertions(+), 1 deletion(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index 488deb80e21b..0bf18a7b2197 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -3936,11 +3936,41 @@ class C: # caught. assert self.statement # we are at class scope return (node is None - or node.line < self.statement.line + or self.is_textually_before_statement(node) or not self.is_defined_in_current_module(node.fullname) or isinstance(node, TypeInfo) or (isinstance(node, PlaceholderNode) and node.becomes_typeinfo)) + def is_textually_before_statement(self, node: SymbolNode) -> bool: + """Check if a node is defined textually before the current statement + + Note that decorated functions' line number are the same as + the top decorator. 
+ """ + assert self.statement + line_diff = self.statement.line - node.line + + # The first branch handles reference an overloaded function variant inside itself, + # this is a corner case where mypy technically deviates from runtime name resolution, + # but it is fine because we want an overloaded function to be treated as a single unit. + if self.is_overloaded_item(node, self.statement): + return False + elif isinstance(node, Decorator) and not node.is_overload: + return line_diff > len(node.original_decorators) + else: + return line_diff > 0 + + def is_overloaded_item(self, node: SymbolNode, statement: Statement) -> bool: + """Check whehter the function belongs to the overloaded variants""" + if isinstance(node, OverloadedFuncDef) and isinstance(statement, FuncDef): + in_items = statement in {item.func if isinstance(item, Decorator) + else item for item in node.items} + in_impl = (node.impl is not None and + ((isinstance(node.impl, Decorator) and statement is node.impl.func) + or statement is node.impl)) + return in_items or in_impl + return False + def is_defined_in_current_module(self, fullname: Optional[str]) -> bool: if fullname is None: return False diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 333ffd7b7ccd..be765be67bfe 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -6597,3 +6597,45 @@ reveal_type(D() + "str") # N: Revealed type is 'Any' reveal_type(0.5 + D1()) # N: Revealed type is 'Any' reveal_type(D1() + 0.5) # N: Revealed type is '__main__.D1' [builtins fixtures/primitives.pyi] + +[case testRefMethodWithDecorator] +from typing import Type + +class A: + pass + +class B: + @staticmethod + def A() -> Type[A]: ... + @staticmethod + def B() -> Type[A]: # E: Function "__main__.B.A" is not valid as a type \ + # N: Perhaps you need "Callable[...]" or a callback protocol? 
+ return A + +class C: + @property + @staticmethod + def A() -> Type[A]: + return A + +[builtins fixtures/staticmethod.pyi] + +[case testRefMethodWithOverloadDecorator] +from typing import Type, overload + +class A: + pass + +class B: + @classmethod + @overload + def A(cls, x: int) -> Type[A]: ... + @classmethod + @overload + def A(cls, x: str) -> Type[A]: ... + @classmethod + def A(cls, x: object) -> Type[A]: ... + def B(cls, x: int) -> Type[A]: ... # E: Function "__main__.B.A" is not valid as a type \ + # N: Perhaps you need "Callable[...]" or a callback protocol? + +[builtins fixtures/classmethod.pyi] diff --git a/test-data/unit/fixtures/staticmethod.pyi b/test-data/unit/fixtures/staticmethod.pyi index 14254e64dcb1..7d5d98634e48 100644 --- a/test-data/unit/fixtures/staticmethod.pyi +++ b/test-data/unit/fixtures/staticmethod.pyi @@ -9,6 +9,7 @@ class type: class function: pass staticmethod = object() # Dummy definition. +property = object() # Dummy definition class int: @staticmethod From aaae7e6e013e960102d803f063df1b7f97c42580 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 23 Dec 2019 12:26:28 +0000 Subject: [PATCH 022/117] Reduce Travis parallelism (#8195) Previously -n12 provided fast builds, but it looks like -n2 is marginally faster now. Total runtime went from 1 hr 48 min to 1 hr 41 min (though with a sample size of 1). 
--- .travis.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index fb9999ceb2d8..b664e8000159 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,7 +17,7 @@ dist: xenial env: TOXENV=py - EXTRA_ARGS="-n 12" + EXTRA_ARGS="-n 2" TEST_MYPYC=0 PYTHON_DEBUG_BUILD=0 @@ -39,19 +39,19 @@ jobs: - TOXENV=py36 - PYTHONVERSION=3.6.8 - PYTHON_DEBUG_BUILD=1 - - EXTRA_ARGS="-n 12 mypyc/test/test_run.py mypyc/test/test_external.py" + - EXTRA_ARGS="-n 2 mypyc/test/test_run.py mypyc/test/test_external.py" - name: "run mypyc runtime tests with python 3.6 on OS X" os: osx osx_image: xcode8.3 language: generic env: - PYTHONVERSION=3.6.3 - - EXTRA_ARGS="-n 12 mypyc/test/test_run.py mypyc/test/test_external.py" + - EXTRA_ARGS="-n 2 mypyc/test/test_run.py mypyc/test/test_external.py" - name: "run test suite with python 3.7 (compiled with mypyc)" python: 3.7 env: - TOXENV=py - - EXTRA_ARGS="-n 12" + - EXTRA_ARGS="-n 2" - TEST_MYPYC=1 - name: "type check our own code" python: 3.7 From 61da677188b16217e66c0f5c8f5369d47dfdb2a5 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 23 Dec 2019 14:31:25 +0000 Subject: [PATCH 023/117] Only run Travis tests on Python 3.7 once (compiled) (#8196) Previously we ran them both compiled and interpreted, which seems redundant. 
--- .travis.yml | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/.travis.yml b/.travis.yml index b664e8000159..753e777d4fab 100644 --- a/.travis.yml +++ b/.travis.yml @@ -29,8 +29,12 @@ jobs: dist: trusty - name: "run test suite with python 3.6" python: 3.6 # 3.6.3 pip 9.0.1 - - name: "run test suite with python 3.7" - python: 3.7 # 3.7.0 pip 10.0.1 + - name: "run test suite with python 3.7 (compiled with mypyc)" + python: 3.7 + env: + - TOXENV=py + - EXTRA_ARGS="-n 2" + - TEST_MYPYC=1 - name: "run test suite with python 3.8" python: 3.8 - name: "run mypyc runtime tests with python 3.6 debug build" @@ -47,12 +51,6 @@ jobs: env: - PYTHONVERSION=3.6.3 - EXTRA_ARGS="-n 2 mypyc/test/test_run.py mypyc/test/test_external.py" - - name: "run test suite with python 3.7 (compiled with mypyc)" - python: 3.7 - env: - - TOXENV=py - - EXTRA_ARGS="-n 2" - - TEST_MYPYC=1 - name: "type check our own code" python: 3.7 env: From f66f11f6cc6e8b07217b7be51582079dec03aa85 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 23 Dec 2019 14:31:45 +0000 Subject: [PATCH 024/117] Tests: run self check and lint in parallel (#8189) I've seen this speed up `runtests.py` by about 20s when multiple cores are available. 
--- runtests.py | 80 ++++++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 64 insertions(+), 16 deletions(-) diff --git a/runtests.py b/runtests.py index 8e044e8f985e..6a2fa3639cdf 100755 --- a/runtests.py +++ b/runtests.py @@ -1,9 +1,9 @@ #!/usr/bin/env python3 +import subprocess +from subprocess import Popen from os import system from sys import argv, exit, platform, executable, version_info -prog, *args = argv - # Use the Python provided to execute the script, or fall back to a sane default if version_info >= (3, 5, 0): @@ -63,23 +63,71 @@ assert all(cmd in cmds for cmd in FAST_FAIL) -if not set(args).issubset(cmds): - print("usage:", prog, " ".join('[%s]' % k for k in cmds)) - exit(1) - -if not args: - args = list(cmds) - -status = 0 -for arg in args: - cmd = cmds[arg] - print('run %s: %s' % (arg, cmd)) +def run_cmd(name: str) -> int: + status = 0 + cmd = cmds[name] + print('run %s: %s' % (name, cmd)) res = (system(cmd) & 0x7F00) >> 8 if res: - print('\nFAILED: %s' % arg) + print('\nFAILED: %s' % name) status = res - if arg in FAST_FAIL: + if name in FAST_FAIL: exit(status) + return status + + +def start_background_cmd(name: str) -> Popen: + cmd = cmds[name] + proc = subprocess.Popen(cmd, + shell=True, + stderr=subprocess.STDOUT, + stdout=subprocess.PIPE) + return proc + + +def wait_background_cmd(name: str, proc: Popen) -> int: + output = proc.communicate()[0] + status = proc.returncode + print('run %s: %s' % (name, cmds[name])) + if status: + print(output.decode().rstrip()) + print('\nFAILED: %s' % name) + if name in FAST_FAIL: + exit(status) + return status + + +def main() -> None: + prog, *args = argv + + if not set(args).issubset(cmds): + print("usage:", prog, " ".join('[%s]' % k for k in cmds)) + exit(1) + + if not args: + args = list(cmds) + + status = 0 + + if 'self' in args and 'lint' in args: + # Perform lint and self check in parallel as it's faster. 
+ proc = start_background_cmd('lint') + cmd_status = run_cmd('self') + if cmd_status: + status = cmd_status + cmd_status = wait_background_cmd('lint', proc) + if cmd_status: + status = cmd_status + args = [arg for arg in args if arg not in ('self', 'lint')] + + for arg in args: + cmd_status = run_cmd(arg) + if cmd_status: + status = cmd_status + + exit(status) + -exit(status) +if __name__ == '__main__': + main() From b46f734b255e51d48b1402d8ba540df08f11be40 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 23 Dec 2019 14:32:35 +0000 Subject: [PATCH 025/117] Tests: correctly mark which tests are slow (#8197) Previously various slow tests were included with fast tests, resulting in non-optimal parallelization. --- runtests.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/runtests.py b/runtests.py index 6a2fa3639cdf..52ec4398fc8f 100755 --- a/runtests.py +++ b/runtests.py @@ -25,6 +25,9 @@ STUBGEN_PY = 'StubgenPythonSuite' MYPYC_RUN = 'TestRun' MYPYC_RUN_MULTI = 'TestRunMultiFile' +MYPYC_EXTERNAL = 'TestExternal' +MYPYC_COMMAND_LINE = 'TestCommandLine' +ERROR_STREAM = 'ErrorStreamSuite' ALL_NON_FAST = [CMDLINE, @@ -36,7 +39,10 @@ STUBGEN_CMD, STUBGEN_PY, MYPYC_RUN, - MYPYC_RUN_MULTI] + MYPYC_RUN_MULTI, + MYPYC_EXTERNAL, + MYPYC_COMMAND_LINE, + ERROR_STREAM] # We split the pytest run into three parts to improve test # parallelization. 
Each run should have tests that each take a roughly similar @@ -55,7 +61,15 @@ STUBGEN_PY]), # Test cases that may take seconds to run each 'pytest-slow': 'pytest -k "%s"' % ' or '.join( - [SAMPLES, TYPESHED, PEP561, DAEMON, MYPYC_RUN, MYPYC_RUN_MULTI]), + [SAMPLES, + TYPESHED, + PEP561, + DAEMON, + MYPYC_RUN, + MYPYC_RUN_MULTI, + MYPYC_EXTERNAL, + MYPYC_COMMAND_LINE, + ERROR_STREAM]), } # Stop run immediately if these commands fail From 9a7b16bba2aa90c3bb0575bb5cb2ce9d7ac47453 Mon Sep 17 00:00:00 2001 From: Xuanda Yang Date: Mon, 23 Dec 2019 23:31:29 +0800 Subject: [PATCH 026/117] Move cmdline test into check test (#8137) Relates to #7633. * testDisallowAnyUnimported is checked in testDisallowImplicitAnyVariableDefinition from check-flags.test so I removed it from cmdline.test instead of porting it. * most tests are moved from cmdline.test to check-flags.test * two remaining: testDisallowAnyGenericsBuiltinCollections and testDisallowAnyGenericsTypingCollections due to no fixture for frozenset available --- test-data/unit/check-flags.test | 222 +++++++++++++++++ test-data/unit/cmdline.test | 409 -------------------------------- 2 files changed, 222 insertions(+), 409 deletions(-) diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index 7a2e7af8233f..1aa5a020f32d 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -1232,3 +1232,225 @@ A = List # OK B = List[A] # E:10: Missing type parameters for generic type "A" x: A # E:4: Missing type parameters for generic type "A" [builtins fixtures/list.pyi] + +[case testDisallowAnyExplicitDefSignature] +# flags: --disallow-any-explicit + +from typing import Any, List + +def f(x: Any) -> None: # E: Explicit "Any" is not allowed + pass + +def g() -> Any: # E: Explicit "Any" is not allowed + pass + +def h() -> List[Any]: # E: Explicit "Any" is not allowed + pass +[builtins fixtures/list.pyi] + +[case testDisallowAnyExplicitVarDeclaration] +# flags: --python-version
3.6 --disallow-any-explicit +from typing import Any +v: Any = '' # E: Explicit "Any" is not allowed +w = '' # type: Any # E: Explicit "Any" is not allowed +class X: + y = '' # type: Any # E: Explicit "Any" is not allowed + +[case testDisallowAnyExplicitGenericVarDeclaration] +# flags: --python-version 3.6 --disallow-any-explicit +from typing import Any, List +v: List[Any] = [] # E: Explicit "Any" is not allowed +[builtins fixtures/list.pyi] + +[case testDisallowAnyExplicitInheritance] +# flags: --disallow-any-explicit +from typing import Any, List + +class C(Any): # E: Explicit "Any" is not allowed + pass + +class D(List[Any]): # E: Explicit "Any" is not allowed + pass +[builtins fixtures/list.pyi] + +[case testDisallowAnyExplicitAlias] +# flags: --disallow-any-explicit +from typing import Any, List + +X = Any # E: Explicit "Any" is not allowed +Y = List[Any] # E: Explicit "Any" is not allowed + +def foo(x: X) -> Y: # no error + x.nonexistent() # no error + return x +[builtins fixtures/list.pyi] + +[case testDisallowAnyExplicitGenericAlias] +# flags: --disallow-any-explicit +from typing import Any, TypeVar, Tuple + +T = TypeVar('T') + +TupleAny = Tuple[Any, T] # E: Explicit "Any" is not allowed + +def foo(x: TupleAny[str]) -> None: # no error + pass + +def goo(x: TupleAny[Any]) -> None: # E: Explicit "Any" is not allowed + pass + +[case testDisallowAnyExplicitCast] +# flags: --disallow-any-explicit +from typing import Any, List, cast + +x = 1 +y = cast(Any, x) # E: Explicit "Any" is not allowed +z = cast(List[Any], x) # E: Explicit "Any" is not allowed +[builtins fixtures/list.pyi] + +[case testDisallowAnyExplicitNamedTuple] +# flags: --disallow-any-explicit +from typing import Any, List, NamedTuple + +Point = NamedTuple('Point', [('x', List[Any]), ('y', Any)]) # E: Explicit "Any" is not allowed +[builtins fixtures/list.pyi] + +[case testDisallowAnyExplicitTypeVarConstraint] +# flags: --disallow-any-explicit +from typing import Any, List, TypeVar + +T = 
TypeVar('T', Any, List[Any]) # E: Explicit "Any" is not allowed +[builtins fixtures/list.pyi] + +[case testDisallowAnyExplicitNewType] +# flags: --disallow-any-explicit +from typing import Any, List, NewType + +# this error does not come from `--disallow-any-explicit` flag +Baz = NewType('Baz', Any) # E: Argument 2 to NewType(...) must be subclassable (got "Any") +Bar = NewType('Bar', List[Any]) # E: Explicit "Any" is not allowed +[builtins fixtures/list.pyi] + +[case testDisallowAnyExplicitTypedDictSimple] +# flags: --disallow-any-explicit +from mypy_extensions import TypedDict +from typing import Any + +M = TypedDict('M', {'x': str, 'y': Any}) # E: Explicit "Any" is not allowed +M(x='x', y=2) # no error +def f(m: M) -> None: pass # no error +[builtins fixtures/dict.pyi] + +[case testDisallowAnyExplicitTypedDictGeneric] +# flags: --disallow-any-explicit +from mypy_extensions import TypedDict +from typing import Any, List + +M = TypedDict('M', {'x': str, 'y': List[Any]}) # E: Explicit "Any" is not allowed +N = TypedDict('N', {'x': str, 'y': List}) # no error +[builtins fixtures/dict.pyi] + +[case testDisallowAnyGenericsTupleNoTypeParams] +# flags: --python-version 3.6 --disallow-any-generics +from typing import Tuple + +def f(s: Tuple) -> None: pass # E: Missing type parameters for generic type "Tuple" +def g(s) -> Tuple: # E: Missing type parameters for generic type "Tuple" + return 'a', 'b' +def h(s) -> Tuple[str, str]: # no error + return 'a', 'b' +x: Tuple = () # E: Missing type parameters for generic type "Tuple" + +[case testDisallowAnyGenericsTupleWithNoTypeParamsGeneric] +# flags: --disallow-any-generics +from typing import Tuple, List + +def f(s: List[Tuple]) -> None: pass # E: Missing type parameters for generic type "Tuple" +def g(s: List[Tuple[str, str]]) -> None: pass # no error +[builtins fixtures/list.pyi] + +[case testDisallowAnyGenericsTypeType] +# flags: --disallow-any-generics +from typing import Type, Any + +def f(s: Type[Any]) -> None: pass # 
no error +def g(s) -> Type: # E: Missing type parameters for generic type "Type" + return s +def h(s) -> Type[str]: # no error + return s +x: Type = g(0) # E: Missing type parameters for generic type "Type" + +[case testDisallowAnyGenericsAliasGenericType] +# flags: --disallow-any-generics +from typing import List + +L = List # no error + +def f(l: L) -> None: pass # E: Missing type parameters for generic type "L" +def g(l: L[str]) -> None: pass # no error +[builtins fixtures/list.pyi] + +[case testDisallowAnyGenericsGenericAlias] +# flags: --python-version 3.6 --disallow-any-generics +from typing import TypeVar, Tuple + +T = TypeVar('T') +A = Tuple[T, str, T] + +def f(s: A) -> None: pass # E: Missing type parameters for generic type "A" +def g(s) -> A: # E: Missing type parameters for generic type "A" + return 'a', 'b', 1 +def h(s) -> A[str]: # no error + return 'a', 'b', 'c' +x: A = ('a', 'b', 1) # E: Missing type parameters for generic type "A" + +[case testDisallowAnyGenericsPlainList] +# flags: --python-version 3.6 --disallow-any-generics +from typing import List + +def f(l: List) -> None: pass # E: Missing type parameters for generic type "List" +def g(l: List[str]) -> None: pass # no error +def h(l: List[List]) -> None: pass # E: Missing type parameters for generic type "List" +def i(l: List[List[List[List]]]) -> None: pass # E: Missing type parameters for generic type "List" + +x = [] # E: Need type annotation for 'x' (hint: "x: List[] = ...") +y: List = [] # E: Missing type parameters for generic type "List" +[builtins fixtures/list.pyi] + +[case testDisallowAnyGenericsCustomGenericClass] +# flags: --python-version 3.6 --disallow-any-generics +from typing import Generic, TypeVar, Any + +T = TypeVar('T') +class G(Generic[T]): pass + +def f() -> G: # E: Missing type parameters for generic type "G" + return G() + +x: G[Any] = G() # no error +y: G = x # E: Missing type parameters for generic type "G" + +[case testDisallowSubclassingAny] +# flags: --config-file 
tmp/mypy.ini +import m +import y + +[file m.py] +from typing import Any + +x = None # type: Any + +class ShouldBeFine(x): ... + +[file y.py] +from typing import Any + +x = None # type: Any + +class ShouldNotBeFine(x): ... # E: Class cannot subclass 'x' (has type 'Any') + +[file mypy.ini] +\[mypy] +disallow_subclassing_any = True +\[mypy-m] +disallow_subclassing_any = False diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test index 3350167bea3c..0e7adc41a29d 100644 --- a/test-data/unit/cmdline.test +++ b/test-data/unit/cmdline.test @@ -597,391 +597,6 @@ reveal_type(a.__pow__(2)) # N: Revealed type is 'builtins.int' reveal_type(a.__pow__(a)) # N: Revealed type is 'Any' a.__pow__() # E: Too few arguments for "__pow__" of "int" -[case testDisallowAnyUnimported] -# cmd: mypy main.py -[file mypy.ini] -\[mypy] -disallow_any_unimported = True -ignore_missing_imports = True -[file main.py] -from unreal import F - -def f(x: F) -> None: pass -[out] -main.py:3: error: Argument 1 to "f" becomes "Any" due to an unfollowed import - -[case testDisallowAnyExplicitDefSignature] -# cmd: mypy m.py -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_explicit = True - -[file m.py] -from typing import Any, List - -def f(x: Any) -> None: - pass - -def g() -> Any: - pass - -def h() -> List[Any]: - pass - -[out] -m.py:3: error: Explicit "Any" is not allowed -m.py:6: error: Explicit "Any" is not allowed -m.py:9: error: Explicit "Any" is not allowed - -[case testDisallowAnyExplicitVarDeclaration] -# cmd: mypy --python-version=3.6 m.py - -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_explicit = True - -[file m.py] -from typing import Any, List -v: Any = '' -w = '' # type: Any -class X: - y = '' # type: Any - -[out] -m.py:2: error: Explicit "Any" is not allowed -m.py:3: error: Explicit "Any" is not allowed -m.py:5: error: Explicit "Any" is not allowed - -[case testDisallowAnyExplicitGenericVarDeclaration] -# cmd: mypy --python-version=3.6 m.py - -[file mypy.ini] -\[mypy] 
-\[mypy-m] -disallow_any_explicit = True - -[file m.py] -from typing import Any, List -v: List[Any] = [] -[out] -m.py:2: error: Explicit "Any" is not allowed - -[case testDisallowAnyExplicitInheritance] -# cmd: mypy m.py - -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_explicit = True - -[file m.py] -from typing import Any, List - -class C(Any): - pass - -class D(List[Any]): - pass -[out] -m.py:3: error: Explicit "Any" is not allowed -m.py:6: error: Explicit "Any" is not allowed - -[case testDisallowAnyExplicitAlias] -# cmd: mypy m.py - -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_explicit = True - -[file m.py] -from typing import Any, List - -X = Any -Y = List[Any] - -def foo(x: X) -> Y: # no error - x.nonexistent() # no error - return x - -[out] -m.py:3: error: Explicit "Any" is not allowed -m.py:4: error: Explicit "Any" is not allowed - -[case testDisallowAnyExplicitGenericAlias] -# cmd: mypy m.py - -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_explicit = True - -[file m.py] -from typing import Any, List, TypeVar, Tuple - -T = TypeVar('T') - -TupleAny = Tuple[Any, T] # error - -def foo(x: TupleAny[str]) -> None: # no error - pass - -def goo(x: TupleAny[Any]) -> None: # error - pass - -[out] -m.py:5: error: Explicit "Any" is not allowed -m.py:10: error: Explicit "Any" is not allowed - -[case testDisallowAnyExplicitCast] -# cmd: mypy m.py - -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_explicit = True - -[file m.py] -from typing import Any, List, cast - -x = 1 -y = cast(Any, x) -z = cast(List[Any], x) -[out] -m.py:4: error: Explicit "Any" is not allowed -m.py:5: error: Explicit "Any" is not allowed - -[case testDisallowAnyExplicitNamedTuple] -# cmd: mypy m.py - -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_explicit = True - -[file m.py] -from typing import Any, List, NamedTuple - -Point = NamedTuple('Point', [('x', List[Any]), - ('y', Any)]) - -[out] -m.py:3: error: Explicit "Any" is not allowed - -[case 
testDisallowAnyExplicitTypeVarConstraint] -# cmd: mypy m.py - -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_explicit = True - -[file m.py] -from typing import Any, List, TypeVar - -T = TypeVar('T', Any, List[Any]) -[out] -m.py:3: error: Explicit "Any" is not allowed - -[case testDisallowAnyExplicitNewType] -# cmd: mypy m.py - -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_explicit = True - -[file m.py] -from typing import Any, List, NewType - -Baz = NewType('Baz', Any) # this error does not come from `--disallow-any-explicit` flag -Bar = NewType('Bar', List[Any]) - -[out] -m.py:3: error: Argument 2 to NewType(...) must be subclassable (got "Any") -m.py:4: error: Explicit "Any" is not allowed - -[case testDisallowAnyExplicitTypedDictSimple] -# cmd: mypy m.py - -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_explicit = True - -[file m.py] -from mypy_extensions import TypedDict -from typing import Any - -M = TypedDict('M', {'x': str, 'y': Any}) # error -M(x='x', y=2) # no error -def f(m: M) -> None: pass # no error -[out] -m.py:4: error: Explicit "Any" is not allowed - -[case testDisallowAnyExplicitTypedDictGeneric] -# cmd: mypy m.py - -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_explicit = True - -[file m.py] -from mypy_extensions import TypedDict -from typing import Any, List - -M = TypedDict('M', {'x': str, 'y': List[Any]}) # error -N = TypedDict('N', {'x': str, 'y': List}) # no error -[out] -m.py:4: error: Explicit "Any" is not allowed - -[case testDisallowAnyGenericsTupleNoTypeParams] -# cmd: mypy --python-version=3.6 m.py -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_generics = True - -[file m.py] -from typing import Tuple - -def f(s: Tuple) -> None: pass # error -def g(s) -> Tuple: # error - return 'a', 'b' -def h(s) -> Tuple[str, str]: # no error - return 'a', 'b' -x: Tuple = () # error -[out] -m.py:3: error: Missing type parameters for generic type "Tuple" -m.py:4: error: Missing type parameters for generic type "Tuple" -m.py:8: error: 
Missing type parameters for generic type "Tuple" - -[case testDisallowAnyGenericsTupleWithNoTypeParamsGeneric] -# cmd: mypy m.py -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_generics = True - -[file m.py] -from typing import Tuple, List - -def f(s: List[Tuple]) -> None: pass # error -def g(s: List[Tuple[str, str]]) -> None: pass # no error -[out] -m.py:3: error: Missing type parameters for generic type "Tuple" - -[case testDisallowAnyGenericsTypeType] -# cmd: mypy --python-version=3.6 m.py -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_generics = True - -[file m.py] -from typing import Type, Any - -def f(s: Type[Any]) -> None: pass # no error -def g(s) -> Type: # error - return s -def h(s) -> Type[str]: # no error - return s -x: Type = g(0) # error -[out] -m.py:4: error: Missing type parameters for generic type "Type" -m.py:8: error: Missing type parameters for generic type "Type" - -[case testDisallowAnyGenericsAliasGenericType] -# cmd: mypy m.py -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_generics = True - -[file m.py] -from typing import List - -L = List # no error - -def f(l: L) -> None: pass # error -def g(l: L[str]) -> None: pass # no error -[out] -m.py:5: error: Missing type parameters for generic type "L" - -[case testDisallowAnyGenericsGenericAlias] -# cmd: mypy --python-version=3.6 m.py -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_generics = True - -[file m.py] -from typing import List, TypeVar, Tuple - -T = TypeVar('T') -A = Tuple[T, str, T] - -def f(s: A) -> None: pass # error -def g(s) -> A: # error - return 'a', 'b', 1 -def h(s) -> A[str]: # no error - return 'a', 'b', 'c' -x: A = ('a', 'b', 1) # error -[out] -m.py:6: error: Missing type parameters for generic type "A" -m.py:7: error: Missing type parameters for generic type "A" -m.py:11: error: Missing type parameters for generic type "A" - -[case testDisallowAnyGenericsPlainList] -# cmd: mypy --python-version=3.6 m.py -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_generics 
= True - -[file m.py] -from typing import List - -def f(l: List) -> None: pass # error -def g(l: List[str]) -> None: pass # no error -def h(l: List[List]) -> None: pass # error -def i(l: List[List[List[List]]]) -> None: pass # error - -x = [] # error: need type annotation -y: List = [] # error -[out] -m.py:3: error: Missing type parameters for generic type "List" -m.py:5: error: Missing type parameters for generic type "List" -m.py:6: error: Missing type parameters for generic type "List" -m.py:8: error: Need type annotation for 'x' (hint: "x: List[] = ...") -m.py:9: error: Missing type parameters for generic type "List" - -[case testDisallowAnyGenericsCustomGenericClass] -# cmd: mypy --python-version=3.6 m.py -[file mypy.ini] -\[mypy] -\[mypy-m] -disallow_any_generics = True - -[file m.py] -from typing import Generic, TypeVar, Any - -T = TypeVar('T') -class G(Generic[T]): pass - -def f() -> G: # error - return G() - -x: G[Any] = G() # no error -y: G = x # error - -[out] -m.py:6: error: Missing type parameters for generic type "G" -m.py:10: error: Missing type parameters for generic type "G" - [case testDisallowAnyGenericsBuiltinCollections] # cmd: mypy m.py [file mypy.ini] @@ -1026,30 +641,6 @@ m.py:5: error: Missing type parameters for generic type "Dict" m.py:6: error: Missing type parameters for generic type "Set" m.py:7: error: Missing type parameters for generic type "FrozenSet" -[case testDisallowSubclassingAny] -# cmd: mypy m.py y.py -[file mypy.ini] -\[mypy] -disallow_subclassing_any = True -\[mypy-m] -disallow_subclassing_any = False - -[file m.py] -from typing import Any - -x = None # type: Any - -class ShouldBeFine(x): ... - -[file y.py] -from typing import Any - -x = None # type: Any - -class ShouldNotBeFine(x): ... 
-[out] -y.py:5: error: Class cannot subclass 'x' (has type 'Any') - [case testSectionInheritance] # cmd: mypy a [file a/__init__.py] From 5336f272c2ac625bd40a79ab3dbbe3d548f147b8 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 23 Dec 2019 16:27:53 +0000 Subject: [PATCH 027/117] Run 3.5.1 tests compiled with mypyc (#8198) Python 3.5.1 tests were the slowest job, and this makes them faster (roughly from 20min to 15min). --- .travis.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 753e777d4fab..9ec81e83fb53 100644 --- a/.travis.yml +++ b/.travis.yml @@ -24,9 +24,13 @@ env: jobs: include: # Specifically request 3.5.1 because we need to be compatible with that. - - name: "run test suite with python 3.5.1" + - name: "run test suite with python 3.5.1 (compiled with mypyc)" python: 3.5.1 dist: trusty + env: + - TOXENV=py + - EXTRA_ARGS="-n 2" + - TEST_MYPYC=1 - name: "run test suite with python 3.6" python: 3.6 # 3.6.3 pip 9.0.1 - name: "run test suite with python 3.7 (compiled with mypyc)" From 9101707bd0c96624d09cb31fe573d7e25c89a35c Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Tue, 24 Dec 2019 20:29:32 -0800 Subject: [PATCH 028/117] Make reachability code understand chained comparisons (v2) (#8148) This pull request is v2 (well, more like v10...) of my attempts to make our reachability code better understand chained comparisons. Unlike https://github.com/python/mypy/pull/7169, this diff focuses exclusively on adding support for chained operation comparisons and deliberately does not attempt to change any of the semantics of how identity and equality operations are performed. Specifically, mypy currently only examines the first two operands within a comparison expression when refining types. 
That means the following expressions all do not behave as expected: ```python x: MyEnum y: MyEnum if x is y is MyEnum.A: # x and y are not narrowed at all if x is MyEnum.A is y: # Only x is narrowed to Literal[MyEnum.A] ``` This pull request fixes this so we correctly infer the literal type for x and y in both conditionals. Some additional notes: 1. While analyzing our codebase, I found that while comparison expressions involving two or more `is` or `==` operators were somewhat common, there were almost no comparisons involving chains of `!=` or `is not` operators, and no comparisons involving "disjoint chains" -- e.g. expressions like `a == b < c == b` where there are multiple "disjoint" chains of equality comparisons. So, this diff is primarily designed to handle the case where a comparison expression has just one chain of `is` or `==`. For all other cases, I fall back to the more naive strategy of evaluating each comparison individually and and-ing the inferred types together without attempting to propagate any info. 2. I tested this code against one of our internal codebases. This ended up making mypy produce 3 or 4 new errors, but they all seemed legitimate, as far as I can tell. 3. I plan on submitting a follow-up diff that takes advantage of the work done in this diff to complete support for tagged unions using any Literal key, as previously promised. (I tried adding support for tagged unions in this diff, but attempting to simultaneously add support for chained comparisons while overhauling the semantics of `==` proved to be a little too overwhelming for me. So, baby steps.) 
--- mypy/checker.py | 603 ++++++++++++++++++++++++++--- mypy/nodes.py | 7 + mypy/test/testinfer.py | 239 +++++++++++- test-data/unit/check-enum.test | 150 +++++++ test-data/unit/check-optional.test | 22 ++ 5 files changed, 956 insertions(+), 65 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 8528bf35248d..ae829d1157c1 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -5,8 +5,8 @@ from contextlib import contextmanager from typing import ( - Dict, Set, List, cast, Tuple, TypeVar, Union, Optional, NamedTuple, Iterator, Sequence, - Mapping, + Dict, Set, List, cast, Tuple, TypeVar, Union, Optional, NamedTuple, Iterator, Iterable, + Sequence, Mapping, Generic, AbstractSet ) from typing_extensions import Final @@ -27,7 +27,7 @@ is_final_node, ARG_NAMED) from mypy import nodes -from mypy.literals import literal, literal_hash +from mypy.literals import literal, literal_hash, Key from mypy.typeanal import has_any_from_unimported_type, check_for_explicit_any from mypy.types import ( Type, AnyType, CallableType, FunctionLike, Overloaded, TupleType, TypedDictType, @@ -3842,67 +3842,101 @@ def find_isinstance_check_helper(self, node: Expression) -> Tuple[TypeMap, TypeM vartype = type_map[expr] return self.conditional_callable_type_map(expr, vartype) elif isinstance(node, ComparisonExpr): - operand_types = [coerce_to_literal(type_map[expr]) - for expr in node.operands if expr in type_map] - - is_not = node.operators == ['is not'] - if (is_not or node.operators == ['is']) and len(operand_types) == len(node.operands): - if_vars = {} # type: TypeMap - else_vars = {} # type: TypeMap - - for i, expr in enumerate(node.operands): - var_type = operand_types[i] - other_type = operand_types[1 - i] - - if literal(expr) == LITERAL_TYPE and is_singleton_type(other_type): - # This should only be true at most once: there should be - # exactly two elements in node.operands and if the 'other type' is - # a singleton type, it by definition does not need to be narrowed: - # it 
already has the most precise type possible so does not need to - # be narrowed/included in the output map. - # - # TODO: Generalize this to handle the case where 'other_type' is - # a union of singleton types. - - if isinstance(other_type, LiteralType) and other_type.is_enum_literal(): - fallback_name = other_type.fallback.type.fullname - var_type = try_expanding_enum_to_union(var_type, fallback_name) - - target_type = [TypeRange(other_type, is_upper_bound=False)] - if_vars, else_vars = conditional_type_map(expr, var_type, target_type) - break + # Step 1: Obtain the types of each operand and whether or not we can + # narrow their types. (For example, we shouldn't try narrowing the + # types of literal string or enum expressions). + + operands = node.operands + operand_types = [] + narrowable_operand_index_to_hash = {} + for i, expr in enumerate(operands): + if expr not in type_map: + return {}, {} + expr_type = type_map[expr] + operand_types.append(expr_type) + + if (literal(expr) == LITERAL_TYPE + and not is_literal_none(expr) + and not is_literal_enum(type_map, expr)): + h = literal_hash(expr) + if h is not None: + narrowable_operand_index_to_hash[i] = h + + # Step 2: Group operands chained by either the 'is' or '==' operands + # together. For all other operands, we keep them in groups of size 2. + # So the expression: + # + # x0 == x1 == x2 < x3 < x4 is x5 is x6 is not x7 is not x8 + # + # ...is converted into the simplified operator list: + # + # [("==", [0, 1, 2]), ("<", [2, 3]), ("<", [3, 4]), + # ("is", [4, 5, 6]), ("is not", [6, 7]), ("is not", [7, 8])] + # + # We group identity/equality expressions so we can propagate information + # we discover about one operand across the entire chain. We don't bother + # handling 'is not' and '!=' chains in a special way: those are very rare + # in practice. 
+ + simplified_operator_list = group_comparison_operands( + node.pairwise(), + narrowable_operand_index_to_hash, + {'==', 'is'}, + ) + + # Step 3: Analyze each group and infer more precise type maps for each + # assignable operand, if possible. We combine these type maps together + # in the final step. + + partial_type_maps = [] + for operator, expr_indices in simplified_operator_list: + if operator in {'is', 'is not'}: + if_map, else_map = self.refine_identity_comparison_expression( + operands, + operand_types, + expr_indices, + narrowable_operand_index_to_hash.keys(), + ) + elif operator in {'==', '!='}: + if_map, else_map = self.refine_equality_comparison_expression( + operands, + operand_types, + expr_indices, + narrowable_operand_index_to_hash.keys(), + ) + elif operator in {'in', 'not in'}: + assert len(expr_indices) == 2 + left_index, right_index = expr_indices + if left_index not in narrowable_operand_index_to_hash: + continue + + item_type = operand_types[left_index] + collection_type = operand_types[right_index] + + # We only try and narrow away 'None' for now + if not is_optional(item_type): + pass - if is_not: - if_vars, else_vars = else_vars, if_vars - return if_vars, else_vars - # Check for `x == y` where x is of type Optional[T] and y is of type T - # or a type that overlaps with T (or vice versa). 
- elif node.operators == ['==']: - first_type = type_map[node.operands[0]] - second_type = type_map[node.operands[1]] - if is_optional(first_type) != is_optional(second_type): - if is_optional(first_type): - optional_type, comp_type = first_type, second_type - optional_expr = node.operands[0] + collection_item_type = get_proper_type(builtin_item_type(collection_type)) + if collection_item_type is None or is_optional(collection_item_type): + continue + if (isinstance(collection_item_type, Instance) + and collection_item_type.type.fullname == 'builtins.object'): + continue + if is_overlapping_erased_types(item_type, collection_item_type): + if_map, else_map = {operands[left_index]: remove_optional(item_type)}, {} else: - optional_type, comp_type = second_type, first_type - optional_expr = node.operands[1] - if is_overlapping_erased_types(optional_type, comp_type): - return {optional_expr: remove_optional(optional_type)}, {} - elif node.operators in [['in'], ['not in']]: - expr = node.operands[0] - left_type = type_map[expr] - right_type = get_proper_type(builtin_item_type(type_map[node.operands[1]])) - right_ok = right_type and (not is_optional(right_type) and - (not isinstance(right_type, Instance) or - right_type.type.fullname != 'builtins.object')) - if (right_type and right_ok and is_optional(left_type) and - literal(expr) == LITERAL_TYPE and not is_literal_none(expr) and - is_overlapping_erased_types(left_type, right_type)): - if node.operators == ['in']: - return {expr: remove_optional(left_type)}, {} - if node.operators == ['not in']: - return {}, {expr: remove_optional(left_type)} + continue + else: + if_map = {} + else_map = {} + + if operator in {'is not', '!=', 'not in'}: + if_map, else_map = else_map, if_map + + partial_type_maps.append((if_map, else_map)) + + return reduce_partial_conditional_maps(partial_type_maps) elif isinstance(node, RefExpr): # Restrict the type of the variable to True-ish/False-ish in the if and else branches # respectively @@ 
-4107,6 +4141,143 @@ def replay_lookup(new_parent_type: ProperType) -> Optional[Type]: return output + def refine_identity_comparison_expression(self, + operands: List[Expression], + operand_types: List[Type], + chain_indices: List[int], + narrowable_operand_indices: AbstractSet[int], + ) -> Tuple[TypeMap, TypeMap]: + """Produces conditional type maps refining expressions used in an identity comparison. + + The 'operands' and 'operand_types' lists should be the full list of operands used + in the overall comparison expression. The 'chain_indices' list is the list of indices + actually used within this identity comparison chain. + + So if we have the expression: + + a <= b is c is d <= e + + ...then 'operands' and 'operand_types' would be lists of length 5 and 'chain_indices' + would be the list [1, 2, 3]. + + The 'narrowable_operand_indices' parameter is the set of all indices we are allowed + to refine the types of: that is, all operands that will potentially be a part of + the output TypeMaps. + """ + singleton = None # type: Optional[ProperType] + possible_singleton_indices = [] + for i in chain_indices: + coerced_type = coerce_to_literal(operand_types[i]) + if not is_singleton_type(coerced_type): + continue + if singleton and not is_same_type(singleton, coerced_type): + # We have multiple disjoint singleton types. So the 'if' branch + # must be unreachable. + return None, {} + singleton = coerced_type + possible_singleton_indices.append(i) + + # There's nothing we can currently infer if none of the operands are singleton types, + # so we end early and infer nothing. + if singleton is None: + return {}, {} + + # If possible, use an unassignable expression as the singleton. + # We skip refining the type of the singleton below, so ideally we'd + # want to pick an expression we were going to skip anyways. 
+ singleton_index = -1 + for i in possible_singleton_indices: + if i not in narrowable_operand_indices: + singleton_index = i + + # But if none of the possible singletons are unassignable ones, we give up + # and arbitrarily pick the last item, mostly because other parts of the + # type narrowing logic bias towards picking the rightmost item and it'd be + # nice to stay consistent. + # + # That said, it shouldn't matter which index we pick. For example, suppose we + # have this if statement, where 'x' and 'y' both have singleton types: + # + # if x is y: + # reveal_type(x) + # reveal_type(y) + # else: + # reveal_type(x) + # reveal_type(y) + # + # At this point, 'x' and 'y' *must* have the same singleton type: we would have + # ended early in the first for-loop in this function if they weren't. + # + # So, we should always get the same result in the 'if' case no matter which + # index we pick. And while we do end up getting different results in the 'else' + # case depending on the index (e.g. if we pick 'y', then its type stays the same + # while 'x' is narrowed to ''), this distinction is also moot: mypy + # currently will just mark the whole branch as unreachable if either operand is + # narrowed to . + if singleton_index == -1: + singleton_index = possible_singleton_indices[-1] + + enum_name = None + if isinstance(singleton, LiteralType) and singleton.is_enum_literal(): + enum_name = singleton.fallback.type.fullname + + target_type = [TypeRange(singleton, is_upper_bound=False)] + + partial_type_maps = [] + for i in chain_indices: + # If we try refining a singleton against itself, conditional_type_map + # will end up assuming that the 'else' branch is unreachable. This is + # typically not what we want: generally the user will intend for the + # singleton type to be some fixed 'sentinel' value and will want to refine + # the other exprs against this one instead. 
+ if i == singleton_index: + continue + + # Naturally, we can't refine operands which are not permitted to be refined. + if i not in narrowable_operand_indices: + continue + + expr = operands[i] + expr_type = coerce_to_literal(operand_types[i]) + + if enum_name is not None: + expr_type = try_expanding_enum_to_union(expr_type, enum_name) + partial_type_maps.append(conditional_type_map(expr, expr_type, target_type)) + + return reduce_partial_conditional_maps(partial_type_maps) + + def refine_equality_comparison_expression(self, + operands: List[Expression], + operand_types: List[Type], + chain_indices: List[int], + narrowable_operand_indices: AbstractSet[int], + ) -> Tuple[TypeMap, TypeMap]: + """Produces conditional type maps refining expressions used in an equality comparison. + + For more details, see the docstring of 'refine_equality_comparison' up above. + The only difference is that this function is for refining equality operations + (e.g. 'a == b == c') instead of identity ('a is b is c'). + """ + non_optional_types = [] + for i in chain_indices: + typ = operand_types[i] + if not is_optional(typ): + non_optional_types.append(typ) + + # Make sure we have a mixture of optional and non-optional types. 
+ if len(non_optional_types) == 0 or len(non_optional_types) == len(chain_indices): + return {}, {} + + if_map = {} + for i in narrowable_operand_indices: + expr_type = operand_types[i] + if not is_optional(expr_type): + continue + if any(is_overlapping_erased_types(expr_type, t) for t in non_optional_types): + if_map[operands[i]] = remove_optional(expr_type) + + return if_map, {} + # # Helpers # @@ -4541,16 +4712,55 @@ def gen_unique_name(base: str, table: SymbolTable) -> str: def is_true_literal(n: Expression) -> bool: + """Returns true if this expression is the 'True' literal/keyword.""" return (refers_to_fullname(n, 'builtins.True') or isinstance(n, IntExpr) and n.value == 1) def is_false_literal(n: Expression) -> bool: + """Returns true if this expression is the 'False' literal/keyword.""" return (refers_to_fullname(n, 'builtins.False') or isinstance(n, IntExpr) and n.value == 0) +def is_literal_enum(type_map: Mapping[Expression, Type], n: Expression) -> bool: + """Returns true if this expression (with the given type context) is an Enum literal. + + For example, if we had an enum: + + class Foo(Enum): + A = 1 + B = 2 + + ...and if the expression 'Foo' referred to that enum within the current type context, + then the expression 'Foo.A' would be a a literal enum. However, if we did 'a = Foo.A', + then the variable 'a' would *not* be a literal enum. + + We occasionally special-case expressions like 'Foo.A' and treat them as a single primitive + unit for the same reasons we sometimes treat 'True', 'False', or 'None' as a single + primitive unit. 
+ """ + if not isinstance(n, MemberExpr) or not isinstance(n.expr, NameExpr): + return False + + parent_type = type_map.get(n.expr) + member_type = type_map.get(n) + if member_type is None or parent_type is None: + return False + + parent_type = get_proper_type(parent_type) + member_type = coerce_to_literal(member_type) + if not isinstance(parent_type, FunctionLike) or not isinstance(member_type, LiteralType): + return False + + if not parent_type.is_type_obj(): + return False + + return member_type.is_enum_literal() and member_type.fallback.type == parent_type.type_object() + + def is_literal_none(n: Expression) -> bool: + """Returns true if this expression is the 'None' literal/keyword.""" return isinstance(n, NameExpr) and n.fullname == 'builtins.None' @@ -4641,6 +4851,76 @@ def or_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap: return result +def or_partial_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap: + """Calculate what information we can learn from the truth of (e1 or e2) + in terms of the information that we can learn from the truth of e1 and + the truth of e2. + + Unlike 'or_conditional_maps', we include an expression in the output even + if it exists in only one map: we're assuming both maps are "partial" and + contain information about only some expressions, and so we "or" together + expressions both maps have information on. + """ + + if m1 is None: + return m2 + if m2 is None: + return m1 + # The logic here is a blend between 'and_conditional_maps' + # and 'or_conditional_maps'. We use the high-level logic from the + # former to ensure all expressions make it in the output map, + # but resolve cases where both maps contain info on the same + # expr using the unioning strategy from the latter. 
+ result = m2.copy() + m2_keys = {literal_hash(n2): n2 for n2 in m2} + for n1 in m1: + n2 = m2_keys.get(literal_hash(n1)) + if n2 is None: + result[n1] = m1[n1] + else: + result[n2] = make_simplified_union([m1[n1], result[n2]]) + + return result + + +def reduce_partial_conditional_maps(type_maps: List[Tuple[TypeMap, TypeMap]], + ) -> Tuple[TypeMap, TypeMap]: + """Reduces a list containing pairs of *partial* if/else TypeMaps into a single pair. + + That is, if a expression exists in only one map, we always include it in the output. + We only "and"/"or" together expressions that appear in multiple if/else maps. + + So for example, if we had the input: + + [ + ({x: TypeIfX, shared: TypeIfShared1}, {x: TypeElseX, shared: TypeElseShared1}), + ({y: TypeIfY, shared: TypeIfShared2}, {y: TypeElseY, shared: TypeElseShared2}), + ] + + ...we'd return the output: + + ( + {x: TypeIfX, y: TypeIfY, shared: PseudoIntersection[TypeIfShared1, TypeIfShared2]}, + {x: TypeElseX, y: TypeElseY, shared: Union[TypeElseShared1, TypeElseShared2]}, + ) + + ...where "PseudoIntersection[X, Y] == Y" because mypy actually doesn't understand intersections + yet, so we settle for just arbitrarily picking the right expr's type. + """ + if len(type_maps) == 0: + return {}, {} + elif len(type_maps) == 1: + return type_maps[0] + else: + final_if_map, final_else_map = type_maps[0] + for if_map, else_map in type_maps[1:]: + # 'and_conditional_maps' does the same thing for both global and partial type maps, + # which is why we don't need to have an 'and_partial_conditional_maps' function. 
+ final_if_map = and_conditional_maps(final_if_map, if_map) + final_else_map = or_partial_conditional_maps(final_else_map, else_map) + return final_if_map, final_else_map + + def convert_to_typetype(type_map: TypeMap) -> TypeMap: converted_type_map = {} # type: Dict[Expression, Type] if type_map is None: @@ -5007,6 +5287,205 @@ def nothing() -> Iterator[None]: yield +TKey = TypeVar('TKey') +TValue = TypeVar('TValue') + + +class DisjointDict(Generic[TKey, TValue]): + """An variation of the union-find algorithm/data structure where instead of keeping + track of just disjoint sets, we keep track of disjoint dicts -- keep track of multiple + Set[Key] -> Set[Value] mappings, where each mapping's keys are guaranteed to be disjoint. + + This data structure is currently used exclusively by 'group_comparison_operands' below + to merge chains of '==' and 'is' comparisons when two or more chains use the same expression + in best-case O(n), where n is the number of operands. + + Specifically, the `add_mapping()` function and `items()` functions will take on average + O(k + v) and O(n) respectively, where k and v are the number of keys and values we're adding + for a given chain. Note that k <= n and v <= n. + + We hit these average/best-case scenarios for most user code: e.g. when the user has just + a single chain like 'a == b == c == d == ...' or multiple disjoint chains like + 'a==b < c==d < e==f < ...'. (Note that a naive iterative merging would be O(n^2) for + the latter case). + + In comparison, this data structure will make 'group_comparison_operands' have a worst-case + runtime of O(n*log(n)): 'add_mapping()' and 'items()' are worst-case O(k*log(n) + v) and + O(k*log(n)) respectively. This happens only in the rare case where the user keeps repeatedly + making disjoint mappings before merging them in a way that persistently dodges the path + compression optimization in '_lookup_root_id', which would end up constructing a single + tree of height log_2(n). 
This makes root lookups no longer amoritized constant time when we + finally call 'items()'. + """ + def __init__(self) -> None: + # Each key maps to a unique ID + self._key_to_id = {} # type: Dict[TKey, int] + + # Each id points to the parent id, forming a forest of upwards-pointing trees. If the + # current id already is the root, it points to itself. We gradually flatten these trees + # as we perform root lookups: eventually all nodes point directly to its root. + self._id_to_parent_id = {} # type: Dict[int, int] + + # Each root id in turn maps to the set of values. + self._root_id_to_values = {} # type: Dict[int, Set[TValue]] + + def add_mapping(self, keys: Set[TKey], values: Set[TValue]) -> None: + """Adds a 'Set[TKey] -> Set[TValue]' mapping. If there already exists a mapping + containing one or more of the given keys, we merge the input mapping with the old one. + + Note that the given set of keys must be non-empty -- otherwise, nothing happens. + """ + if len(keys) == 0: + return + + subtree_roots = [self._lookup_or_make_root_id(key) for key in keys] + new_root = subtree_roots[0] + + root_values = self._root_id_to_values[new_root] + root_values.update(values) + for subtree_root in subtree_roots[1:]: + if subtree_root == new_root or subtree_root not in self._root_id_to_values: + continue + self._id_to_parent_id[subtree_root] = new_root + root_values.update(self._root_id_to_values.pop(subtree_root)) + + def items(self) -> List[Tuple[Set[TKey], Set[TValue]]]: + """Returns all disjoint mappings in key-value pairs.""" + root_id_to_keys = {} # type: Dict[int, Set[TKey]] + for key in self._key_to_id: + root_id = self._lookup_root_id(key) + if root_id not in root_id_to_keys: + root_id_to_keys[root_id] = set() + root_id_to_keys[root_id].add(key) + + output = [] + for root_id, keys in root_id_to_keys.items(): + output.append((keys, self._root_id_to_values[root_id])) + + return output + + def _lookup_or_make_root_id(self, key: TKey) -> int: + if key in 
self._key_to_id: + return self._lookup_root_id(key) + else: + new_id = len(self._key_to_id) + self._key_to_id[key] = new_id + self._id_to_parent_id[new_id] = new_id + self._root_id_to_values[new_id] = set() + return new_id + + def _lookup_root_id(self, key: TKey) -> int: + i = self._key_to_id[key] + while i != self._id_to_parent_id[i]: + # Optimization: make keys directly point to their grandparents to speed up + # future traversals. This prevents degenerate trees of height n from forming. + new_parent = self._id_to_parent_id[self._id_to_parent_id[i]] + self._id_to_parent_id[i] = new_parent + i = new_parent + return i + + +def group_comparison_operands(pairwise_comparisons: Iterable[Tuple[str, Expression, Expression]], + operand_to_literal_hash: Mapping[int, Key], + operators_to_group: Set[str], + ) -> List[Tuple[str, List[int]]]: + """Group a series of comparison operands together chained by any operand + in the 'operators_to_group' set. All other pairwise operands are kept in + groups of size 2. + + For example, suppose we have the input comparison expression: + + x0 == x1 == x2 < x3 < x4 is x5 is x6 is not x7 is not x8 + + If we get these expressions in a pairwise way (e.g. by calling ComparisionExpr's + 'pairwise()' method), we get the following as input: + + [('==', x0, x1), ('==', x1, x2), ('<', x2, x3), ('<', x3, x4), + ('is', x4, x5), ('is', x5, x6), ('is not', x6, x7), ('is not', x7, x8)] + + If `operators_to_group` is the set {'==', 'is'}, this function will produce + the following "simplified operator list": + + [("==", [0, 1, 2]), ("<", [2, 3]), ("<", [3, 4]), + ("is", [4, 5, 6]), ("is not", [6, 7]), ("is not", [7, 8])] + + Note that (a) we yield *indices* to the operands rather then the operand + expressions themselves and that (b) operands used in a consecutive chain + of '==' or 'is' are grouped together. + + If two of these chains happen to contain operands with the same underlying + literal hash (e.g. 
are assignable and correspond to the same expression), + we combine those chains together. For example, if we had: + + same == x < y == same + + ...and if 'operand_to_literal_hash' contained the same values for the indices + 0 and 3, we'd produce the following output: + + [("==", [0, 1, 2, 3]), ("<", [1, 2])] + + But if the 'operand_to_literal_hash' did *not* contain an entry, we'd instead + default to returning: + + [("==", [0, 1]), ("<", [1, 2]), ("==", [2, 3])] + + This function is currently only used to assist with type-narrowing refinements + and is extracted out to a helper function so we can unit test it. + """ + groups = { + op: DisjointDict() for op in operators_to_group + } # type: Dict[str, DisjointDict[Key, int]] + + simplified_operator_list = [] # type: List[Tuple[str, List[int]]] + last_operator = None # type: Optional[str] + current_indices = set() # type: Set[int] + current_hashes = set() # type: Set[Key] + for i, (operator, left_expr, right_expr) in enumerate(pairwise_comparisons): + if last_operator is None: + last_operator = operator + + if current_indices and (operator != last_operator or operator not in operators_to_group): + # If some of the operands in the chain are assignable, defer adding it: we might + # end up needing to merge it with other chains that appear later. + if len(current_hashes) == 0: + simplified_operator_list.append((last_operator, sorted(current_indices))) + else: + groups[last_operator].add_mapping(current_hashes, current_indices) + last_operator = operator + current_indices = set() + current_hashes = set() + + # Note: 'i' corresponds to the left operand index, so 'i + 1' is the + # right operand. 
+ current_indices.add(i) + current_indices.add(i + 1) + + # We only ever want to combine operands/combine chains for these operators + if operator in operators_to_group: + left_hash = operand_to_literal_hash.get(i) + if left_hash is not None: + current_hashes.add(left_hash) + right_hash = operand_to_literal_hash.get(i + 1) + if right_hash is not None: + current_hashes.add(right_hash) + + if last_operator is not None: + if len(current_hashes) == 0: + simplified_operator_list.append((last_operator, sorted(current_indices))) + else: + groups[last_operator].add_mapping(current_hashes, current_indices) + + # Now that we know which chains happen to contain the same underlying expressions + # and can be merged together, add in this info back to the output. + for operator, disjoint_dict in groups.items(): + for keys, indices in disjoint_dict.items(): + simplified_operator_list.append((operator, sorted(indices))) + + # For stability, reorder list by the first operand index to appear + simplified_operator_list.sort(key=lambda item: item[1][0]) + return simplified_operator_list + + def is_typed_callable(c: Optional[Type]) -> bool: c = get_proper_type(c) if not c or not isinstance(c, CallableType): diff --git a/mypy/nodes.py b/mypy/nodes.py index 4ee3948fedd3..792a89a5fea4 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -1750,6 +1750,13 @@ def __init__(self, operators: List[str], operands: List[Expression]) -> None: self.operands = operands self.method_types = [] + def pairwise(self) -> Iterator[Tuple[str, Expression, Expression]]: + """If this comparison expr is "a < b is c == d", yields the sequence + ("<", a, b), ("is", b, c), ("==", c, d) + """ + for i, operator in enumerate(self.operators): + yield operator, self.operands[i], self.operands[i + 1] + def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_comparison_expr(self) diff --git a/mypy/test/testinfer.py b/mypy/test/testinfer.py index 2e26e99453b8..e70d74530a99 100644 --- 
a/mypy/test/testinfer.py +++ b/mypy/test/testinfer.py @@ -1,16 +1,18 @@ """Test cases for type inference helper functions.""" -from typing import List, Optional, Tuple, Union +from typing import List, Optional, Tuple, Union, Dict, Set from mypy.test.helpers import Suite, assert_equal from mypy.argmap import map_actuals_to_formals -from mypy.nodes import ARG_POS, ARG_OPT, ARG_STAR, ARG_STAR2, ARG_NAMED +from mypy.checker import group_comparison_operands, DisjointDict +from mypy.literals import Key +from mypy.nodes import ARG_POS, ARG_OPT, ARG_STAR, ARG_STAR2, ARG_NAMED, NameExpr from mypy.types import AnyType, TupleType, Type, TypeOfAny from mypy.test.typefixture import TypeFixture class MapActualsToFormalsSuite(Suite): - """Test cases for checkexpr.map_actuals_to_formals.""" + """Test cases for argmap.map_actuals_to_formals.""" def test_basic(self) -> None: self.assert_map([], [], []) @@ -223,3 +225,234 @@ def expand_callee_kinds(kinds_and_names: List[Union[int, Tuple[int, str]]] kinds.append(v) names.append(None) return kinds, names + + +class OperandDisjointDictSuite(Suite): + """Test cases for checker.DisjointDict, which is used for type inference with operands.""" + def new(self) -> DisjointDict[int, str]: + return DisjointDict() + + def test_independent_maps(self) -> None: + d = self.new() + d.add_mapping({0, 1}, {"group1"}) + d.add_mapping({2, 3, 4}, {"group2"}) + d.add_mapping({5, 6, 7}, {"group3"}) + + self.assertEqual(d.items(), [ + ({0, 1}, {"group1"}), + ({2, 3, 4}, {"group2"}), + ({5, 6, 7}, {"group3"}), + ]) + + def test_partial_merging(self) -> None: + d = self.new() + d.add_mapping({0, 1}, {"group1"}) + d.add_mapping({1, 2}, {"group2"}) + d.add_mapping({3, 4}, {"group3"}) + d.add_mapping({5, 0}, {"group4"}) + d.add_mapping({5, 6}, {"group5"}) + d.add_mapping({4, 7}, {"group6"}) + + self.assertEqual(d.items(), [ + ({0, 1, 2, 5, 6}, {"group1", "group2", "group4", "group5"}), + ({3, 4, 7}, {"group3", "group6"}), + ]) + + def test_full_merging(self) -> 
None: + d = self.new() + d.add_mapping({0, 1, 2}, {"a"}) + d.add_mapping({3, 4, 2}, {"b"}) + d.add_mapping({10, 11, 12}, {"c"}) + d.add_mapping({13, 14, 15}, {"d"}) + d.add_mapping({14, 10, 16}, {"e"}) + d.add_mapping({0, 10}, {"f"}) + + self.assertEqual(d.items(), [ + ({0, 1, 2, 3, 4, 10, 11, 12, 13, 14, 15, 16}, {"a", "b", "c", "d", "e", "f"}), + ]) + + def test_merge_with_multiple_overlaps(self) -> None: + d = self.new() + d.add_mapping({0, 1, 2}, {"a"}) + d.add_mapping({3, 4, 5}, {"b"}) + d.add_mapping({1, 2, 4, 5}, {"c"}) + d.add_mapping({6, 1, 2, 4, 5}, {"d"}) + d.add_mapping({6, 1, 2, 4, 5}, {"e"}) + + self.assertEqual(d.items(), [ + ({0, 1, 2, 3, 4, 5, 6}, {"a", "b", "c", "d", "e"}), + ]) + + +class OperandComparisonGroupingSuite(Suite): + """Test cases for checker.group_comparison_operands.""" + def literal_keymap(self, assignable_operands: Dict[int, NameExpr]) -> Dict[int, Key]: + output = {} # type: Dict[int, Key] + for index, expr in assignable_operands.items(): + output[index] = ('FakeExpr', expr.name) + return output + + def test_basic_cases(self) -> None: + # Note: the grouping function doesn't actually inspect the input exprs, so we + # just default to using NameExprs for simplicity. 
+ x0 = NameExpr('x0') + x1 = NameExpr('x1') + x2 = NameExpr('x2') + x3 = NameExpr('x3') + x4 = NameExpr('x4') + + basic_input = [('==', x0, x1), ('==', x1, x2), ('<', x2, x3), ('==', x3, x4)] + + none_assignable = self.literal_keymap({}) + all_assignable = self.literal_keymap({0: x0, 1: x1, 2: x2, 3: x3, 4: x4}) + + for assignable in [none_assignable, all_assignable]: + self.assertEqual( + group_comparison_operands(basic_input, assignable, set()), + [('==', [0, 1]), ('==', [1, 2]), ('<', [2, 3]), ('==', [3, 4])], + ) + self.assertEqual( + group_comparison_operands(basic_input, assignable, {'=='}), + [('==', [0, 1, 2]), ('<', [2, 3]), ('==', [3, 4])], + ) + self.assertEqual( + group_comparison_operands(basic_input, assignable, {'<'}), + [('==', [0, 1]), ('==', [1, 2]), ('<', [2, 3]), ('==', [3, 4])], + ) + self.assertEqual( + group_comparison_operands(basic_input, assignable, {'==', '<'}), + [('==', [0, 1, 2]), ('<', [2, 3]), ('==', [3, 4])], + ) + + def test_multiple_groups(self) -> None: + x0 = NameExpr('x0') + x1 = NameExpr('x1') + x2 = NameExpr('x2') + x3 = NameExpr('x3') + x4 = NameExpr('x4') + x5 = NameExpr('x5') + + self.assertEqual( + group_comparison_operands( + [('==', x0, x1), ('==', x1, x2), ('is', x2, x3), ('is', x3, x4)], + self.literal_keymap({}), + {'==', 'is'}, + ), + [('==', [0, 1, 2]), ('is', [2, 3, 4])], + ) + self.assertEqual( + group_comparison_operands( + [('==', x0, x1), ('==', x1, x2), ('==', x2, x3), ('==', x3, x4)], + self.literal_keymap({}), + {'==', 'is'}, + ), + [('==', [0, 1, 2, 3, 4])], + ) + self.assertEqual( + group_comparison_operands( + [('is', x0, x1), ('==', x1, x2), ('==', x2, x3), ('==', x3, x4)], + self.literal_keymap({}), + {'==', 'is'}, + ), + [('is', [0, 1]), ('==', [1, 2, 3, 4])], + ) + self.assertEqual( + group_comparison_operands( + [('is', x0, x1), ('is', x1, x2), ('<', x2, x3), ('==', x3, x4), ('==', x4, x5)], + self.literal_keymap({}), + {'==', 'is'}, + ), + [('is', [0, 1, 2]), ('<', [2, 3]), ('==', [3, 4, 5])], + ) 
+ + def test_multiple_groups_coalescing(self) -> None: + x0 = NameExpr('x0') + x1 = NameExpr('x1') + x2 = NameExpr('x2') + x3 = NameExpr('x3') + x4 = NameExpr('x4') + + nothing_combined = [('==', [0, 1, 2]), ('<', [2, 3]), ('==', [3, 4, 5])] + everything_combined = [('==', [0, 1, 2, 3, 4, 5]), ('<', [2, 3])] + + # Note: We do 'x4 == x0' at the very end! + two_groups = [ + ('==', x0, x1), ('==', x1, x2), ('<', x2, x3), ('==', x3, x4), ('==', x4, x0), + ] + self.assertEqual( + group_comparison_operands( + two_groups, + self.literal_keymap({0: x0, 1: x1, 2: x2, 3: x3, 4: x4, 5: x0}), + {'=='}, + ), + everything_combined, + "All vars are assignable, everything is combined" + ) + self.assertEqual( + group_comparison_operands( + two_groups, + self.literal_keymap({1: x1, 2: x2, 3: x3, 4: x4}), + {'=='}, + ), + nothing_combined, + "x0 is unassignable, so no combining" + ) + self.assertEqual( + group_comparison_operands( + two_groups, + self.literal_keymap({0: x0, 1: x1, 3: x3, 5: x0}), + {'=='}, + ), + everything_combined, + "Some vars are unassignable but x0 is, so we combine" + ) + self.assertEqual( + group_comparison_operands( + two_groups, + self.literal_keymap({0: x0, 5: x0}), + {'=='}, + ), + everything_combined, + "All vars are unassignable but x0 is, so we combine" + ) + + def test_multiple_groups_different_operators(self) -> None: + x0 = NameExpr('x0') + x1 = NameExpr('x1') + x2 = NameExpr('x2') + x3 = NameExpr('x3') + + groups = [('==', x0, x1), ('==', x1, x2), ('is', x2, x3), ('is', x3, x0)] + keymap = self.literal_keymap({0: x0, 1: x1, 2: x2, 3: x3, 4: x0}) + self.assertEqual( + group_comparison_operands(groups, keymap, {'==', 'is'}), + [('==', [0, 1, 2]), ('is', [2, 3, 4])], + "Different operators can never be combined" + ) + + def test_single_pair(self) -> None: + x0 = NameExpr('x0') + x1 = NameExpr('x1') + + single_comparison = [('==', x0, x1)] + expected_output = [('==', [0, 1])] + + assignable_combinations = [ + {}, {0: x0}, {1: x1}, {0: x0, 1: x1}, + ] # 
type: List[Dict[int, NameExpr]] + to_group_by = [set(), {'=='}, {'is'}] # type: List[Set[str]] + + for combo in assignable_combinations: + for operators in to_group_by: + keymap = self.literal_keymap(combo) + self.assertEqual( + group_comparison_operands(single_comparison, keymap, operators), + expected_output, + ) + + def test_empty_pair_list(self) -> None: + # This case should never occur in practice -- ComparisionExprs + # always contain at least one comparision. But in case it does... + + self.assertEqual(group_comparison_operands([], {}, set()), []) + self.assertEqual(group_comparison_operands([], {}, {'=='}), []) diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test index 241cd1ca049c..9d027f47192f 100644 --- a/test-data/unit/check-enum.test +++ b/test-data/unit/check-enum.test @@ -967,3 +967,153 @@ class A: self.b = Enum("x", [("foo", "bar")]) # E: Enum type as attribute is not supported reveal_type(A().b) # N: Revealed type is 'Any' + +[case testEnumReachabilityWithChaining] +from enum import Enum + +class Foo(Enum): + A = 1 + B = 2 + +x: Foo +y: Foo + +if x is y is Foo.A: + reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' + reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' +else: + reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.B]' + reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.B]' +reveal_type(x) # N: Revealed type is '__main__.Foo' +reveal_type(y) # N: Revealed type is '__main__.Foo' + +if x is Foo.A is y: + reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' + reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' +else: + reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.B]' + reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.B]' +reveal_type(x) # N: Revealed type is '__main__.Foo' +reveal_type(y) # N: Revealed type is '__main__.Foo' + +if Foo.A is x is y: + reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' + reveal_type(y) # N: 
Revealed type is 'Literal[__main__.Foo.A]' +else: + reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.B]' + reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.B]' +reveal_type(x) # N: Revealed type is '__main__.Foo' +reveal_type(y) # N: Revealed type is '__main__.Foo' + +[builtins fixtures/primitives.pyi] + +[case testEnumReachabilityWithChainingDisjoint] +# flags: --warn-unreachable +from enum import Enum + +class Foo(Enum): + A = 1 + B = 2 + + # Used to divide up a chained comparison into multiple identity groups + def __lt__(self, other: object) -> bool: return True + +x: Foo +y: Foo + +# No conflict +if x is Foo.A < y is Foo.B: + reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' + reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.B]' +else: + reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.B]' + reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' +reveal_type(x) # N: Revealed type is '__main__.Foo' +reveal_type(y) # N: Revealed type is '__main__.Foo' + +# The standard output when we end up inferring two disjoint facts about the same expr +if x is Foo.A and x is Foo.B: + reveal_type(x) # E: Statement is unreachable +else: + reveal_type(x) # N: Revealed type is '__main__.Foo' +reveal_type(x) # N: Revealed type is '__main__.Foo' + +# ..and we get the same result if we have two disjoint groups within the same comp expr +if x is Foo.A < x is Foo.B: + reveal_type(x) # E: Statement is unreachable +else: + reveal_type(x) # N: Revealed type is '__main__.Foo' +reveal_type(x) # N: Revealed type is '__main__.Foo' +[builtins fixtures/primitives.pyi] + +[case testEnumReachabilityWithChainingDirectConflict] +# flags: --warn-unreachable +from enum import Enum +from typing_extensions import Literal, Final + +class Foo(Enum): + A = 1 + B = 2 + C = 3 + +x: Foo +if x is Foo.A is Foo.B: + reveal_type(x) # E: Statement is unreachable +else: + reveal_type(x) # N: Revealed type is '__main__.Foo' +reveal_type(x) # N: Revealed 
type is '__main__.Foo' + +literal_a: Literal[Foo.A] +literal_b: Literal[Foo.B] +if x is literal_a is literal_b: + reveal_type(x) # E: Statement is unreachable +else: + reveal_type(x) # N: Revealed type is '__main__.Foo' +reveal_type(x) # N: Revealed type is '__main__.Foo' + +final_a: Final = Foo.A +final_b: Final = Foo.B +if x is final_a is final_b: + reveal_type(x) # E: Statement is unreachable +else: + reveal_type(x) # N: Revealed type is '__main__.Foo' +reveal_type(x) # N: Revealed type is '__main__.Foo' + +[builtins fixtures/primitives.pyi] + +[case testEnumReachabilityWithChainingBigDisjoints] +# flags: --warn-unreachable +from enum import Enum +from typing_extensions import Literal, Final + +class Foo(Enum): + A = 1 + B = 2 + C = 3 + + def __lt__(self, other: object) -> bool: return True + +x0: Foo +x1: Foo +x2: Foo +x3: Foo +x4: Foo +x5: Foo + +if x0 is x1 is Foo.A is x2 < x3 is Foo.B is x4 is x5: + reveal_type(x0) # N: Revealed type is 'Literal[__main__.Foo.A]' + reveal_type(x1) # N: Revealed type is 'Literal[__main__.Foo.A]' + reveal_type(x2) # N: Revealed type is 'Literal[__main__.Foo.A]' + + reveal_type(x3) # N: Revealed type is 'Literal[__main__.Foo.B]' + reveal_type(x4) # N: Revealed type is 'Literal[__main__.Foo.B]' + reveal_type(x5) # N: Revealed type is 'Literal[__main__.Foo.B]' +else: + reveal_type(x0) # N: Revealed type is 'Union[Literal[__main__.Foo.B], Literal[__main__.Foo.C]]' + reveal_type(x1) # N: Revealed type is 'Union[Literal[__main__.Foo.B], Literal[__main__.Foo.C]]' + reveal_type(x2) # N: Revealed type is 'Union[Literal[__main__.Foo.B], Literal[__main__.Foo.C]]' + + reveal_type(x3) # N: Revealed type is 'Union[Literal[__main__.Foo.A], Literal[__main__.Foo.C]]' + reveal_type(x4) # N: Revealed type is 'Union[Literal[__main__.Foo.A], Literal[__main__.Foo.C]]' + reveal_type(x5) # N: Revealed type is 'Union[Literal[__main__.Foo.A], Literal[__main__.Foo.C]]' +[builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-optional.test 
b/test-data/unit/check-optional.test index 4b18cb59d1a7..9c40d550699e 100644 --- a/test-data/unit/check-optional.test +++ b/test-data/unit/check-optional.test @@ -528,6 +528,28 @@ else: reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int, None]' [builtins fixtures/ops.pyi] +[case testInferEqualsNotOptionalWithMultipleArgs] +from typing import Optional +x: Optional[int] +y: Optional[int] +if x == y == 1: + reveal_type(x) # N: Revealed type is 'builtins.int' + reveal_type(y) # N: Revealed type is 'builtins.int' +else: + reveal_type(x) # N: Revealed type is 'Union[builtins.int, None]' + reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' + +class A: pass +a: Optional[A] +b: Optional[A] +if a == b == object(): + reveal_type(a) # N: Revealed type is '__main__.A' + reveal_type(b) # N: Revealed type is '__main__.A' +else: + reveal_type(a) # N: Revealed type is 'Union[__main__.A, None]' + reveal_type(b) # N: Revealed type is 'Union[__main__.A, None]' +[builtins fixtures/ops.pyi] + [case testWarnNoReturnWorksWithStrictOptional] # flags: --warn-no-return def f() -> None: From cdd91ba819646b0138b05a8e9180faba4ef0ff92 Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Fri, 3 Jan 2020 06:39:23 -0800 Subject: [PATCH 029/117] Fix regression in container check logic (#8232) This PR fixes the crash reported in https://github.com/python/mypy/issues/8230, by replacing the 'pass' with the 'continue', as suggested. However, it does *not* fix the underlying root cause -- I don't think I actually understand the relevant pieces of code enough to feel confident volunteering a fix. So, I settled for just fixing the regression. Basically, it seems this bug is due to how we try inferring the type of the lambda in multiple passes to resolve the types. We pencil in an ErasedType during the first pass -- and then subsequently crash when attempting to type check the body during that pass. I'll leave more details about this in the linked issue. 
--- mypy/checker.py | 2 +- test-data/unit/check-optional.test | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/mypy/checker.py b/mypy/checker.py index ae829d1157c1..5046d9431b4f 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -3915,7 +3915,7 @@ def find_isinstance_check_helper(self, node: Expression) -> Tuple[TypeMap, TypeM # We only try and narrow away 'None' for now if not is_optional(item_type): - pass + continue collection_item_type = get_proper_type(builtin_item_type(collection_type)) if collection_item_type is None or is_optional(collection_item_type): diff --git a/test-data/unit/check-optional.test b/test-data/unit/check-optional.test index 9c40d550699e..15698e99ddf5 100644 --- a/test-data/unit/check-optional.test +++ b/test-data/unit/check-optional.test @@ -550,6 +550,15 @@ else: reveal_type(b) # N: Revealed type is 'Union[__main__.A, None]' [builtins fixtures/ops.pyi] +[case testInferInWithErasedTypes] +from typing import TypeVar, Callable + +T = TypeVar('T') +def foo(f: Callable[[T], bool], it: T) -> None: ... + +foo(lambda x: x in [1, 2] and bool(), 3) +[builtins fixtures/list.pyi] + [case testWarnNoReturnWorksWithStrictOptional] # flags: --warn-no-return def f() -> None: From 89e259f3cbbc02cf67f7d810553b94dea0dc868b Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 6 Jan 2020 11:24:51 +0000 Subject: [PATCH 030/117] Make runtests.py skip some slow mypyc tests by default (#8199) The skipped mypyc run tests are very slow and I believe that they only rarely fail for changes that don't touch mypyc. The skipped tests can be run via runtests.py mypyc-extra. Also explain running tests in some more detail in the the docs. 
--- README.md | 5 +++-- runtests.py | 16 +++++++++++++--- test-data/unit/README.md | 36 ++++++++++++++++++++++++------------ 3 files changed, 40 insertions(+), 17 deletions(-) diff --git a/README.md b/README.md index 6225a001b9e6..9b35b41b4a50 100644 --- a/README.md +++ b/README.md @@ -114,7 +114,7 @@ Mypy can be integrated into popular IDEs: * Using [Syntastic](https://github.com/vim-syntastic/syntastic): in `~/.vimrc` add `let g:syntastic_python_checkers=['mypy']` * Using [ALE](https://github.com/dense-analysis/ale): should be enabled by default when `mypy` is installed, - or can be explicitly enabled by adding `let b:ale_linters = ['mypy']` in `~/vim/ftplugin/python.vim` + or can be explicitly enabled by adding `let b:ale_linters = ['mypy']` in `~/vim/ftplugin/python.vim` * Emacs: using [Flycheck](https://github.com/flycheck/) and [Flycheck-mypy](https://github.com/lbolla/emacs-flycheck-mypy) * Sublime Text: [SublimeLinter-contrib-mypy](https://github.com/fredcallaway/SublimeLinter-contrib-mypy) * Atom: [linter-mypy](https://atom.io/packages/linter-mypy) @@ -237,7 +237,8 @@ The basic way to run tests: $ python2 -m pip install -U typing $ ./runtests.py -For more on the tests, see [Test README.md](test-data/unit/README.md) +For more on the tests, such as how to write tests and how to control +which tests to run, see [Test README.md](test-data/unit/README.md). Development status diff --git a/runtests.py b/runtests.py index 52ec4398fc8f..c4fe1fce8981 100755 --- a/runtests.py +++ b/runtests.py @@ -44,6 +44,11 @@ MYPYC_COMMAND_LINE, ERROR_STREAM] + +# These must be enabled by explicitly including 'mypyc-extra' on the command line. +MYPYC_OPT_IN = [MYPYC_RUN, + MYPYC_RUN_MULTI] + # We split the pytest run into three parts to improve test # parallelization. Each run should have tests that each take a roughly similar # time to run. 
@@ -65,16 +70,19 @@ TYPESHED, PEP561, DAEMON, - MYPYC_RUN, - MYPYC_RUN_MULTI, MYPYC_EXTERNAL, MYPYC_COMMAND_LINE, ERROR_STREAM]), + # Mypyc tests that aren't run by default, since they are slow and rarely + # fail for commits that don't touch mypyc + 'mypyc-extra': 'pytest -k "%s"' % ' or '.join(MYPYC_OPT_IN), } # Stop run immediately if these commands fail FAST_FAIL = ['self', 'lint'] +DEFAULT_COMMANDS = [cmd for cmd in cmds if cmd != 'mypyc-extra'] + assert all(cmd in cmds for cmd in FAST_FAIL) @@ -117,10 +125,12 @@ def main() -> None: if not set(args).issubset(cmds): print("usage:", prog, " ".join('[%s]' % k for k in cmds)) + print() + print('Run the given tests. If given no arguments, run everything except mypyc-extra.') exit(1) if not args: - args = list(cmds) + args = DEFAULT_COMMANDS[:] status = 0 diff --git a/test-data/unit/README.md b/test-data/unit/README.md index 7454126fe570..e1923b90ad52 100644 --- a/test-data/unit/README.md +++ b/test-data/unit/README.md @@ -30,7 +30,7 @@ Add the test in this format anywhere in the file: with text "abc..." - note a space after `E:` and `flags:` - `# E:12` adds column number to the expected error -- use `\` to escape the `#` character and indicate that the rest of the line is part of +- use `\` to escape the `#` character and indicate that the rest of the line is part of the error message - repeating `# E: ` several times in one line indicates multiple expected errors in one line - `W: ...` and `N: ...` works exactly like `E:`, but report a warning and a note respectively @@ -88,29 +88,32 @@ module: $ python2 -m pip install -U typing -The unit test suites are driven by the `pytest` framework. To run all tests, +The unit test suites are driven by the `pytest` framework. To run all mypy tests, run `pytest` in the mypy repository: - $ pytest - -Note that some tests will be disabled for older python versions. 
+ $ pytest mypy This will run all tests, including integration and regression tests, -and will type check mypy and verify that all stubs are valid. This may -take several minutes to run, so you don't want to use this all the time -while doing development. +and will verify that all stubs are valid. This may take several minutes to run, +so you don't want to use this all the time while doing development. Test suites for individual components are in the files `mypy/test/test*.py`. +Note that some tests will be disabled for older python versions. + +If you work on mypyc, you will want to also run mypyc tests: + + $ pytest mypyc + You can run tests from a specific module directly, a specific suite within a - module, or a test in a suite (even if it's data-driven): +module, or a test in a suite (even if it's data-driven): $ pytest mypy/test/testdiff.py $ pytest mypy/test/testsemanal.py::SemAnalTypeInfoSuite - + $ pytest -n0 mypy/test/testargs.py::ArgSuite::test_coherence - + $ pytest -n0 mypy/test/testcheck.py::TypeCheckSuite::testCallingVariableWithFunctionType To control which tests are run and how, you can use the `-k` switch: @@ -144,10 +147,19 @@ To run the linter: $ flake8 -You can also run all of the above tests together with: +You can also run all of the above tests using `runtests.py` (this includes +type checking mypy and linting): $ python3 runtests.py +By default, this runs everything except some mypyc tests. You can give it +arguments to control what gets run, such as `self` to run mypy on itself: + + $ python3 runtests.py self + +Run `python3 runtests.py mypyc-extra` to run mypyc tests that are not +enabled by default. This is typically only needed if you work on mypyc. + Many test suites store test case descriptions in text files (`test-data/unit/*.test`). The module `mypy.test.data` parses these descriptions. 
From a8913d8107a1cbbb73ca018aa2ac72934eed7f6d Mon Sep 17 00:00:00 2001 From: hauntsaninja Date: Mon, 6 Jan 2020 03:34:59 -0800 Subject: [PATCH 031/117] Remove unused function custom_equality_method (#8223) Superseded by custom_special_method --- mypy/checkexpr.py | 24 ------------------------ 1 file changed, 24 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 7edaf7e2ad89..9173a0dcdbcb 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -32,7 +32,6 @@ YieldFromExpr, TypedDictExpr, PromoteExpr, NewTypeExpr, NamedTupleExpr, TypeVarExpr, TypeAliasExpr, BackquoteExpr, EnumCallExpr, TypeAlias, SymbolNode, PlaceholderNode, ARG_POS, ARG_OPT, ARG_NAMED, ARG_STAR, ARG_STAR2, LITERAL_TYPE, REVEAL_TYPE, - SYMBOL_FUNCBASE_TYPES ) from mypy.literals import literal from mypy import nodes @@ -4305,29 +4304,6 @@ def is_expr_literal_type(node: Expression) -> bool: return False -def custom_equality_method(typ: Type) -> bool: - """Does this type have a custom __eq__() method?""" - typ = get_proper_type(typ) - if isinstance(typ, Instance): - method = typ.type.get('__eq__') - if method and isinstance(method.node, (SYMBOL_FUNCBASE_TYPES, Decorator, Var)): - if method.node.info: - return not method.node.info.fullname.startswith('builtins.') - return False - if isinstance(typ, UnionType): - return any(custom_equality_method(t) for t in typ.items) - if isinstance(typ, TupleType): - return custom_equality_method(tuple_fallback(typ)) - if isinstance(typ, CallableType) and typ.is_type_obj(): - # Look up __eq__ on the metaclass for class objects. - return custom_equality_method(typ.fallback) - if isinstance(typ, AnyType): - # Avoid false positives in uncertain cases. - return True - # TODO: support other types (see ExpressionChecker.has_member())? 
- return False - - def has_bytes_component(typ: Type, py2: bool = False) -> bool: """Is this one of builtin byte types, or a union that contains it?""" typ = get_proper_type(typ) From 9756b59a4187dbfa2579417e366d6497c3c342b4 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 6 Jan 2020 13:12:50 +0000 Subject: [PATCH 032/117] Fix lint (#8246) IOError is an alias to OSError in Python 3.3+. --- mypy/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/util.py b/mypy/util.py index 84859fa94e70..27ecc682b5fe 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -142,7 +142,7 @@ def read_py_file(path: str, read: Callable[[str], bytes], """ try: source = read(path) - except (IOError, OSError): + except OSError: return None else: try: From 300c846a244a99e0c9cd9ade6b0400c4142a0f77 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ville=20Skytt=C3=A4?= Date: Mon, 6 Jan 2020 15:49:36 +0200 Subject: [PATCH 033/117] Spelling and grammar fixes (#8194) --- mypy/build.py | 10 +++++----- mypy/checkexpr.py | 2 +- mypy/checkmember.py | 2 +- mypy/dmypy_server.py | 2 +- mypy/fastparse.py | 2 +- mypy/moduleinspect.py | 2 +- mypy/nodes.py | 2 +- mypy/semanal_namedtuple.py | 2 +- mypy/semanal_shared.py | 2 +- mypy/server/deps.py | 2 +- mypy/stats.py | 2 +- mypy/suggestions.py | 4 ++-- mypy/test/helpers.py | 2 +- mypyc/emitmodule.py | 6 +++--- mypyc/genops.py | 4 ++-- mypyc/test/test_run.py | 4 ++-- mypyc/test/test_serialization.py | 2 +- mypyc/uninit.py | 2 +- 18 files changed, 27 insertions(+), 27 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index 749785907f02..402e6fbc4a2c 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -968,7 +968,7 @@ def write_plugins_snapshot(manager: BuildManager) -> None: def read_plugins_snapshot(manager: BuildManager) -> Optional[Dict[str, str]]: """Read cached snapshot of versions and hashes of plugins from previous run.""" snapshot = _load_json_file(PLUGIN_SNAPSHOT_FILE, manager, - log_sucess='Plugins snapshot ', + 
log_success='Plugins snapshot ', log_error='Could not load plugins snapshot: ') if snapshot is None: return None @@ -1009,7 +1009,7 @@ def read_deps_cache(manager: BuildManager, Returns None if the cache was invalid in some way. """ deps_meta = _load_json_file(DEPS_META_FILE, manager, - log_sucess='Deps meta ', + log_success='Deps meta ', log_error='Could not load fine-grained dependency metadata: ') if deps_meta is None: return None @@ -1041,7 +1041,7 @@ def read_deps_cache(manager: BuildManager, def _load_json_file(file: str, manager: BuildManager, - log_sucess: str, log_error: str) -> Optional[Dict[str, Any]]: + log_success: str, log_error: str) -> Optional[Dict[str, Any]]: """A simple helper to read a JSON file with logging.""" t0 = time.time() try: @@ -1052,7 +1052,7 @@ def _load_json_file(file: str, manager: BuildManager, manager.add_stats(metastore_read_time=time.time() - t0) # Only bother to compute the log message if we are logging it, since it could be big if manager.verbosity() >= 2: - manager.trace(log_sucess + data.rstrip()) + manager.trace(log_success + data.rstrip()) try: result = json.loads(data) except ValueError: # TODO: JSONDecodeError in 3.5 @@ -1142,7 +1142,7 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache manager.trace('Looking for {} at {}'.format(id, meta_json)) t0 = time.time() meta = _load_json_file(meta_json, manager, - log_sucess='Meta {} '.format(id), + log_success='Meta {} '.format(id), log_error='Could not load cache for {}: '.format(id)) t1 = time.time() if meta is None: diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 9173a0dcdbcb..c8dc34da0b91 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2514,7 +2514,7 @@ def lookup_definer(typ: Instance, attr_name: str) -> Optional[str]: variants_raw.append((right_cmp_op, right_type, left_expr)) # STEP 3: - # We now filter out all non-existant operators. The 'variants' list contains + # We now filter out all non-existent operators. 
The 'variants' list contains # all operator methods that are actually present, in the order that Python # attempts to invoke them. diff --git a/mypy/checkmember.py b/mypy/checkmember.py index d0c705e2d7b3..65c84793eee3 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -565,7 +565,7 @@ def analyze_var(name: str, # * B.f: Callable[[B1], None] where B1 <: B (maybe B1 == B) # * x: Union[A1, B1] # In `x.f`, when checking `x` against A1 we assume x is compatible with A - # and similarly for B1 when checking agains B + # and similarly for B1 when checking against B dispatched_type = meet.meet_types(mx.original_type, itype) signature = freshen_function_type_vars(functype) signature = check_self_arg(signature, dispatched_type, var.is_classmethod, diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index 4899acd8f0a6..510886c01e82 100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -273,7 +273,7 @@ def cmd_status(self, fswatcher_dump_file: Optional[str] = None) -> Dict[str, obj res.update(get_meminfo()) if fswatcher_dump_file: data = self.fswatcher.dump_file_data() if hasattr(self, 'fswatcher') else {} - # Using .dumps and then writing was noticably faster than using dump + # Using .dumps and then writing was noticeably faster than using dump s = json.dumps(data) with open(fswatcher_dump_file, 'w') as f: f.write(s) diff --git a/mypy/fastparse.py b/mypy/fastparse.py index 812defbc1452..55c6b25535f7 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -1137,7 +1137,7 @@ def visit_JoinedStr(self, n: ast3.JoinedStr) -> Expression: empty_string.set_line(n.lineno, n.col_offset) strs_to_join = ListExpr(self.translate_expr_list(n.values)) strs_to_join.set_line(empty_string) - # Don't make unecessary join call if there is only one str to join + # Don't make unnecessary join call if there is only one str to join if len(strs_to_join.items) == 1: return self.set_line(strs_to_join.items[0], n) join_method = MemberExpr(empty_string, 'join') diff --git 
a/mypy/moduleinspect.py b/mypy/moduleinspect.py index 9580e9b03b18..a4c7bcc13438 100644 --- a/mypy/moduleinspect.py +++ b/mypy/moduleinspect.py @@ -122,7 +122,7 @@ def _start(self) -> None: self.results = Queue() # type: Queue[Union[ModuleProperties, str]] self.proc = Process(target=worker, args=(self.tasks, self.results, sys.path)) self.proc.start() - self.counter = 0 # Number of successfull roundtrips + self.counter = 0 # Number of successful roundtrips def close(self) -> None: """Free any resources used.""" diff --git a/mypy/nodes.py b/mypy/nodes.py index 792a89a5fea4..b2c7769580bd 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -856,7 +856,7 @@ def __init__(self, name: str, type: 'Optional[mypy.types.Type]' = None) -> None: # def __init__(self) -> None: # self.x: int # This case is important because this defines a new Var, even if there is one - # present in a superclass (without explict type this doesn't create a new Var). + # present in a superclass (without explicit type this doesn't create a new Var). # See SemanticAnalyzer.analyze_member_lvalue() for details. self.explicit_self_type = False # If True, this is an implicit Var created due to module-level __getattr__. diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py index af71c9d234d4..14b85b04dade 100644 --- a/mypy/semanal_namedtuple.py +++ b/mypy/semanal_namedtuple.py @@ -314,7 +314,7 @@ def parse_namedtuple_fields_with_types(self, nodes: List[Expression], context: C bool]]: """Parse typed named tuple fields. - Return (names, types, defaults, error ocurred), or None if at least one of + Return (names, types, defaults, error occurred), or None if at least one of the types is not ready. 
""" items = [] # type: List[str] diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py index c800dcb95d14..c040fee4e7d7 100644 --- a/mypy/semanal_shared.py +++ b/mypy/semanal_shared.py @@ -208,7 +208,7 @@ def calculate_tuple_fallback(typ: TupleType) -> None: Note that there is an apparent chicken and egg problem with respect to verifying type arguments against bounds. Verifying bounds might require fallbacks, but we might use the bounds to calculate the - fallbacks. In partice this is not a problem, since the worst that + fallbacks. In practice this is not a problem, since the worst that can happen is that we have invalid type argument values, and these can happen in later stages as well (they will generate errors, but we don't prevent their existence). diff --git a/mypy/server/deps.py b/mypy/server/deps.py index cf2d9b51246a..f7d93789586a 100644 --- a/mypy/server/deps.py +++ b/mypy/server/deps.py @@ -299,7 +299,7 @@ def process_type_info(self, info: TypeInfo) -> None: if name not in info.names: continue # __init__ and __new__ can be overridden with different signatures, so no - # logical depedency. + # logical dependency. if name in ('__init__', '__new__'): continue self.add_dependency(make_trigger(base_info.fullname + '.' + name), diff --git a/mypy/stats.py b/mypy/stats.py index d277852c60ef..17725ac86bdc 100644 --- a/mypy/stats.py +++ b/mypy/stats.py @@ -227,7 +227,7 @@ def visit_call_expr(self, o: CallExpr) -> None: def record_call_target_precision(self, o: CallExpr) -> None: """Record precision of formal argument types used in a call.""" if not self.typemap or o.callee not in self.typemap: - # Type not availabe. + # Type not available. 
return callee_type = get_proper_type(self.typemap[o.callee]) if isinstance(callee_type, CallableType): diff --git a/mypy/suggestions.py b/mypy/suggestions.py index db128f5276b7..ab9dd8260b0b 100644 --- a/mypy/suggestions.py +++ b/mypy/suggestions.py @@ -305,7 +305,7 @@ def get_args(self, is_method: bool, """Produce a list of type suggestions for each argument type.""" types = [] # type: List[List[Type]] for i in range(len(base.arg_kinds)): - # Make self args Any but this will get overriden somewhere in the checker + # Make self args Any but this will get overridden somewhere in the checker if i == 0 and is_method: types.append([AnyType(TypeOfAny.suggestion_engine)]) continue @@ -967,7 +967,7 @@ def refine_union(t: UnionType, s: ProperType) -> Type: This is done by refining every component of the union against the right hand side type (or every component of its union if it is - one). If an element of the union is succesfully refined, we drop it + one). If an element of the union is successfully refined, we drop it from the union in favor of the refined versions. """ # Don't try to do any union refining if the types are already the diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py index 6007a0a7f849..47118a413d9b 100644 --- a/mypy/test/helpers.py +++ b/mypy/test/helpers.py @@ -412,7 +412,7 @@ def copy_and_fudge_mtime(source_path: str, target_path: str) -> None: # In some systems, mtime has a resolution of 1 second which can # cause annoying-to-debug issues when a file has the same size # after a change. We manually set the mtime to circumvent this. - # Note that we increment the old file's mtime, which guarentees a + # Note that we increment the old file's mtime, which guarantees a # different value, rather than incrementing the mtime after the # copy, which could leave the mtime unchanged if the old file had # a similarly fudged mtime. 
diff --git a/mypyc/emitmodule.py b/mypyc/emitmodule.py index 57336867d5dd..4006635e8278 100644 --- a/mypyc/emitmodule.py +++ b/mypyc/emitmodule.py @@ -53,7 +53,7 @@ # modules: one shim per module and one shared library containing all # the actual code. # In fully separate compilation, we (unfortunately) will generate 2*N -# extension modules: one shim per module and also one library containg +# extension modules: one shim per module and also one library containing # each module's actual code. (This might be fixable in the future, # but allows a clean separation between setup of the export tables # (see generate_export_table) and running module top levels.) @@ -424,10 +424,10 @@ def pointerize(decl: str, name: str) -> str: """Given a C decl and its name, modify it to be a declaration to a pointer.""" # This doesn't work in general but does work for all our types... if '(' in decl: - # Function pointer. Stick a * in front of the name and wrap it in parens. + # Function pointer. Stick an * in front of the name and wrap it in parens. return decl.replace(name, '(*{})'.format(name)) else: - # Non-function pointer. Just stick a * in front of the name. + # Non-function pointer. Just stick an * in front of the name. return decl.replace(name, '*{}'.format(name)) diff --git a/mypyc/genops.py b/mypyc/genops.py index 6f16b270d694..da1423705131 100644 --- a/mypyc/genops.py +++ b/mypyc/genops.py @@ -601,7 +601,7 @@ def prepare_class_def(path: str, module_name: str, cdef: ClassDef, # We sort the table for determinism here on Python 3.5 for name, node in sorted(info.names.items()): - # Currenly all plugin generated methods are dummies and not included. + # Currently all plugin generated methods are dummies and not included. 
if node.plugin_generated: continue @@ -3651,7 +3651,7 @@ def visit_tuple_expr(self, expr: TupleExpr) -> Value: # create a tuple of unknown length return self._visit_tuple_display(expr) - # create an tuple of fixed length (RTuple) + # create a tuple of fixed length (RTuple) tuple_type = self.node_type(expr) # When handling NamedTuple et. al we might not have proper type info, # so make some up if we need it. diff --git a/mypyc/test/test_run.py b/mypyc/test/test_run.py index a0d4812296ae..efc58497300f 100644 --- a/mypyc/test/test_run.py +++ b/mypyc/test/test_run.py @@ -312,7 +312,7 @@ def get_separate(self, program_text: str, return True -# Run the main multi-module tests in multi-file compliation mode +# Run the main multi-module tests in multi-file compilation mode class TestRunMultiFile(TestRun): multi_file = True test_name_suffix = '_multi' @@ -322,7 +322,7 @@ class TestRunMultiFile(TestRun): ] -# Run the main multi-module tests in separate compliation mode +# Run the main multi-module tests in separate compilation mode class TestRunSeparate(TestRun): separate = True test_name_suffix = '_separate' diff --git a/mypyc/test/test_serialization.py b/mypyc/test/test_serialization.py index 4a6e26b0ceb5..0d7220bccfd1 100644 --- a/mypyc/test/test_serialization.py +++ b/mypyc/test/test_serialization.py @@ -33,7 +33,7 @@ def assert_blobs_same(x: Any, y: Any, trail: Tuple[Any, ...]) -> None: FuncDecls, FuncIRs, and ClassIRs are compared by fullname to avoid infinite recursion. - (More detailed comparisions should be done manually.) + (More detailed comparisons should be done manually.) Types and signatures are compared using mypyc.sametype. 
diff --git a/mypyc/uninit.py b/mypyc/uninit.py index 753af114b8d6..d7979aac37eb 100644 --- a/mypyc/uninit.py +++ b/mypyc/uninit.py @@ -41,7 +41,7 @@ def split_blocks_at_uninits(env: Environment, for i, op in enumerate(ops): defined = pre_must_defined[block, i] for src in op.unique_sources(): - # If a register operand is not guarenteed to be + # If a register operand is not guaranteed to be # initialized is an operand to something other than a # check that it is defined, insert a check. if (isinstance(src, Register) and src not in defined From 7c5e69be55c9d91aba335f14bd5e271889e3db88 Mon Sep 17 00:00:00 2001 From: Xuanda Yang Date: Tue, 7 Jan 2020 04:56:17 +0800 Subject: [PATCH 034/117] Fix incorrect error code indexing (#8248) Fixes #8242 --- mypy/util.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/mypy/util.py b/mypy/util.py index 27ecc682b5fe..cd6f7cbe2585 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -607,8 +607,11 @@ def colorize(self, error: str) -> str: return (loc + self.style('error:', 'red', bold=True) + self.highlight_quote_groups(msg)) codepos = msg.rfind('[') - code = msg[codepos:] - msg = msg[:codepos] + if codepos != -1: + code = msg[codepos:] + msg = msg[:codepos] + else: + code = "" # no error code specified return (loc + self.style('error:', 'red', bold=True) + self.highlight_quote_groups(msg) + self.style(code, 'yellow')) elif ': note:' in error: From 8571f7ddc36068a12b217ca78be6eb1f797f9b1a Mon Sep 17 00:00:00 2001 From: "Michael J. 
Sullivan" Date: Mon, 6 Jan 2020 13:21:33 -0800 Subject: [PATCH 035/117] Refactor calls to md5 to go through a helper (#8250) --- mypy/build.py | 23 ++++++++++++----------- mypy/dmypy_server.py | 2 +- mypy/fscache.py | 7 +++---- mypy/fswatcher.py | 14 +++++++------- mypy/util.py | 12 +++++++++++- mypyc/emitmodule.py | 4 ++-- 6 files changed, 36 insertions(+), 26 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index 402e6fbc4a2c..0c8e05f11556 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -13,7 +13,6 @@ import contextlib import errno import gc -import hashlib import json import os import pathlib @@ -37,7 +36,7 @@ from mypy.errors import Errors, CompileError, ErrorInfo, report_internal_error from mypy.util import ( DecodeError, decode_python_encoding, is_sub_path, get_mypy_comments, module_prefix, - read_py_file + read_py_file, hash_digest, ) if TYPE_CHECKING: from mypy.report import Reports # Avoid unconditional slow import @@ -468,7 +467,7 @@ def take_module_snapshot(module: types.ModuleType) -> str: """ if hasattr(module, '__file__'): with open(module.__file__, 'rb') as f: - digest = hashlib.md5(f.read()).hexdigest() + digest = hash_digest(f.read()) else: digest = 'unknown' ver = getattr(module, '__version__', 'none') @@ -1262,9 +1261,9 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str], # coarse-grained incremental rebuild, so we accept the cache # metadata even if it doesn't match the source file. # - # We still *do* the mtime/md5 checks, however, to enable + # We still *do* the mtime/hash checks, however, to enable # fine-grained mode to take advantage of the mtime-updating - # optimization when mtimes differ but md5s match. There is + # optimization when mtimes differ but hashes match. There is # essentially no extra time cost to computing the hash here, since # it will be cached and will be needed for finding changed files # later anyways. 
@@ -1292,7 +1291,7 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str], t0 = time.time() try: - source_hash = manager.fscache.md5(path) + source_hash = manager.fscache.hash_digest(path) except (OSError, UnicodeDecodeError, DecodeError): return None manager.add_stats(validate_hash_time=time.time() - t0) @@ -1346,10 +1345,12 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str], def compute_hash(text: str) -> str: - # We use md5 instead of the builtin hash(...) function because the output of hash(...) - # can differ between runs due to hash randomization (enabled by default in Python 3.3). - # See the note in https://docs.python.org/3/reference/datamodel.html#object.__hash__. - return hashlib.md5(text.encode('utf-8')).hexdigest() + # We use a crypto hash instead of the builtin hash(...) function + # because the output of hash(...) can differ between runs due to + # hash randomization (enabled by default in Python 3.3). See the + # note in + # https://docs.python.org/3/reference/datamodel.html#object.__hash__. + return hash_digest(text.encode('utf-8')) def json_dumps(obj: Any, debug_cache: bool) -> str: @@ -1982,7 +1983,7 @@ def parse_file(self) -> None: path = manager.maybe_swap_for_shadow_path(self.path) source = decode_python_encoding(manager.fscache.read(path), manager.options.python_version) - self.source_hash = manager.fscache.md5(path) + self.source_hash = manager.fscache.hash_digest(path) except IOError as ioerr: # ioerr.strerror differs for os.stat failures between Windows and # other systems, but os.strerror(ioerr.errno) does not, so we use that. 
diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index 510886c01e82..20aa9d678e9e 100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -423,7 +423,7 @@ def initialize_fine_grained(self, sources: List[BuildSource], assert state.path is not None self.fswatcher.set_file_data( state.path, - FileData(st_mtime=float(meta.mtime), st_size=meta.size, md5=meta.hash)) + FileData(st_mtime=float(meta.mtime), st_size=meta.size, hash=meta.hash)) changed, removed = self.find_changed(sources) diff --git a/mypy/fscache.py b/mypy/fscache.py index e4426715c224..0677aaee7645 100644 --- a/mypy/fscache.py +++ b/mypy/fscache.py @@ -28,10 +28,10 @@ advantage of the benefits. """ -import hashlib import os import stat from typing import Dict, List, Set +from mypy.util import hash_digest class FileSystemCache: @@ -256,12 +256,11 @@ def read(self, path: str) -> bytes: self.read_error_cache[path] = err raise - md5hash = hashlib.md5(data).hexdigest() self.read_cache[path] = data - self.hash_cache[path] = md5hash + self.hash_cache[path] = hash_digest(data) return data - def md5(self, path: str) -> str: + def hash_digest(self, path: str) -> str: if path not in self.hash_cache: self.read(path) return self.hash_cache[path] diff --git a/mypy/fswatcher.py b/mypy/fswatcher.py index 7be8ee313749..7ab78b2c4ed3 100644 --- a/mypy/fswatcher.py +++ b/mypy/fswatcher.py @@ -6,14 +6,14 @@ FileData = NamedTuple('FileData', [('st_mtime', float), ('st_size', int), - ('md5', str)]) + ('hash', str)]) class FileSystemWatcher: """Watcher for file system changes among specific paths. All file system access is performed using FileSystemCache. We - detect changed files by stat()ing them all and comparing md5 hashes + detect changed files by stat()ing them all and comparing hashes of potentially changed files. If a file has both size and mtime unmodified, the file is assumed to be unchanged. 
@@ -54,8 +54,8 @@ def remove_watched_paths(self, paths: Iterable[str]) -> None: def _update(self, path: str) -> None: st = self.fs.stat(path) - md5 = self.fs.md5(path) - self._file_data[path] = FileData(st.st_mtime, st.st_size, md5) + hash_digest = self.fs.hash_digest(path) + self._file_data[path] = FileData(st.st_mtime, st.st_size, hash_digest) def _find_changed(self, paths: Iterable[str]) -> AbstractSet[str]: changed = set() @@ -76,10 +76,10 @@ def _find_changed(self, paths: Iterable[str]) -> AbstractSet[str]: # Round mtimes down, to match the mtimes we write to meta files elif st.st_size != old.st_size or int(st.st_mtime) != int(old.st_mtime): # Only look for changes if size or mtime has changed as an - # optimization, since calculating md5 is expensive. - new_md5 = self.fs.md5(path) + # optimization, since calculating hash is expensive. + new_hash = self.fs.hash_digest(path) self._update(path) - if st.st_size != old.st_size or new_md5 != old.md5: + if st.st_size != old.st_size or new_hash != old.hash: # Changed file. changed.add(path) return changed diff --git a/mypy/util.py b/mypy/util.py index cd6f7cbe2585..d44e58da8fc3 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -4,7 +4,7 @@ import re import subprocess import sys -import os +import hashlib from typing import ( TypeVar, List, Tuple, Optional, Dict, Sequence, Iterable, Container, IO, Callable @@ -469,6 +469,16 @@ def soft_wrap(msg: str, max_len: int, first_offset: int, return padding.join(lines) +def hash_digest(data: bytes) -> str: + """Compute a hash digest of some data. + + We use a cryptographic hash because we want a low probability of + accidental collision, but we don't really care about any of the + cryptographic properties. + """ + return hashlib.md5(data).hexdigest() + + class FancyFormatter: """Apply color and bold font to terminal output. 
diff --git a/mypyc/emitmodule.py b/mypyc/emitmodule.py index 4006635e8278..435e8c237d76 100644 --- a/mypyc/emitmodule.py +++ b/mypyc/emitmodule.py @@ -4,7 +4,6 @@ # single module and it should be renamed. import os -import hashlib import json from collections import OrderedDict from typing import List, Tuple, Dict, Iterable, Set, TypeVar, Optional @@ -18,6 +17,7 @@ from mypy.options import Options from mypy.plugin import Plugin, ReportConfigContext from mypy.fscache import FileSystemCache +from mypy.util import hash_digest from mypyc import genops from mypyc.common import ( @@ -144,7 +144,7 @@ def report_config_data( contents = f.read() except FileNotFoundError: return None - real_hash = hashlib.md5(contents).hexdigest() + real_hash = hash_digest(contents) if hash != real_hash: return None From 35b50397cabd36066d2dd92b979794b94f1741d3 Mon Sep 17 00:00:00 2001 From: "Michael J. Sullivan" Date: Tue, 7 Jan 2020 15:39:34 -0800 Subject: [PATCH 036/117] Switch to using sha256 for hashes (#8251) --- mypy/util.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mypy/util.py b/mypy/util.py index d44e58da8fc3..f7c96e520f5c 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -476,7 +476,9 @@ def hash_digest(data: bytes) -> str: accidental collision, but we don't really care about any of the cryptographic properties. """ - return hashlib.md5(data).hexdigest() + # Once we drop Python 3.5 support, we should consider using + # blake2b, which is faster. + return hashlib.sha256(data).hexdigest() class FancyFormatter: From 3dce3fd18e3e48c1e892951950865ea97f4ee4ed Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Wed, 8 Jan 2020 08:20:29 -0800 Subject: [PATCH 037/117] Add support for narrowing Literals using equality (#8151) This pull request (finally) adds support for narrowing expressions using Literal types by equality, instead of just identity. 
For example, the following "tagged union" pattern is now supported: ```python class Foo(TypedDict): key: Literal["A"] blah: int class Bar(TypedDict): key: Literal["B"] something: str x: Union[Foo, Bar] if x.key == "A": reveal_type(x) # Revealed type is 'Foo' else: reveal_type(x) # Revealed type is 'Bar' ``` Previously, this was possible to do only with Enum Literals and the `is` operator, which is perhaps not very intuitive. The main limitation with this pull request is that it'll perform narrowing only if either the LHS or RHS contains an explicit Literal type somewhere. If this limitation is not present, we end up breaking a decent amount of real-world code -- mostly tests -- that do something like this: ```python def some_test_case() -> None: worker = Worker() # Without the limitation, we narrow 'worker.state' to # Literal['ready'] in this assert... assert worker.state == 'ready' worker.start() # ...which subsequently causes this second assert to narrow # worker.state to , causing the last line to be # unreachable. assert worker.state == 'running' worker.query() ``` I tried for several weeks to find a more intelligent way around this problem, but everything I tried ended up being either insufficient or super-hacky, so I gave up and went for this brute-force solution. The other main limitation is that we perform narrowing only if both the LHS and RHS do not define custom `__eq__` or `__ne__` methods, but this seems like a more reasonable one to me. Resolves https://github.com/python/mypy/issues/7944. 
--- mypy/checker.py | 214 +++++++----- mypy/checkexpr.py | 23 +- mypy/checkstrformat.py | 35 +- mypy/typeops.py | 53 ++- test-data/unit/check-enum.test | 69 ++-- test-data/unit/check-narrowing.test | 515 ++++++++++++++++++++++++++++ test-data/unit/check-optional.test | 20 ++ 7 files changed, 755 insertions(+), 174 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 5046d9431b4f..18f3573f14c5 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -6,7 +6,7 @@ from typing import ( Dict, Set, List, cast, Tuple, TypeVar, Union, Optional, NamedTuple, Iterator, Iterable, - Sequence, Mapping, Generic, AbstractSet + Sequence, Mapping, Generic, AbstractSet, Callable ) from typing_extensions import Final @@ -50,7 +50,8 @@ erase_def_to_union_or_bound, erase_to_union_or_bound, coerce_to_literal, try_getting_str_literals_from_type, try_getting_int_literals_from_type, tuple_fallback, is_singleton_type, try_expanding_enum_to_union, - true_only, false_only, function_type, TypeVarExtractor, + true_only, false_only, function_type, TypeVarExtractor, custom_special_method, + is_literal_type_like, ) from mypy import message_registry from mypy.subtypes import ( @@ -3890,20 +3891,64 @@ def find_isinstance_check_helper(self, node: Expression) -> Tuple[TypeMap, TypeM partial_type_maps = [] for operator, expr_indices in simplified_operator_list: - if operator in {'is', 'is not'}: - if_map, else_map = self.refine_identity_comparison_expression( - operands, - operand_types, - expr_indices, - narrowable_operand_index_to_hash.keys(), - ) - elif operator in {'==', '!='}: - if_map, else_map = self.refine_equality_comparison_expression( - operands, - operand_types, - expr_indices, - narrowable_operand_index_to_hash.keys(), - ) + if operator in {'is', 'is not', '==', '!='}: + # is_valid_target: + # Controls which types we're allowed to narrow exprs to. 
Note that + # we cannot use 'is_literal_type_like' in both cases since doing + # 'x = 10000 + 1; x is 10001' is not always True in all Python + # implementations. + # + # coerce_only_in_literal_context: + # If true, coerce types into literal types only if one or more of + # the provided exprs contains an explicit Literal type. This could + # technically be set to any arbitrary value, but it seems being liberal + # with narrowing when using 'is' and conservative when using '==' seems + # to break the least amount of real-world code. + # + # should_narrow_by_identity: + # Set to 'false' only if the user defines custom __eq__ or __ne__ methods + # that could cause identity-based narrowing to produce invalid results. + if operator in {'is', 'is not'}: + is_valid_target = is_singleton_type # type: Callable[[Type], bool] + coerce_only_in_literal_context = False + should_narrow_by_identity = True + else: + def is_exactly_literal_type(t: Type) -> bool: + return isinstance(get_proper_type(t), LiteralType) + + def has_no_custom_eq_checks(t: Type) -> bool: + return (not custom_special_method(t, '__eq__', check_all=False) + and not custom_special_method(t, '__ne__', check_all=False)) + + is_valid_target = is_exactly_literal_type + coerce_only_in_literal_context = True + + expr_types = [operand_types[i] for i in expr_indices] + should_narrow_by_identity = all(map(has_no_custom_eq_checks, expr_types)) + + if_map = {} # type: TypeMap + else_map = {} # type: TypeMap + if should_narrow_by_identity: + if_map, else_map = self.refine_identity_comparison_expression( + operands, + operand_types, + expr_indices, + narrowable_operand_index_to_hash.keys(), + is_valid_target, + coerce_only_in_literal_context, + ) + + # Strictly speaking, we should also skip this check if the objects in the expr + # chain have custom __eq__ or __ne__ methods. But we (maybe optimistically) + # assume nobody would actually create a custom objects that considers itself + # equal to None. 
+ if if_map == {} and else_map == {}: + if_map, else_map = self.refine_away_none_in_comparison( + operands, + operand_types, + expr_indices, + narrowable_operand_index_to_hash.keys(), + ) elif operator in {'in', 'not in'}: assert len(expr_indices) == 2 left_index, right_index = expr_indices @@ -3936,7 +3981,7 @@ def find_isinstance_check_helper(self, node: Expression) -> Tuple[TypeMap, TypeM partial_type_maps.append((if_map, else_map)) - return reduce_partial_conditional_maps(partial_type_maps) + return reduce_conditional_maps(partial_type_maps) elif isinstance(node, RefExpr): # Restrict the type of the variable to True-ish/False-ish in the if and else branches # respectively @@ -4146,8 +4191,10 @@ def refine_identity_comparison_expression(self, operand_types: List[Type], chain_indices: List[int], narrowable_operand_indices: AbstractSet[int], + is_valid_target: Callable[[ProperType], bool], + coerce_only_in_literal_context: bool, ) -> Tuple[TypeMap, TypeMap]: - """Produces conditional type maps refining expressions used in an identity comparison. + """Produce conditional type maps refining expressions by an identity/equality comparison. The 'operands' and 'operand_types' lists should be the full list of operands used in the overall comparison expression. The 'chain_indices' list is the list of indices @@ -4163,30 +4210,45 @@ def refine_identity_comparison_expression(self, The 'narrowable_operand_indices' parameter is the set of all indices we are allowed to refine the types of: that is, all operands that will potentially be a part of the output TypeMaps. + + Although this function could theoretically try setting the types of the operands + in the chains to the meet, doing that causes too many issues in real-world code. + Instead, we use 'is_valid_target' to identify which of the given chain types + we could plausibly use as the refined type for the expressions in the chain. 
+ + Similarly, 'coerce_only_in_literal_context' controls whether we should try coercing + expressions in the chain to a Literal type. Performing this coercion is sometimes + too aggressive of a narrowing, depending on context. """ - singleton = None # type: Optional[ProperType] - possible_singleton_indices = [] + should_coerce = True + if coerce_only_in_literal_context: + should_coerce = any(is_literal_type_like(operand_types[i]) for i in chain_indices) + + target = None # type: Optional[Type] + possible_target_indices = [] for i in chain_indices: - coerced_type = coerce_to_literal(operand_types[i]) - if not is_singleton_type(coerced_type): + expr_type = operand_types[i] + if should_coerce: + expr_type = coerce_to_literal(expr_type) + if not is_valid_target(get_proper_type(expr_type)): continue - if singleton and not is_same_type(singleton, coerced_type): - # We have multiple disjoint singleton types. So the 'if' branch + if target and not is_same_type(target, expr_type): + # We have multiple disjoint target types. So the 'if' branch # must be unreachable. return None, {} - singleton = coerced_type - possible_singleton_indices.append(i) + target = expr_type + possible_target_indices.append(i) - # There's nothing we can currently infer if none of the operands are singleton types, + # There's nothing we can currently infer if none of the operands are valid targets, # so we end early and infer nothing. - if singleton is None: + if target is None: return {}, {} - # If possible, use an unassignable expression as the singleton. - # We skip refining the type of the singleton below, so ideally we'd + # If possible, use an unassignable expression as the target. + # We skip refining the type of the target below, so ideally we'd # want to pick an expression we were going to skip anyways. 
singleton_index = -1 - for i in possible_singleton_indices: + for i in possible_target_indices: if i not in narrowable_operand_indices: singleton_index = i @@ -4215,20 +4277,21 @@ def refine_identity_comparison_expression(self, # currently will just mark the whole branch as unreachable if either operand is # narrowed to . if singleton_index == -1: - singleton_index = possible_singleton_indices[-1] + singleton_index = possible_target_indices[-1] enum_name = None - if isinstance(singleton, LiteralType) and singleton.is_enum_literal(): - enum_name = singleton.fallback.type.fullname + target = get_proper_type(target) + if isinstance(target, LiteralType) and target.is_enum_literal(): + enum_name = target.fallback.type.fullname - target_type = [TypeRange(singleton, is_upper_bound=False)] + target_type = [TypeRange(target, is_upper_bound=False)] partial_type_maps = [] for i in chain_indices: - # If we try refining a singleton against itself, conditional_type_map + # If we try refining a type against itself, conditional_type_map # will end up assuming that the 'else' branch is unreachable. This is # typically not what we want: generally the user will intend for the - # singleton type to be some fixed 'sentinel' value and will want to refine + # target type to be some fixed 'sentinel' value and will want to refine # the other exprs against this one instead. 
if i == singleton_index: continue @@ -4244,19 +4307,18 @@ def refine_identity_comparison_expression(self, expr_type = try_expanding_enum_to_union(expr_type, enum_name) partial_type_maps.append(conditional_type_map(expr, expr_type, target_type)) - return reduce_partial_conditional_maps(partial_type_maps) + return reduce_conditional_maps(partial_type_maps) - def refine_equality_comparison_expression(self, - operands: List[Expression], - operand_types: List[Type], - chain_indices: List[int], - narrowable_operand_indices: AbstractSet[int], - ) -> Tuple[TypeMap, TypeMap]: - """Produces conditional type maps refining expressions used in an equality comparison. + def refine_away_none_in_comparison(self, + operands: List[Expression], + operand_types: List[Type], + chain_indices: List[int], + narrowable_operand_indices: AbstractSet[int], + ) -> Tuple[TypeMap, TypeMap]: + """Produces conditional type maps refining away None in an identity/equality chain. - For more details, see the docstring of 'refine_equality_comparison' up above. - The only difference is that this function is for refining equality operations - (e.g. 'a == b == c') instead of identity ('a is b is c'). + For more details about what the different arguments mean, see the + docstring of 'refine_identity_comparison_expression' up above. 
""" non_optional_types = [] for i in chain_indices: @@ -4749,7 +4811,7 @@ class Foo(Enum): return False parent_type = get_proper_type(parent_type) - member_type = coerce_to_literal(member_type) + member_type = get_proper_type(coerce_to_literal(member_type)) if not isinstance(parent_type, FunctionLike) or not isinstance(member_type, LiteralType): return False @@ -4851,46 +4913,12 @@ def or_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap: return result -def or_partial_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap: - """Calculate what information we can learn from the truth of (e1 or e2) - in terms of the information that we can learn from the truth of e1 and - the truth of e2. - - Unlike 'or_conditional_maps', we include an expression in the output even - if it exists in only one map: we're assuming both maps are "partial" and - contain information about only some expressions, and so we "or" together - expressions both maps have information on. - """ - - if m1 is None: - return m2 - if m2 is None: - return m1 - # The logic here is a blend between 'and_conditional_maps' - # and 'or_conditional_maps'. We use the high-level logic from the - # former to ensure all expressions make it in the output map, - # but resolve cases where both maps contain info on the same - # expr using the unioning strategy from the latter. - result = m2.copy() - m2_keys = {literal_hash(n2): n2 for n2 in m2} - for n1 in m1: - n2 = m2_keys.get(literal_hash(n1)) - if n2 is None: - result[n1] = m1[n1] - else: - result[n2] = make_simplified_union([m1[n1], result[n2]]) - - return result - - -def reduce_partial_conditional_maps(type_maps: List[Tuple[TypeMap, TypeMap]], - ) -> Tuple[TypeMap, TypeMap]: - """Reduces a list containing pairs of *partial* if/else TypeMaps into a single pair. - - That is, if a expression exists in only one map, we always include it in the output. - We only "and"/"or" together expressions that appear in multiple if/else maps. 
+def reduce_conditional_maps(type_maps: List[Tuple[TypeMap, TypeMap]], + ) -> Tuple[TypeMap, TypeMap]: + """Reduces a list containing pairs of if/else TypeMaps into a single pair. - So for example, if we had the input: + We "and" together all of the if TypeMaps and "or" together the else TypeMaps. So + for example, if we had the input: [ ({x: TypeIfX, shared: TypeIfShared1}, {x: TypeElseX, shared: TypeElseShared1}), @@ -4901,11 +4929,14 @@ def reduce_partial_conditional_maps(type_maps: List[Tuple[TypeMap, TypeMap]], ( {x: TypeIfX, y: TypeIfY, shared: PseudoIntersection[TypeIfShared1, TypeIfShared2]}, - {x: TypeElseX, y: TypeElseY, shared: Union[TypeElseShared1, TypeElseShared2]}, + {shared: Union[TypeElseShared1, TypeElseShared2]}, ) ...where "PseudoIntersection[X, Y] == Y" because mypy actually doesn't understand intersections yet, so we settle for just arbitrarily picking the right expr's type. + + We only retain the shared expression in the 'else' case because we don't actually know + whether x was refined or y was refined -- only just that one of the two was refined. """ if len(type_maps) == 0: return {}, {} @@ -4914,10 +4945,9 @@ def reduce_partial_conditional_maps(type_maps: List[Tuple[TypeMap, TypeMap]], else: final_if_map, final_else_map = type_maps[0] for if_map, else_map in type_maps[1:]: - # 'and_conditional_maps' does the same thing for both global and partial type maps, - # which is why we don't need to have an 'and_partial_conditional_maps' function. 
final_if_map = and_conditional_maps(final_if_map, if_map) - final_else_map = or_partial_conditional_maps(final_else_map, else_map) + final_else_map = or_conditional_maps(final_else_map, else_map) + return final_if_map, final_else_map diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index c8dc34da0b91..47f0e74691c0 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -50,7 +50,7 @@ from mypy import erasetype from mypy.checkmember import analyze_member_access, type_object_type from mypy.argmap import ArgTypeExpander, map_actuals_to_formals, map_formals_to_actuals -from mypy.checkstrformat import StringFormatterChecker, custom_special_method +from mypy.checkstrformat import StringFormatterChecker from mypy.expandtype import expand_type, expand_type_by_instance, freshen_function_type_vars from mypy.util import split_module_names from mypy.typevars import fill_typevars @@ -58,7 +58,8 @@ from mypy.plugin import Plugin, MethodContext, MethodSigContext, FunctionContext from mypy.typeops import ( tuple_fallback, make_simplified_union, true_only, false_only, erase_to_union_or_bound, - function_type, callable_type, try_getting_str_literals + function_type, callable_type, try_getting_str_literals, custom_special_method, + is_literal_type_like, ) import mypy.errorcodes as codes @@ -4265,24 +4266,6 @@ def merge_typevars_in_callables_by_name( return output, variables -def is_literal_type_like(t: Optional[Type]) -> bool: - """Returns 'true' if the given type context is potentially either a LiteralType, - a Union of LiteralType, or something similar. 
- """ - t = get_proper_type(t) - if t is None: - return False - elif isinstance(t, LiteralType): - return True - elif isinstance(t, UnionType): - return any(is_literal_type_like(item) for item in t.items) - elif isinstance(t, TypeVarType): - return (is_literal_type_like(t.upper_bound) - or any(is_literal_type_like(item) for item in t.values)) - else: - return False - - def try_getting_literal(typ: Type) -> ProperType: """If possible, get a more precise literal type for a given type.""" typ = get_proper_type(typ) diff --git a/mypy/checkstrformat.py b/mypy/checkstrformat.py index 6f7647d98846..f89d5d0451b2 100644 --- a/mypy/checkstrformat.py +++ b/mypy/checkstrformat.py @@ -19,12 +19,12 @@ from mypy.types import ( Type, AnyType, TupleType, Instance, UnionType, TypeOfAny, get_proper_type, TypeVarType, - CallableType, LiteralType, get_proper_types + LiteralType, get_proper_types ) from mypy.nodes import ( StrExpr, BytesExpr, UnicodeExpr, TupleExpr, DictExpr, Context, Expression, StarExpr, CallExpr, IndexExpr, MemberExpr, TempNode, ARG_POS, ARG_STAR, ARG_NAMED, ARG_STAR2, - SYMBOL_FUNCBASE_TYPES, Decorator, Var, Node, MypyFile, ExpressionStmt, NameExpr, IntExpr + Node, MypyFile, ExpressionStmt, NameExpr, IntExpr ) import mypy.errorcodes as codes @@ -35,7 +35,7 @@ from mypy import message_registry from mypy.messages import MessageBuilder from mypy.maptype import map_instance_to_supertype -from mypy.typeops import tuple_fallback +from mypy.typeops import custom_special_method from mypy.subtypes import is_subtype from mypy.parse import parse @@ -961,32 +961,3 @@ def has_type_component(typ: Type, fullname: str) -> bool: elif isinstance(typ, UnionType): return any(has_type_component(t, fullname) for t in typ.relevant_items()) return False - - -def custom_special_method(typ: Type, name: str, - check_all: bool = False) -> bool: - """Does this type have a custom special method such as __format__() or __eq__()? 
- - If check_all is True ensure all items of a union have a custom method, not just some. - """ - typ = get_proper_type(typ) - if isinstance(typ, Instance): - method = typ.type.get(name) - if method and isinstance(method.node, (SYMBOL_FUNCBASE_TYPES, Decorator, Var)): - if method.node.info: - return not method.node.info.fullname.startswith('builtins.') - return False - if isinstance(typ, UnionType): - if check_all: - return all(custom_special_method(t, name, check_all) for t in typ.items) - return any(custom_special_method(t, name) for t in typ.items) - if isinstance(typ, TupleType): - return custom_special_method(tuple_fallback(typ), name) - if isinstance(typ, CallableType) and typ.is_type_obj(): - # Look up __method__ on the metaclass for class objects. - return custom_special_method(typ.fallback, name) - if isinstance(typ, AnyType): - # Avoid false positives in uncertain cases. - return True - # TODO: support other types (see ExpressionChecker.has_member())? - return False diff --git a/mypy/typeops.py b/mypy/typeops.py index 266a0fa0bb88..828791333f36 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -17,7 +17,7 @@ ) from mypy.nodes import ( FuncBase, FuncItem, OverloadedFuncDef, TypeInfo, ARG_STAR, ARG_STAR2, ARG_POS, - Expression, StrExpr, Var + Expression, StrExpr, Var, Decorator, SYMBOL_FUNCBASE_TYPES ) from mypy.maptype import map_instance_to_supertype from mypy.expandtype import expand_type_by_instance, expand_type @@ -564,6 +564,24 @@ def try_getting_literals_from_type(typ: Type, return literals +def is_literal_type_like(t: Optional[Type]) -> bool: + """Returns 'true' if the given type context is potentially either a LiteralType, + a Union of LiteralType, or something similar. 
+ """ + t = get_proper_type(t) + if t is None: + return False + elif isinstance(t, LiteralType): + return True + elif isinstance(t, UnionType): + return any(is_literal_type_like(item) for item in t.items) + elif isinstance(t, TypeVarType): + return (is_literal_type_like(t.upper_bound) + or any(is_literal_type_like(item) for item in t.values)) + else: + return False + + def get_enum_values(typ: Instance) -> List[str]: """Return the list of values for an Enum.""" return [name for name, sym in typ.type.names.items() if isinstance(sym.node, Var)] @@ -640,10 +658,11 @@ class Status(Enum): return typ -def coerce_to_literal(typ: Type) -> ProperType: +def coerce_to_literal(typ: Type) -> Type: """Recursively converts any Instances that have a last_known_value or are instances of enum types with a single value into the corresponding LiteralType. """ + original_type = typ typ = get_proper_type(typ) if isinstance(typ, UnionType): new_items = [coerce_to_literal(item) for item in typ.items] @@ -655,7 +674,7 @@ def coerce_to_literal(typ: Type) -> ProperType: enum_values = get_enum_values(typ) if len(enum_values) == 1: return LiteralType(value=enum_values[0], fallback=typ) - return typ + return original_type def get_type_vars(tp: Type) -> List[TypeVarType]: @@ -674,3 +693,31 @@ def _merge(self, iter: Iterable[List[TypeVarType]]) -> List[TypeVarType]: def visit_type_var(self, t: TypeVarType) -> List[TypeVarType]: return [t] + + +def custom_special_method(typ: Type, name: str, check_all: bool = False) -> bool: + """Does this type have a custom special method such as __format__() or __eq__()? + + If check_all is True ensure all items of a union have a custom method, not just some. 
+ """ + typ = get_proper_type(typ) + if isinstance(typ, Instance): + method = typ.type.get(name) + if method and isinstance(method.node, (SYMBOL_FUNCBASE_TYPES, Decorator, Var)): + if method.node.info: + return not method.node.info.fullname.startswith('builtins.') + return False + if isinstance(typ, UnionType): + if check_all: + return all(custom_special_method(t, name, check_all) for t in typ.items) + return any(custom_special_method(t, name) for t in typ.items) + if isinstance(typ, TupleType): + return custom_special_method(tuple_fallback(typ), name, check_all) + if isinstance(typ, CallableType) and typ.is_type_obj(): + # Look up __method__ on the metaclass for class objects. + return custom_special_method(typ.fallback, name, check_all) + if isinstance(typ, AnyType): + # Avoid false positives in uncertain cases. + return True + # TODO: support other types (see ExpressionChecker.has_member())? + return False diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test index 9d027f47192f..18130d2d818c 100644 --- a/test-data/unit/check-enum.test +++ b/test-data/unit/check-enum.test @@ -978,32 +978,43 @@ class Foo(Enum): x: Foo y: Foo +# We can't narrow anything in the else cases -- what if +# x is Foo.A and y is Foo.B or vice versa, for example? 
if x is y is Foo.A: - reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' - reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' + reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' + reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' +elif x is y is Foo.B: + reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.B]' + reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.B]' else: - reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.B]' - reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.B]' -reveal_type(x) # N: Revealed type is '__main__.Foo' -reveal_type(y) # N: Revealed type is '__main__.Foo' + reveal_type(x) # N: Revealed type is '__main__.Foo' + reveal_type(y) # N: Revealed type is '__main__.Foo' +reveal_type(x) # N: Revealed type is '__main__.Foo' +reveal_type(y) # N: Revealed type is '__main__.Foo' if x is Foo.A is y: - reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' - reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' + reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' + reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' +elif x is Foo.B is y: + reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.B]' + reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.B]' else: - reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.B]' - reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.B]' -reveal_type(x) # N: Revealed type is '__main__.Foo' -reveal_type(y) # N: Revealed type is '__main__.Foo' + reveal_type(x) # N: Revealed type is '__main__.Foo' + reveal_type(y) # N: Revealed type is '__main__.Foo' +reveal_type(x) # N: Revealed type is '__main__.Foo' +reveal_type(y) # N: Revealed type is '__main__.Foo' if Foo.A is x is y: - reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' - reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' + reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' + 
reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' +elif Foo.B is x is y: + reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.B]' + reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.B]' else: - reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.B]' - reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.B]' -reveal_type(x) # N: Revealed type is '__main__.Foo' -reveal_type(y) # N: Revealed type is '__main__.Foo' + reveal_type(x) # N: Revealed type is '__main__.Foo' + reveal_type(y) # N: Revealed type is '__main__.Foo' +reveal_type(x) # N: Revealed type is '__main__.Foo' +reveal_type(y) # N: Revealed type is '__main__.Foo' [builtins fixtures/primitives.pyi] @@ -1026,8 +1037,10 @@ if x is Foo.A < y is Foo.B: reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.B]' else: - reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.B]' - reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' + # Note: we can't narrow in this case. What if both x and y + # are Foo.A, for example? 
+ reveal_type(x) # N: Revealed type is '__main__.Foo' + reveal_type(y) # N: Revealed type is '__main__.Foo' reveal_type(x) # N: Revealed type is '__main__.Foo' reveal_type(y) # N: Revealed type is '__main__.Foo' @@ -1109,11 +1122,13 @@ if x0 is x1 is Foo.A is x2 < x3 is Foo.B is x4 is x5: reveal_type(x4) # N: Revealed type is 'Literal[__main__.Foo.B]' reveal_type(x5) # N: Revealed type is 'Literal[__main__.Foo.B]' else: - reveal_type(x0) # N: Revealed type is 'Union[Literal[__main__.Foo.B], Literal[__main__.Foo.C]]' - reveal_type(x1) # N: Revealed type is 'Union[Literal[__main__.Foo.B], Literal[__main__.Foo.C]]' - reveal_type(x2) # N: Revealed type is 'Union[Literal[__main__.Foo.B], Literal[__main__.Foo.C]]' - - reveal_type(x3) # N: Revealed type is 'Union[Literal[__main__.Foo.A], Literal[__main__.Foo.C]]' - reveal_type(x4) # N: Revealed type is 'Union[Literal[__main__.Foo.A], Literal[__main__.Foo.C]]' - reveal_type(x5) # N: Revealed type is 'Union[Literal[__main__.Foo.A], Literal[__main__.Foo.C]]' + # We unfortunately can't narrow away anything. For example, + # what if x0 == Foo.A and x1 == Foo.B or vice versa? 
+ reveal_type(x0) # N: Revealed type is '__main__.Foo' + reveal_type(x1) # N: Revealed type is '__main__.Foo' + reveal_type(x2) # N: Revealed type is '__main__.Foo' + + reveal_type(x3) # N: Revealed type is '__main__.Foo' + reveal_type(x4) # N: Revealed type is '__main__.Foo' + reveal_type(x5) # N: Revealed type is '__main__.Foo' [builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test index e0a0cb660c80..6c64b241eaaa 100644 --- a/test-data/unit/check-narrowing.test +++ b/test-data/unit/check-narrowing.test @@ -1,3 +1,86 @@ +[case testNarrowingParentWithStrsBasic] +from dataclasses import dataclass +from typing import NamedTuple, Tuple, Union +from typing_extensions import Literal, TypedDict + +class Object1: + key: Literal["A"] + foo: int +class Object2: + key: Literal["B"] + bar: str + +@dataclass +class Dataclass1: + key: Literal["A"] + foo: int +@dataclass +class Dataclass2: + key: Literal["B"] + foo: str + +class NamedTuple1(NamedTuple): + key: Literal["A"] + foo: int +class NamedTuple2(NamedTuple): + key: Literal["B"] + foo: str + +Tuple1 = Tuple[Literal["A"], int] +Tuple2 = Tuple[Literal["B"], str] + +class TypedDict1(TypedDict): + key: Literal["A"] + foo: int +class TypedDict2(TypedDict): + key: Literal["B"] + foo: str + +x1: Union[Object1, Object2] +if x1.key == "A": + reveal_type(x1) # N: Revealed type is '__main__.Object1' + reveal_type(x1.key) # N: Revealed type is 'Literal['A']' +else: + reveal_type(x1) # N: Revealed type is '__main__.Object2' + reveal_type(x1.key) # N: Revealed type is 'Literal['B']' + +x2: Union[Dataclass1, Dataclass2] +if x2.key == "A": + reveal_type(x2) # N: Revealed type is '__main__.Dataclass1' + reveal_type(x2.key) # N: Revealed type is 'Literal['A']' +else: + reveal_type(x2) # N: Revealed type is '__main__.Dataclass2' + reveal_type(x2.key) # N: Revealed type is 'Literal['B']' + +x3: Union[NamedTuple1, NamedTuple2] +if x3.key == "A": + reveal_type(x3) # N: Revealed 
type is 'Tuple[Literal['A'], builtins.int, fallback=__main__.NamedTuple1]' + reveal_type(x3.key) # N: Revealed type is 'Literal['A']' +else: + reveal_type(x3) # N: Revealed type is 'Tuple[Literal['B'], builtins.str, fallback=__main__.NamedTuple2]' + reveal_type(x3.key) # N: Revealed type is 'Literal['B']' +if x3[0] == "A": + reveal_type(x3) # N: Revealed type is 'Tuple[Literal['A'], builtins.int, fallback=__main__.NamedTuple1]' + reveal_type(x3[0]) # N: Revealed type is 'Literal['A']' +else: + reveal_type(x3) # N: Revealed type is 'Tuple[Literal['B'], builtins.str, fallback=__main__.NamedTuple2]' + reveal_type(x3[0]) # N: Revealed type is 'Literal['B']' + +x4: Union[Tuple1, Tuple2] +if x4[0] == "A": + reveal_type(x4) # N: Revealed type is 'Tuple[Literal['A'], builtins.int]' + reveal_type(x4[0]) # N: Revealed type is 'Literal['A']' +else: + reveal_type(x4) # N: Revealed type is 'Tuple[Literal['B'], builtins.str]' + reveal_type(x4[0]) # N: Revealed type is 'Literal['B']' + +x5: Union[TypedDict1, TypedDict2] +if x5["key"] == "A": + reveal_type(x5) # N: Revealed type is 'TypedDict('__main__.TypedDict1', {'key': Literal['A'], 'foo': builtins.int})' +else: + reveal_type(x5) # N: Revealed type is 'TypedDict('__main__.TypedDict2', {'key': Literal['B'], 'foo': builtins.str})' +[builtins fixtures/primitives.pyi] + [case testNarrowingParentWithEnumsBasic] from enum import Enum from dataclasses import dataclass @@ -184,6 +267,88 @@ if x.key is Key.D: else: reveal_type(x) # N: Revealed type is 'Union[__main__.Object1, __main__.Object2]' +[case testNarrowingTypedDictParentMultipleKeys] +# flags: --warn-unreachable +from typing import Union +from typing_extensions import Literal, TypedDict + +class TypedDict1(TypedDict): + key: Literal['A', 'C'] +class TypedDict2(TypedDict): + key: Literal['B', 'C'] + +x: Union[TypedDict1, TypedDict2] +if x['key'] == 'A': + reveal_type(x) # N: Revealed type is 'TypedDict('__main__.TypedDict1', {'key': Union[Literal['A'], Literal['C']]})' +else: + 
reveal_type(x) # N: Revealed type is 'Union[TypedDict('__main__.TypedDict1', {'key': Union[Literal['A'], Literal['C']]}), TypedDict('__main__.TypedDict2', {'key': Union[Literal['B'], Literal['C']]})]' + +if x['key'] == 'C': + reveal_type(x) # N: Revealed type is 'Union[TypedDict('__main__.TypedDict1', {'key': Union[Literal['A'], Literal['C']]}), TypedDict('__main__.TypedDict2', {'key': Union[Literal['B'], Literal['C']]})]' +else: + reveal_type(x) # N: Revealed type is 'Union[TypedDict('__main__.TypedDict1', {'key': Union[Literal['A'], Literal['C']]}), TypedDict('__main__.TypedDict2', {'key': Union[Literal['B'], Literal['C']]})]' + +if x['key'] == 'D': + reveal_type(x) # E: Statement is unreachable +else: + reveal_type(x) # N: Revealed type is 'Union[TypedDict('__main__.TypedDict1', {'key': Union[Literal['A'], Literal['C']]}), TypedDict('__main__.TypedDict2', {'key': Union[Literal['B'], Literal['C']]})]' +[builtins fixtures/primitives.pyi] + +[case testNarrowingPartialTypedDictParentMultipleKeys] +# flags: --warn-unreachable +from typing import Union +from typing_extensions import Literal, TypedDict + +class TypedDict1(TypedDict, total=False): + key: Literal['A', 'C'] +class TypedDict2(TypedDict, total=False): + key: Literal['B', 'C'] + +x: Union[TypedDict1, TypedDict2] +if x['key'] == 'A': + reveal_type(x) # N: Revealed type is 'TypedDict('__main__.TypedDict1', {'key'?: Union[Literal['A'], Literal['C']]})' +else: + reveal_type(x) # N: Revealed type is 'Union[TypedDict('__main__.TypedDict1', {'key'?: Union[Literal['A'], Literal['C']]}), TypedDict('__main__.TypedDict2', {'key'?: Union[Literal['B'], Literal['C']]})]' + +if x['key'] == 'C': + reveal_type(x) # N: Revealed type is 'Union[TypedDict('__main__.TypedDict1', {'key'?: Union[Literal['A'], Literal['C']]}), TypedDict('__main__.TypedDict2', {'key'?: Union[Literal['B'], Literal['C']]})]' +else: + reveal_type(x) # N: Revealed type is 'Union[TypedDict('__main__.TypedDict1', {'key'?: Union[Literal['A'], 
Literal['C']]}), TypedDict('__main__.TypedDict2', {'key'?: Union[Literal['B'], Literal['C']]})]' + +if x['key'] == 'D': + reveal_type(x) # E: Statement is unreachable +else: + reveal_type(x) # N: Revealed type is 'Union[TypedDict('__main__.TypedDict1', {'key'?: Union[Literal['A'], Literal['C']]}), TypedDict('__main__.TypedDict2', {'key'?: Union[Literal['B'], Literal['C']]})]' +[builtins fixtures/primitives.pyi] + +[case testNarrowingNestedTypedDicts] +from typing import Union +from typing_extensions import TypedDict, Literal + +class A(TypedDict): + key: Literal['A'] +class B(TypedDict): + key: Literal['B'] +class C(TypedDict): + key: Literal['C'] + +class X(TypedDict): + inner: Union[A, B] +class Y(TypedDict): + inner: Union[B, C] + +unknown: Union[X, Y] +if unknown['inner']['key'] == 'A': + reveal_type(unknown) # N: Revealed type is 'TypedDict('__main__.X', {'inner': Union[TypedDict('__main__.A', {'key': Literal['A']}), TypedDict('__main__.B', {'key': Literal['B']})]})' + reveal_type(unknown['inner']) # N: Revealed type is 'TypedDict('__main__.A', {'key': Literal['A']})' +if unknown['inner']['key'] == 'B': + reveal_type(unknown) # N: Revealed type is 'Union[TypedDict('__main__.X', {'inner': Union[TypedDict('__main__.A', {'key': Literal['A']}), TypedDict('__main__.B', {'key': Literal['B']})]}), TypedDict('__main__.Y', {'inner': Union[TypedDict('__main__.B', {'key': Literal['B']}), TypedDict('__main__.C', {'key': Literal['C']})]})]' + reveal_type(unknown['inner']) # N: Revealed type is 'TypedDict('__main__.B', {'key': Literal['B']})' +if unknown['inner']['key'] == 'C': + reveal_type(unknown) # N: Revealed type is 'TypedDict('__main__.Y', {'inner': Union[TypedDict('__main__.B', {'key': Literal['B']}), TypedDict('__main__.C', {'key': Literal['C']})]})' + reveal_type(unknown['inner']) # N: Revealed type is 'TypedDict('__main__.C', {'key': Literal['C']})' +[builtins fixtures/primitives.pyi] + [case testNarrowingParentWithMultipleParents] from enum import Enum from 
typing import Union @@ -445,3 +610,353 @@ if y["model"]["key"] is Key.C: else: reveal_type(y) # N: Revealed type is 'Union[TypedDict('__main__.Parent1', {'model': TypedDict('__main__.Model1', {'key': Literal[__main__.Key.A]}), 'foo': builtins.int}), TypedDict('__main__.Parent2', {'model': TypedDict('__main__.Model2', {'key': Literal[__main__.Key.B]}), 'bar': builtins.str})]' reveal_type(y["model"]) # N: Revealed type is 'Union[TypedDict('__main__.Model1', {'key': Literal[__main__.Key.A]}), TypedDict('__main__.Model2', {'key': Literal[__main__.Key.B]})]' + +[case testNarrowingParentsHierarchyTypedDictWithStr] +# flags: --warn-unreachable +from typing import Union +from typing_extensions import TypedDict, Literal + +class Parent1(TypedDict): + model: Model1 + foo: int + +class Parent2(TypedDict): + model: Model2 + bar: str + +class Model1(TypedDict): + key: Literal['A'] + +class Model2(TypedDict): + key: Literal['B'] + +x: Union[Parent1, Parent2] +if x["model"]["key"] == 'A': + reveal_type(x) # N: Revealed type is 'TypedDict('__main__.Parent1', {'model': TypedDict('__main__.Model1', {'key': Literal['A']}), 'foo': builtins.int})' + reveal_type(x["model"]) # N: Revealed type is 'TypedDict('__main__.Model1', {'key': Literal['A']})' +else: + reveal_type(x) # N: Revealed type is 'TypedDict('__main__.Parent2', {'model': TypedDict('__main__.Model2', {'key': Literal['B']}), 'bar': builtins.str})' + reveal_type(x["model"]) # N: Revealed type is 'TypedDict('__main__.Model2', {'key': Literal['B']})' + +y: Union[Parent1, Parent2] +if y["model"]["key"] == 'C': + reveal_type(y) # E: Statement is unreachable + reveal_type(y["model"]) +else: + reveal_type(y) # N: Revealed type is 'Union[TypedDict('__main__.Parent1', {'model': TypedDict('__main__.Model1', {'key': Literal['A']}), 'foo': builtins.int}), TypedDict('__main__.Parent2', {'model': TypedDict('__main__.Model2', {'key': Literal['B']}), 'bar': builtins.str})]' + reveal_type(y["model"]) # N: Revealed type is 
'Union[TypedDict('__main__.Model1', {'key': Literal['A']}), TypedDict('__main__.Model2', {'key': Literal['B']})]' +[builtins fixtures/primitives.pyi] + +[case testNarrowingEqualityFlipFlop] +# flags: --warn-unreachable --strict-equality +from typing_extensions import Literal, Final +from enum import Enum + +class State(Enum): + A = 1 + B = 2 + +class FlipFlopEnum: + def __init__(self) -> None: + self.state = State.A + + def mutate(self) -> None: + self.state = State.B if self.state == State.A else State.A + +class FlipFlopStr: + def __init__(self) -> None: + self.state = "state-1" + + def mutate(self) -> None: + self.state = "state-2" if self.state == "state-1" else "state-1" + +def test1(switch: FlipFlopEnum) -> None: + # Naively, we might assume the 'assert' here would narrow the type to + # Literal[State.A]. However, doing this ends up breaking a fair number of real-world + # code (usually test cases) that looks similar to this function: e.g. checks + # to make sure a field was mutated to some particular value. + # + # And since mypy can't really reason about state mutation, we take a conservative + # approach and avoid narrowing anything here. + + assert switch.state == State.A + reveal_type(switch.state) # N: Revealed type is '__main__.State' + + switch.mutate() + + assert switch.state == State.B + reveal_type(switch.state) # N: Revealed type is '__main__.State' + +def test2(switch: FlipFlopEnum) -> None: + # So strictly speaking, we ought to do the same thing with 'is' comparisons + # for the same reasons as above. But in practice, not too many people seem to + # know that doing 'some_enum is MyEnum.Value' is idiomatic. So in practice, + # this is probably good enough for now. 
+ + assert switch.state is State.A + reveal_type(switch.state) # N: Revealed type is 'Literal[__main__.State.A]' + + switch.mutate() + + assert switch.state is State.B # E: Non-overlapping identity check (left operand type: "Literal[State.A]", right operand type: "Literal[State.B]") + reveal_type(switch.state) # E: Statement is unreachable + +def test3(switch: FlipFlopStr) -> None: + # This is the same thing as 'test1', except we try using str literals. + + assert switch.state == "state-1" + reveal_type(switch.state) # N: Revealed type is 'builtins.str' + + switch.mutate() + + assert switch.state == "state-2" + reveal_type(switch.state) # N: Revealed type is 'builtins.str' +[builtins fixtures/primitives.pyi] + +[case testNarrowingEqualityRequiresExplicitStrLiteral] +# flags: --strict-optional +from typing_extensions import Literal, Final + +A_final: Final = "A" +A_literal: Literal["A"] + +# Neither the LHS nor the RHS are explicit literals, so regrettably nothing +# is narrowed here -- see 'testNarrowingEqualityFlipFlop' for an example of +# why more precise inference here is problematic. +x_str: str +if x_str == "A": + reveal_type(x_str) # N: Revealed type is 'builtins.str' +else: + reveal_type(x_str) # N: Revealed type is 'builtins.str' +reveal_type(x_str) # N: Revealed type is 'builtins.str' + +if x_str == A_final: + reveal_type(x_str) # N: Revealed type is 'builtins.str' +else: + reveal_type(x_str) # N: Revealed type is 'builtins.str' +reveal_type(x_str) # N: Revealed type is 'builtins.str' + +# But the RHS is a literal, so we can at least narrow the 'if' case now. +if x_str == A_literal: + reveal_type(x_str) # N: Revealed type is 'Literal['A']' +else: + reveal_type(x_str) # N: Revealed type is 'builtins.str' +reveal_type(x_str) # N: Revealed type is 'builtins.str' + +# But in these two cases, the LHS is a literal/literal-like type. 
So we +# assume the user *does* want literal-based narrowing and narrow accordingly +# regardless of whether the RHS is an explicit literal or not. +x_union: Literal["A", "B", None] +if x_union == A_final: + reveal_type(x_union) # N: Revealed type is 'Literal['A']' +else: + reveal_type(x_union) # N: Revealed type is 'Union[Literal['B'], None]' +reveal_type(x_union) # N: Revealed type is 'Union[Literal['A'], Literal['B'], None]' + +if x_union == A_literal: + reveal_type(x_union) # N: Revealed type is 'Literal['A']' +else: + reveal_type(x_union) # N: Revealed type is 'Union[Literal['B'], None]' +reveal_type(x_union) # N: Revealed type is 'Union[Literal['A'], Literal['B'], None]' +[builtins fixtures/primitives.pyi] + +[case testNarrowingEqualityRequiresExplicitEnumLiteral] +# flags: --strict-optional +from typing_extensions import Literal, Final +from enum import Enum + +class Foo(Enum): + A = 1 + B = 2 + +A_final: Final = Foo.A +A_literal: Literal[Foo.A] + +# See comments in testNarrowingEqualityRequiresExplicitStrLiteral and +# testNarrowingEqualityFlipFlop for more on why we can't narrow here. +x1: Foo +if x1 == Foo.A: + reveal_type(x1) # N: Revealed type is '__main__.Foo' +else: + reveal_type(x1) # N: Revealed type is '__main__.Foo' + +x2: Foo +if x2 == A_final: + reveal_type(x2) # N: Revealed type is '__main__.Foo' +else: + reveal_type(x2) # N: Revealed type is '__main__.Foo' + +# But we let this narrow since there's an explicit literal in the RHS. 
+x3: Foo +if x3 == A_literal: + reveal_type(x3) # N: Revealed type is 'Literal[__main__.Foo.A]' +else: + reveal_type(x3) # N: Revealed type is 'Literal[__main__.Foo.B]' +[builtins fixtures/primitives.pyi] + +[case testNarrowingEqualityDisabledForCustomEquality] +from typing import Union +from typing_extensions import Literal +from enum import Enum + +class Custom: + def __eq__(self, other: object) -> bool: return True + +class Default: pass + +x1: Union[Custom, Literal[1], Literal[2]] +if x1 == 1: + reveal_type(x1) # N: Revealed type is 'Union[__main__.Custom, Literal[1], Literal[2]]' +else: + reveal_type(x1) # N: Revealed type is 'Union[__main__.Custom, Literal[1], Literal[2]]' + +x2: Union[Default, Literal[1], Literal[2]] +if x2 == 1: + reveal_type(x2) # N: Revealed type is 'Literal[1]' +else: + reveal_type(x2) # N: Revealed type is 'Union[__main__.Default, Literal[2]]' + +class CustomEnum(Enum): + A = 1 + B = 2 + + def __eq__(self, other: object) -> bool: return True + +x3: CustomEnum +key: Literal[CustomEnum.A] +if x3 == key: + reveal_type(x3) # N: Revealed type is '__main__.CustomEnum' +else: + reveal_type(x3) # N: Revealed type is '__main__.CustomEnum' + +# For comparison, this narrows since we bypass __eq__ +if x3 is key: + reveal_type(x3) # N: Revealed type is 'Literal[__main__.CustomEnum.A]' +else: + reveal_type(x3) # N: Revealed type is 'Literal[__main__.CustomEnum.B]' +[builtins fixtures/primitives.pyi] + +[case testNarrowingEqualityDisabledForCustomEqualityChain] +# flags: --strict-optional --strict-equality --warn-unreachable +from typing import Union +from typing_extensions import Literal + +class Custom: + def __eq__(self, other: object) -> bool: return True + +class Default: pass + +x: Literal[1, 2, None] +y: Custom +z: Default + +# We could maybe try doing something clever, but for simplicity we +# treat the whole chain as contaminated and mostly disable narrowing. +# +# The only exception is that we do at least strip away the 'None'. 
We +# (perhaps optimistically) assume no custom class would be pathological +# enough to declare itself to be equal to None and so permit this narrowing, +# since it's often convenient in practice. +if 1 == x == y: + reveal_type(x) # N: Revealed type is 'Union[Literal[1], Literal[2]]' + reveal_type(y) # N: Revealed type is '__main__.Custom' +else: + reveal_type(x) # N: Revealed type is 'Union[Literal[1], Literal[2], None]' + reveal_type(y) # N: Revealed type is '__main__.Custom' + +# No contamination here +if 1 == x == z: # E: Non-overlapping equality check (left operand type: "Union[Literal[1], Literal[2], None]", right operand type: "Default") + reveal_type(x) # E: Statement is unreachable + reveal_type(z) +else: + reveal_type(x) # N: Revealed type is 'Union[Literal[1], Literal[2], None]' + reveal_type(z) # N: Revealed type is '__main__.Default' +[builtins fixtures/primitives.pyi] + +[case testNarrowingUnreachableCases] +# flags: --strict-optional --strict-equality --warn-unreachable +from typing import Union +from typing_extensions import Literal + +a: Literal[1] +b: Literal[1, 2] +c: Literal[2, 3] + +if a == b == c: + reveal_type(a) # E: Statement is unreachable + reveal_type(b) + reveal_type(c) +else: + reveal_type(a) # N: Revealed type is 'Literal[1]' + reveal_type(b) # N: Revealed type is 'Union[Literal[1], Literal[2]]' + reveal_type(c) # N: Revealed type is 'Union[Literal[2], Literal[3]]' + +if a == a == a: + reveal_type(a) # N: Revealed type is 'Literal[1]' +else: + reveal_type(a) # E: Statement is unreachable + +if a == a == b: + reveal_type(a) # N: Revealed type is 'Literal[1]' + reveal_type(b) # N: Revealed type is 'Literal[1]' +else: + reveal_type(a) # N: Revealed type is 'Literal[1]' + reveal_type(b) # N: Revealed type is 'Literal[2]' + +# In this case, it's ok for 'b' to narrow down to Literal[1] in the else case +# since that's the only way 'b == 2' can be false +if b == 2: + reveal_type(b) # N: Revealed type is 'Literal[2]' +else: + reveal_type(b) 
# N: Revealed type is 'Literal[1]' + +# But in this case, we can't conclude anything about the else case. This expression +# could end up being either '2 == 2 == 3' or '1 == 2 == 2', which means we can't +# conclude anything. +if b == 2 == c: + reveal_type(b) # N: Revealed type is 'Literal[2]' + reveal_type(c) # N: Revealed type is 'Literal[2]' +else: + reveal_type(b) # N: Revealed type is 'Union[Literal[1], Literal[2]]' + reveal_type(c) # N: Revealed type is 'Union[Literal[2], Literal[3]]' +[builtins fixtures/primitives.pyi] + +[case testNarrowingUnreachableCases2] +# flags: --strict-optional --strict-equality --warn-unreachable +from typing import Union +from typing_extensions import Literal + +a: Literal[1, 2, 3, 4] +b: Literal[1, 2, 3, 4] + +if a == b == 1: + reveal_type(a) # N: Revealed type is 'Literal[1]' + reveal_type(b) # N: Revealed type is 'Literal[1]' +elif a == b == 2: + reveal_type(a) # N: Revealed type is 'Literal[2]' + reveal_type(b) # N: Revealed type is 'Literal[2]' +elif a == b == 3: + reveal_type(a) # N: Revealed type is 'Literal[3]' + reveal_type(b) # N: Revealed type is 'Literal[3]' +elif a == b == 4: + reveal_type(a) # N: Revealed type is 'Literal[4]' + reveal_type(b) # N: Revealed type is 'Literal[4]' +else: + # This branch is reachable if a == 1 and b == 2, for example. + reveal_type(a) # N: Revealed type is 'Union[Literal[1], Literal[2], Literal[3], Literal[4]]' + reveal_type(b) # N: Revealed type is 'Union[Literal[1], Literal[2], Literal[3], Literal[4]]' + +if a == a == 1: + reveal_type(a) # N: Revealed type is 'Literal[1]' +elif a == a == 2: + reveal_type(a) # N: Revealed type is 'Literal[2]' +elif a == a == 3: + reveal_type(a) # N: Revealed type is 'Literal[3]' +elif a == a == 4: + reveal_type(a) # N: Revealed type is 'Literal[4]' +else: + # In contrast, this branch must be unreachable: we assume (maybe naively) + # that 'a' won't be mutated in the middle of the expression. 
+ reveal_type(a) # E: Statement is unreachable + reveal_type(b) +[builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-optional.test b/test-data/unit/check-optional.test index 15698e99ddf5..14f022c13f44 100644 --- a/test-data/unit/check-optional.test +++ b/test-data/unit/check-optional.test @@ -489,6 +489,10 @@ if x == '': reveal_type(x) # N: Revealed type is 'builtins.str' else: reveal_type(x) # N: Revealed type is 'Union[builtins.str, None]' +if x is '': + reveal_type(x) # N: Revealed type is 'builtins.str' +else: + reveal_type(x) # N: Revealed type is 'Union[builtins.str, None]' [builtins fixtures/ops.pyi] [case testInferEqualsNotOptionalWithUnion] @@ -498,6 +502,10 @@ if x == '': reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int]' else: reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int, None]' +if x is '': + reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int]' +else: + reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int, None]' [builtins fixtures/ops.pyi] [case testInferEqualsNotOptionalWithOverlap] @@ -507,6 +515,10 @@ if x == object(): reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int]' else: reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int, None]' +if x is object(): + reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int]' +else: + reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int, None]' [builtins fixtures/ops.pyi] [case testInferEqualsStillOptionalWithNoOverlap] @@ -516,6 +528,10 @@ if x == 0: reveal_type(x) # N: Revealed type is 'Union[builtins.str, None]' else: reveal_type(x) # N: Revealed type is 'Union[builtins.str, None]' +if x is 0: + reveal_type(x) # N: Revealed type is 'Union[builtins.str, None]' +else: + reveal_type(x) # N: Revealed type is 'Union[builtins.str, None]' [builtins fixtures/ops.pyi] [case testInferEqualsStillOptionalWithBothOptional] @@ -526,6 +542,10 @@ if x 
== y: reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int, None]' else: reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int, None]' +if x is y: + reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int, None]' +else: + reveal_type(x) # N: Revealed type is 'Union[builtins.str, builtins.int, None]' [builtins fixtures/ops.pyi] [case testInferEqualsNotOptionalWithMultipleArgs] From 70557254ad6fee39469562a627c58eb9c3991c23 Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Wed, 8 Jan 2020 08:47:16 -0800 Subject: [PATCH 038/117] Update docs for Literal types (#8152) This pull request is a long-overdue update of the Literal type docs. It: 1. Removes the "this is alpha" warning we have at the top. 2. Mentions Literal enums are a thing (and works in a very brief example of one). 3. Adds a section about "intelligent indexing". 4. Adds a section with an example about the "tagged union" pattern (see https://github.com/python/mypy/pull/8151). 5. Cross-references the "tagged union" docs with the TypedDicts docs. --- docs/source/literal_types.rst | 161 +++++++++++++++++++++++++++++----- docs/source/more_types.rst | 13 +++ 2 files changed, 154 insertions(+), 20 deletions(-) diff --git a/docs/source/literal_types.rst b/docs/source/literal_types.rst index 707574752018..34ca4e4786e6 100644 --- a/docs/source/literal_types.rst +++ b/docs/source/literal_types.rst @@ -3,13 +3,6 @@ Literal types ============= -.. note:: - - ``Literal`` is an officially supported feature, but is highly experimental - and should be considered to be in alpha stage. It is very likely that future - releases of mypy will modify the behavior of literal types, either by adding - new features or by tuning or removing problematic ones. - Literal types let you indicate that an expression is equal to some specific primitive value. 
For example, if we annotate a variable with type ``Literal["foo"]``, mypy will understand that variable is not only of type ``str``, but is also @@ -23,8 +16,7 @@ precise type signature for this function using ``Literal[...]`` and overloads: .. code-block:: python - from typing import overload, Union - from typing_extensions import Literal + from typing import overload, Union, Literal # The first two overloads use Literal[...] so we can # have precise return types: @@ -53,18 +45,25 @@ precise type signature for this function using ``Literal[...]`` and overloads: variable = True reveal_type(fetch_data(variable)) # Revealed type is 'Union[bytes, str]' +.. note:: + + The examples in this page import ``Literal`` as well as ``Final`` and + ``TypedDict`` from the ``typing`` module. These types were added to + ``typing`` in Python 3.8, but are also available for use in Python 2.7 + and 3.4 - 3.7 via the ``typing_extensions`` package. + Parameterizing Literals *********************** -Literal types may contain one or more literal bools, ints, strs, and bytes. -However, literal types **cannot** contain arbitrary expressions: +Literal types may contain one or more literal bools, ints, strs, bytes, and +enum values. However, literal types **cannot** contain arbitrary expressions: types like ``Literal[my_string.trim()]``, ``Literal[x > 3]``, or ``Literal[3j + 4]`` are all illegal. Literals containing two or more values are equivalent to the union of those values. -So, ``Literal[-3, b"foo", True]`` is equivalent to -``Union[Literal[-3], Literal[b"foo"], Literal[True]]``. This makes writing -more complex types involving literals a little more convenient. +So, ``Literal[-3, b"foo", MyEnum.A]`` is equivalent to +``Union[Literal[-3], Literal[b"foo"], Literal[MyEnum.A]]``. This makes writing more +complex types involving literals a little more convenient. Literal types may also contain ``None``. Mypy will treat ``Literal[None]`` as being equivalent to just ``None``. 
This means that ``Literal[4, None]``, @@ -88,9 +87,6 @@ Literals may not contain any other kind of type or expression. This means doing ``Literal[my_instance]``, ``Literal[Any]``, ``Literal[3.14]``, or ``Literal[{"foo": 2, "bar": 5}]`` are all illegal. -Future versions of mypy may relax some of these restrictions. For example, we -plan on adding support for using enum values inside ``Literal[...]`` in an upcoming release. - Declaring literal variables *************************** @@ -115,7 +111,7 @@ you can instead change the variable to be ``Final`` (see :ref:`final_attrs`): .. code-block:: python - from typing_extensions import Final, Literal + from typing import Final, Literal def expects_literal(x: Literal[19]) -> None: pass @@ -134,7 +130,7 @@ For example, mypy will type check the above program almost as if it were written .. code-block:: python - from typing_extensions import Final, Literal + from typing import Final, Literal def expects_literal(x: Literal[19]) -> None: pass @@ -151,7 +147,7 @@ For example, compare and contrast what happens when you try appending these type .. code-block:: python - from typing_extensions import Final, Literal + from typing import Final, Literal a: Final = 19 b: Literal[19] = 19 @@ -168,6 +164,131 @@ For example, compare and contrast what happens when you try appending these type reveal_type(list_of_lits) # Revealed type is 'List[Literal[19]]' +Intelligent indexing +******************** + +We can use Literal types to more precisely index into structured heterogeneous +types such as tuples, NamedTuples, and TypedDicts. This feature is known as +*intelligent indexing*. + +For example, when we index into a tuple using some int, the inferred type is +normally the union of the tuple item types. However, if we want just the type +corresponding to some particular index, we can use Literal types like so: + +.. 
code-block:: python + + from typing import TypedDict + + tup = ("foo", 3.4) + + # Indexing with an int literal gives us the exact type for that index + reveal_type(tup[0]) # Revealed type is 'str' + + # But what if we want the index to be a variable? Normally mypy won't + # know exactly what the index is and so will return a less precise type: + int_index = 1 + reveal_type(tup[int_index]) # Revealed type is 'Union[str, float]' + + # But if we use either Literal types or a Final int, we can gain back + # the precision we originally had: + lit_index: Literal[1] = 1 + fin_index: Final = 1 + reveal_type(tup[lit_index]) # Revealed type is 'str' + reveal_type(tup[fin_index]) # Revealed type is 'str' + + # We can do the same thing with TypedDict and str keys: + class MyDict(TypedDict): + name: str + main_id: int + backup_id: int + + d: MyDict = {"name": "Saanvi", "main_id": 111, "backup_id": 222} + name_key: Final = "name" + reveal_type(d[name_key]) # Revealed type is 'str' + + # You can also index using unions of literals + id_key: Literal["main_id", "backup_id"] + reveal_type(d[id_key]) # Revealed type is 'int' + +.. _tagged_unions: + +Tagged unions +************* + +When you have a union of types, you can normally discriminate between each type +in the union by using ``isinstance`` checks. For example, if you had a variable ``x`` of +type ``Union[int, str]``, you could write some code that runs only if ``x`` is an int +by doing ``if isinstance(x, int): ...``. + +However, it is not always possible or convenient to do this. For example, it is not +possible to use ``isinstance`` to distinguish between two different TypedDicts since +at runtime, your variable will simply be just a dict. + +Instead, what you can do is *label* or *tag* your TypedDicts with a distinct Literal +type. Then, you can discriminate between each kind of TypedDict by checking the label: + +.. 
code-block:: python + + from typing import Literal, TypedDict, Union + + class NewJobEvent(TypedDict): + tag: Literal["new-job"] + job_name: str + config_file_path: str + + class CancelJobEvent(TypedDict): + tag: Literal["cancel-job"] + job_id: int + + Event = Union[NewJobEvent, CancelJobEvent] + + def process_event(event: Event) -> None: + # Since we made sure both TypedDicts have a key named 'tag', it's + # safe to do 'event["tag"]'. This expression normally has the type + # Literal["new-job", "cancel-job"], but the check below will narrow + # the type to either Literal["new-job"] or Literal["cancel-job"]. + # + # This in turn narrows the type of 'event' to either NewJobEvent + # or CancelJobEvent. + if event["tag"] == "new-job": + print(event["job_name"]) + else: + print(event["job_id"]) + +While this feature is mostly useful when working with TypedDicts, you can also +use the same technique with regular objects, tuples, or namedtuples. + +Similarly, tags do not need to be specifically str Literals: they can be any type +you can normally narrow within ``if`` statements and the like. For example, you +could have your tags be int or Enum Literals or even regular classes you narrow +using ``isinstance()``: + +.. code-block:: python + + from typing import Generic, TypeVar, Union + + T = TypeVar('T') + + class Wrapper(Generic[T]): + def __init__(self, inner: T) -> None: + self.inner = inner + + def process(w: Union[Wrapper[int], Wrapper[str]]) -> None: + # Doing `if isinstance(w, Wrapper[int])` does not work: isinstance requires + # that the second argument always be an *erased* type, with no generics. + # This is because generics are a typing-only concept and do not exist at + # runtime in a way `isinstance` can always check. 
+ # + # However, we can side-step this by checking the type of `w.inner` to + # narrow `w` itself: + if isinstance(w.inner, int): + reveal_type(w) # Revealed type is 'Wrapper[int]' + else: + reveal_type(w) # Revealed type is 'Wrapper[str]' + +This feature is sometimes called "sum types" or "discriminated union types" +in other programming languages. + Limitations *********** diff --git a/docs/source/more_types.rst b/docs/source/more_types.rst index 3a0cd865c3d1..3a962553e68a 100644 --- a/docs/source/more_types.rst +++ b/docs/source/more_types.rst @@ -1119,3 +1119,16 @@ and non-required keys, such as ``Movie`` above, will only be compatible with another ``TypedDict`` if all required keys in the other ``TypedDict`` are required keys in the first ``TypedDict``, and all non-required keys of the other ``TypedDict`` are also non-required keys in the first ``TypedDict``. + +Unions of TypedDicts +-------------------- + +Since TypedDicts are really just regular dicts at runtime, it is not possible to +use ``isinstance`` checks to distinguish between different variants of a Union of +TypedDict in the same way you can with regular objects. + +Instead, you can use the :ref:`tagged union pattern <tagged_unions>`. The referenced +section of the docs has a full description with an example, but in short, you will +need to give each TypedDict the same key where each value has a unique +:ref:`Literal type <literal_types>`. Then, check that key to distinguish +between your TypedDicts. 
From 33fc8a3f308adec1929d8bc09bd1860684c71c69 Mon Sep 17 00:00:00 2001 From: hauntsaninja Date: Wed, 8 Jan 2020 13:40:54 -0800 Subject: [PATCH 039/117] Narrow types for walrus assignment in if statements (#8258) Fixes #7313 Fixes #7316 --- mypy/checker.py | 2 ++ mypy/checkexpr.py | 1 + test-data/unit/check-python38.test | 27 ++++++++++++++++++--------- 3 files changed, 21 insertions(+), 9 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 18f3573f14c5..9c02d08ebd34 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -3982,6 +3982,8 @@ def has_no_custom_eq_checks(t: Type) -> bool: partial_type_maps.append((if_map, else_map)) return reduce_conditional_maps(partial_type_maps) + elif isinstance(node, AssignmentExpr): + return self.find_isinstance_check_helper(node.target) elif isinstance(node, RefExpr): # Restrict the type of the variable to True-ish/False-ish in the if and else branches # respectively diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 47f0e74691c0..9a0815060bca 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2773,6 +2773,7 @@ def visit_assignment_expr(self, e: AssignmentExpr) -> Type: value = self.accept(e.value) self.chk.check_assignment(e.target, e.value) self.chk.check_final(e) + self.find_partial_type_ref_fast_path(e.target) return value def visit_unary_expr(self, e: UnaryExpr) -> Type: diff --git a/test-data/unit/check-python38.test b/test-data/unit/check-python38.test index fd0e46ebd1f4..a4388aeb0299 100644 --- a/test-data/unit/check-python38.test +++ b/test-data/unit/check-python38.test @@ -266,11 +266,7 @@ def check_binder(x: Optional[int], y: Optional[int], z: Optional[int], a: Option reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' if x and (y := 1): - # TODO should just be int - # This is because in check_boolean_op in checkexpr.py we accept the right conditional - # within a binder frame context, so the types assigned in it are lost later. 
Perhaps - # we need to make find_isinstance_check() walrus-aware. - reveal_type(y) # N: Revealed type is 'Union[builtins.int, None]' + reveal_type(y) # N: Revealed type is 'builtins.int' if (a := 1) and x: reveal_type(a) # N: Revealed type is 'builtins.int' @@ -288,10 +284,23 @@ def check_partial() -> None: reveal_type(x) # N: Revealed type is 'Union[builtins.int, None]' +def check_narrow(x: Optional[int]) -> None: + if (y := x): + reveal_type(y) # N: Revealed type is 'builtins.int' +[builtins fixtures/f_string.pyi] + +[case testWalrusPartialTypes] +from typing import List + def check_partial_list() -> None: - if (x := []): - x.append(3) + if (x := []): # E: Need type annotation for 'x' (hint: "x: List[] = ...") + pass - reveal_type(x) # N: Revealed type is 'builtins.list[builtins.int]' + y: List[str] + if (y := []): + pass -[builtins fixtures/f_string.pyi] + if (z := []): + z.append(3) + reveal_type(z) # N: Revealed type is 'builtins.list[builtins.int]' +[builtins fixtures/list.pyi] From 01ca4e062e38a9db8f462b9464fbbd5dd945a8cf Mon Sep 17 00:00:00 2001 From: Jacob Beck Date: Thu, 9 Jan 2020 05:11:36 -0700 Subject: [PATCH 040/117] dataclasses: Fix deeply nested InitVar definitions with init=False (#8208) Fixes #8207 In #8159, I fixed InitVar handling so that it looked up the superclass __init__ and used that to determine the definition of InitVars. That fix doesn't work in the presence of `init=False` subclasses of the initial `init=False` class In that case, the lookup fails because the first attribute reached in MRO order on the subclass is the intermediate class, which also doesn't have the variable in its `__init__`. Then mypy didn't know about the InitVar but still tried to process it as if it did, resulting in an assertion error. This PR fixes that issue by not adding the field to the set of known attrs until an actual definition is found. 
--- mypy/plugins/dataclasses.py | 8 ++++++-- test-data/unit/check-dataclasses.test | 28 +++++++++++++++++++++++++++ 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 318603734342..86b24f0a58fb 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -311,9 +311,13 @@ def collect_attributes(self) -> Optional[List[DataclassAttribute]]: superclass_init = info.get_method('__init__') if isinstance(superclass_init, FuncDef): attr_node = _get_arg_from_init(superclass_init, attr.name) - if attr_node is not None: + if attr_node is None: + # Continue the loop: we will look it up in the next MRO entry. + # Don't add it to the known or super attrs because we don't know + # anything about it yet + continue + else: cls.info.names[attr.name] = attr_node - known_attrs.add(name) super_attrs.append(attr) elif all_attrs: diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 783a142339ba..36b62d53dbf1 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -104,6 +104,34 @@ reveal_type(C) # N: Revealed type is 'def (b: builtins.bool, a: builtins.bool) [builtins fixtures/bool.pyi] +[case testDataclassesDeepInitVarInheritance] +from dataclasses import dataclass, field, InitVar +@dataclass +class A: + a: bool + +@dataclass +class B: + b: InitVar[bool] + _b: bool = field(init=False) + + def __post_init__(self, b: bool): + self._b = b + +@dataclass(init=False) +class C(B): + def __init__(self): + super().__init__(True) + +@dataclass +class D(C): + pass + +reveal_type(C) # N: Revealed type is 'def () -> __main__.C' +reveal_type(D) # N: Revealed type is 'def (b: builtins.bool) -> __main__.D' + +[builtins fixtures/bool.pyi] + [case testDataclassesOverriding] # flags: --python-version 3.6 from dataclasses import dataclass From f709429a112ee9c6380fa369ee3af943f1c05471 Mon Sep 17 00:00:00 2001 From: Shantanu Date: 
Thu, 9 Jan 2020 13:11:06 -0800 Subject: [PATCH 041/117] Fix false positive for subclasses of bytes overriding __str__ (#8222) Fixes #8180 --- mypy/checkstrformat.py | 3 ++- test-data/unit/check-expressions.test | 6 ++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/mypy/checkstrformat.py b/mypy/checkstrformat.py index f89d5d0451b2..f3081a2fa491 100644 --- a/mypy/checkstrformat.py +++ b/mypy/checkstrformat.py @@ -382,7 +382,8 @@ def perform_special_format_checks(self, spec: ConversionSpecifier, call: CallExp self.msg.requires_int_or_char(call, format_call=True) if (not spec.type or spec.type == 's') and not spec.conversion: if self.chk.options.python_version >= (3, 0): - if has_type_component(actual_type, 'builtins.bytes'): + if (has_type_component(actual_type, 'builtins.bytes') and + not custom_special_method(actual_type, '__str__')): self.msg.fail("On Python 3 '{}'.format(b'abc') produces \"b'abc'\";" " use !r if this is a desired behavior", call, code=codes.STR_BYTES_PY3) diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index bb4511228798..5820e2b1653e 100644 --- a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -1570,6 +1570,12 @@ def func(x: A) -> A: '{!r}'.format(b) '{!r}'.format(x) '{!r}'.format(n) + +class D(bytes): + def __str__(self) -> str: + return "overrides __str__ of bytes" + +'{}'.format(D()) [builtins fixtures/primitives.pyi] [case testFormatCallFormatTypesBytesNotPy2] From abe9f8045abd4863845cc0ee9fcde46c0ddeb99c Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 10 Jan 2020 10:58:14 +0000 Subject: [PATCH 042/117] Speed up tests by simplifying default builtins (#8256) This removes tuple from the default builtins test stubs, which seems to speed up fast tests by about 7%. Also improved error messages that are generated when a test case is missing the right fixture. I didn't update Python 2 test stubs since they aren't used in many tests. 
Fixes #6428 (except for Python 2 which doesn't seem important). --- mypy/checker.py | 14 +++-- mypy/messages.py | 24 ++++++++ mypy/semanal.py | 28 +-------- test-data/unit/check-abstract.test | 5 ++ test-data/unit/check-annotated.test | 17 ++++++ test-data/unit/check-class-namedtuple.test | 33 ++++++++++- test-data/unit/check-classes.test | 38 ++++++++++++ test-data/unit/check-custom-plugin.test | 1 + test-data/unit/check-default-plugin.test | 2 + test-data/unit/check-dynamic-typing.test | 6 ++ test-data/unit/check-enum.test | 4 ++ test-data/unit/check-errorcodes.test | 4 ++ test-data/unit/check-expressions.test | 23 ++++++++ test-data/unit/check-fastparse.test | 2 + test-data/unit/check-final.test | 10 ++++ test-data/unit/check-flags.test | 7 +++ test-data/unit/check-functions.test | 8 +++ test-data/unit/check-generic-subtyping.test | 8 +++ test-data/unit/check-generics.test | 9 +++ test-data/unit/check-incomplete-fixture.test | 25 +++++--- test-data/unit/check-incremental.test | 16 +++++ test-data/unit/check-inference-context.test | 1 + test-data/unit/check-inference.test | 11 ++++ test-data/unit/check-isinstance.test | 1 + test-data/unit/check-kwargs.test | 3 + test-data/unit/check-literal.test | 58 +++++++++++++++++++ test-data/unit/check-modules.test | 3 + .../unit/check-multiple-inheritance.test | 1 + test-data/unit/check-namedtuple.test | 45 +++++++++++++- test-data/unit/check-narrowing.test | 5 ++ test-data/unit/check-newsemanal.test | 25 ++++++++ test-data/unit/check-optional.test | 1 + test-data/unit/check-overloading.test | 29 +++++++++- test-data/unit/check-protocols.test | 3 + test-data/unit/check-python38.test | 2 + test-data/unit/check-selftype.test | 5 ++ test-data/unit/check-semanal-error.test | 1 + test-data/unit/check-serialize.test | 2 + test-data/unit/check-statements.test | 10 ++++ test-data/unit/check-super.test | 2 + test-data/unit/check-tuples.test | 10 ++++ test-data/unit/check-type-aliases.test | 4 ++ test-data/unit/check-typeddict.test | 
3 + test-data/unit/check-typevar-values.test | 1 + test-data/unit/check-unions.test | 5 ++ test-data/unit/check-unreachable-code.test | 6 ++ test-data/unit/check-varargs.test | 1 + test-data/unit/check-warnings.test | 1 + test-data/unit/deps-classes.test | 4 ++ test-data/unit/deps-expressions.test | 1 + test-data/unit/deps-generics.test | 1 + test-data/unit/deps-types.test | 3 + test-data/unit/diff.test | 5 ++ test-data/unit/fine-grained-blockers.test | 1 + test-data/unit/fine-grained-modules.test | 2 + test-data/unit/fine-grained-suggest.test | 3 + test-data/unit/fine-grained.test | 33 ++++++++++- test-data/unit/fixtures/tuple.pyi | 16 +++-- test-data/unit/lib-stub/builtins.pyi | 4 -- test-data/unit/merge.test | 57 +++++++++--------- test-data/unit/semanal-classvar.test | 1 + test-data/unit/semanal-errors.test | 10 ++++ test-data/unit/semanal-namedtuple.test | 16 +++++ test-data/unit/semanal-typealiases.test | 2 + test-data/unit/semanal-types.test | 4 ++ test-data/unit/typexport-basic.test | 3 + 66 files changed, 607 insertions(+), 82 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 9c02d08ebd34..fdd1c2422b06 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -39,7 +39,7 @@ from mypy.sametypes import is_same_type from mypy.messages import ( MessageBuilder, make_inferred_type_note, append_invariance_notes, - format_type, format_type_bare, format_type_distinctly, + format_type, format_type_bare, format_type_distinctly, SUGGESTED_TEST_FIXTURES ) import mypy.checkexpr from mypy.checkmember import ( @@ -4499,9 +4499,15 @@ def lookup_qualified(self, name: str) -> SymbolTableNode: if last in n.names: return n.names[last] elif len(parts) == 2 and parts[0] == 'builtins': - raise KeyError("Could not find builtin symbol '{}'. (Are you running a " - "test case? If so, make sure to include a fixture that " - "defines this symbol.)".format(last)) + fullname = 'builtins.' + last + if fullname in SUGGESTED_TEST_FIXTURES: + suggestion = ", e.g. 
add '[builtins fixtures/{}]' to your test".format( + SUGGESTED_TEST_FIXTURES[fullname]) + else: + suggestion = '' + raise KeyError("Could not find builtin symbol '{}' (If you are running a " + "test case, use a fixture that " + "defines this symbol{})".format(last, suggestion)) else: msg = "Failed qualified lookup: '{}' (fullname = '{}')." raise KeyError(msg.format(last, name)) diff --git a/mypy/messages.py b/mypy/messages.py index 6eddc8184cb1..3f5cbca99ebd 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -53,6 +53,23 @@ } # type: Final +# Map from the full name of a missing definition to the test fixture (under +# test-data/unit/fixtures/) that provides the definition. This is used for +# generating better error messages when running mypy tests only. +SUGGESTED_TEST_FIXTURES = { + 'builtins.list': 'list.pyi', + 'builtins.dict': 'dict.pyi', + 'builtins.set': 'set.pyi', + 'builtins.tuple': 'tuple.pyi', + 'builtins.bool': 'bool.pyi', + 'builtins.Exception': 'exception.pyi', + 'builtins.BaseException': 'exception.pyi', + 'builtins.isinstance': 'isinstancelist.pyi', + 'builtins.property': 'property.pyi', + 'builtins.classmethod': 'classmethod.pyi', +} # type: Final + + class MessageBuilder: """Helper class for reporting type checker error messages with parameters. 
@@ -1480,6 +1497,13 @@ def generate_incompatible_tuple_error(self, for note in notes: self.note(note, context, code=code) + def add_fixture_note(self, fullname: str, ctx: Context) -> None: + self.note('Maybe your test fixture does not define "{}"?'.format(fullname), ctx) + if fullname in SUGGESTED_TEST_FIXTURES: + self.note( + 'Consider adding [builtins fixtures/{}] to your test description'.format( + SUGGESTED_TEST_FIXTURES[fullname]), ctx) + def quote_type_string(type_string: str) -> str: """Quotes a type representation for use in messages.""" diff --git a/mypy/semanal.py b/mypy/semanal.py index 0bf18a7b2197..42770e1a0389 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -81,7 +81,7 @@ from mypy.typevars import fill_typevars from mypy.visitor import NodeVisitor from mypy.errors import Errors, report_internal_error -from mypy.messages import best_matches, MessageBuilder, pretty_or +from mypy.messages import best_matches, MessageBuilder, pretty_or, SUGGESTED_TEST_FIXTURES from mypy.errorcodes import ErrorCode from mypy import message_registry, errorcodes as codes from mypy.types import ( @@ -120,21 +120,6 @@ T = TypeVar('T') -# Map from the full name of a missing definition to the test fixture (under -# test-data/unit/fixtures/) that provides the definition. This is used for -# generating better error messages when running mypy tests only. 
-SUGGESTED_TEST_FIXTURES = { - 'builtins.list': 'list.pyi', - 'builtins.dict': 'dict.pyi', - 'builtins.set': 'set.pyi', - 'builtins.bool': 'bool.pyi', - 'builtins.Exception': 'exception.pyi', - 'builtins.BaseException': 'exception.pyi', - 'builtins.isinstance': 'isinstancelist.pyi', - 'builtins.property': 'property.pyi', - 'builtins.classmethod': 'classmethod.pyi', -} # type: Final - TYPES_FOR_UNIMPORTED_HINTS = { 'typing.Any', 'typing.Callable', @@ -1828,7 +1813,7 @@ def report_missing_module_attribute(self, import_id: str, source_id: str, import if (self.lookup_fully_qualified_or_none(fullname) is None and fullname in SUGGESTED_TEST_FIXTURES): # Yes. Generate a helpful note. - self.add_fixture_note(fullname, context) + self.msg.add_fixture_note(fullname, context) def process_import_over_existing_name(self, imported_id: str, existing_symbol: SymbolTableNode, @@ -1858,13 +1843,6 @@ def process_import_over_existing_name(self, return True return False - def add_fixture_note(self, fullname: str, ctx: Context) -> None: - self.note('Maybe your test fixture does not define "{}"?'.format(fullname), ctx) - if fullname in SUGGESTED_TEST_FIXTURES: - self.note( - 'Consider adding [builtins fixtures/{}] to your test description'.format( - SUGGESTED_TEST_FIXTURES[fullname]), ctx) - def correct_relative_import(self, node: Union[ImportFrom, ImportAll]) -> str: import_id, ok = correct_relative_import(self.cur_mod_id, node.relative, node.id, self.cur_mod_node.is_package_init_file()) @@ -4599,7 +4577,7 @@ def name_not_defined(self, name: str, ctx: Context, namespace: Optional[str] = N fullname = 'builtins.{}'.format(name) if self.lookup_fully_qualified_or_none(fullname) is None: # Yes. Generate a helpful note. 
- self.add_fixture_note(fullname, ctx) + self.msg.add_fixture_note(fullname, ctx) modules_with_unimported_hints = { name.split('.', 1)[0] diff --git a/test-data/unit/check-abstract.test b/test-data/unit/check-abstract.test index eb78c287cd71..49b14ace0bed 100644 --- a/test-data/unit/check-abstract.test +++ b/test-data/unit/check-abstract.test @@ -38,6 +38,7 @@ class J(metaclass=ABCMeta): class A(I, J): pass class B(A): pass class C(I): pass +[builtins fixtures/tuple.pyi] [case testAbstractClassSubtypingViaExtension] @@ -67,6 +68,7 @@ class I(metaclass=ABCMeta): def f(self): pass class J(I): pass class A(J): pass +[builtins fixtures/tuple.pyi] [case testInheritingAbstractClassInSubclass] from abc import abstractmethod, ABCMeta @@ -134,6 +136,7 @@ if int(): i = cast(I, o) if int(): i = cast(I, a) +[builtins fixtures/tuple.pyi] [case testInstantiatingClassThatImplementsAbstractMethod] from abc import abstractmethod, ABCMeta @@ -396,6 +399,7 @@ i.g() # E: "I" has no attribute "g" if int(): b = i.f(a) +[builtins fixtures/tuple.pyi] [case testAccessingInheritedAbstractMethod] from abc import abstractmethod, ABCMeta @@ -415,6 +419,7 @@ if int(): -- Any (dynamic) types -- ------------------- +[builtins fixtures/tuple.pyi] [case testAbstractClassWithAllDynamicTypes] diff --git a/test-data/unit/check-annotated.test b/test-data/unit/check-annotated.test index 092c28f6a52a..aeb1a1985e6d 100644 --- a/test-data/unit/check-annotated.test +++ b/test-data/unit/check-annotated.test @@ -2,63 +2,75 @@ from typing_extensions import Annotated x: Annotated[int, ...] reveal_type(x) # N: Revealed type is 'builtins.int' +[builtins fixtures/tuple.pyi] [case testAnnotated1] from typing import Union from typing_extensions import Annotated x: Annotated[Union[int, str], ...] 
reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' +[builtins fixtures/tuple.pyi] [case testAnnotated2] from typing_extensions import Annotated x: Annotated[int, THESE, ARE, IGNORED, FOR, NOW] reveal_type(x) # N: Revealed type is 'builtins.int' +[builtins fixtures/tuple.pyi] [case testAnnotated3] from typing_extensions import Annotated x: Annotated[int, -+~12.3, "som"[e], more(anno+a+ions, that=[are]), (b"ignored",), 4, N.O.W, ...] reveal_type(x) # N: Revealed type is 'builtins.int' +[builtins fixtures/tuple.pyi] [case testAnnotatedBadType] from typing_extensions import Annotated x: Annotated[XXX, ...] # E: Name 'XXX' is not defined reveal_type(x) # N: Revealed type is 'Any' +[builtins fixtures/tuple.pyi] [case testAnnotatedBadNoArgs] from typing_extensions import Annotated x: Annotated # E: Annotated[...] must have exactly one type argument and at least one annotation reveal_type(x) # N: Revealed type is 'Any' +[builtins fixtures/tuple.pyi] [case testAnnotatedBadOneArg] from typing_extensions import Annotated x: Annotated[int] # E: Annotated[...] must have exactly one type argument and at least one annotation reveal_type(x) # N: Revealed type is 'Any' +[builtins fixtures/tuple.pyi] [case testAnnotatedNested0] from typing_extensions import Annotated x: Annotated[Annotated[int, ...], ...] reveal_type(x) # N: Revealed type is 'builtins.int' +[builtins fixtures/tuple.pyi] [case testAnnotatedNested1] from typing import Union from typing_extensions import Annotated x: Annotated[Annotated[Union[int, str], ...], ...] reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' +[builtins fixtures/tuple.pyi] [case testAnnotatedNestedBadType] from typing_extensions import Annotated x: Annotated[Annotated[XXX, ...], ...] # E: Name 'XXX' is not defined reveal_type(x) # N: Revealed type is 'Any' +[builtins fixtures/tuple.pyi] [case testAnnotatedNestedBadNoArgs] from typing_extensions import Annotated x: Annotated[Annotated, ...] 
# E: Annotated[...] must have exactly one type argument and at least one annotation reveal_type(x) # N: Revealed type is 'Any' +[builtins fixtures/tuple.pyi] [case testAnnotatedNestedBadOneArg] from typing_extensions import Annotated x: Annotated[Annotated[int], ...] # E: Annotated[...] must have exactly one type argument and at least one annotation reveal_type(x) # N: Revealed type is 'Any' +[builtins fixtures/tuple.pyi] [case testAnnotatedNoImport] x: Annotated[int, ...] # E: Name 'Annotated' is not defined @@ -68,6 +80,7 @@ reveal_type(x) # N: Revealed type is 'Any' from typing_extensions import Annotated as An x: An[int, ...] reveal_type(x) # N: Revealed type is 'builtins.int' +[builtins fixtures/tuple.pyi] [case testAnnotatedAliasSimple] from typing import Tuple @@ -75,6 +88,7 @@ from typing_extensions import Annotated Alias = Annotated[Tuple[int, ...], ...] x: Alias reveal_type(x) # N: Revealed type is 'builtins.tuple[builtins.int]' +[builtins fixtures/tuple.pyi] [case testAnnotatedAliasTypeVar] from typing import TypeVar @@ -83,6 +97,7 @@ T = TypeVar('T') Alias = Annotated[T, ...] x: Alias[int] reveal_type(x) # N: Revealed type is 'builtins.int' +[builtins fixtures/tuple.pyi] [case testAnnotatedAliasGenericTuple] from typing import TypeVar, Tuple @@ -91,6 +106,7 @@ T = TypeVar('T') Alias = Annotated[Tuple[T, T], ...] x: Alias[int] reveal_type(x) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' +[builtins fixtures/tuple.pyi] [case testAnnotatedAliasGenericUnion] from typing import TypeVar, Union @@ -99,3 +115,4 @@ T = TypeVar('T') Alias = Annotated[Union[T, str], ...] 
x: Alias[int] reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test index c0146aedf245..b2c517abe967 100644 --- a/test-data/unit/check-class-namedtuple.test +++ b/test-data/unit/check-class-namedtuple.test @@ -4,6 +4,7 @@ from typing import NamedTuple class E(NamedTuple): # E: NamedTuple class syntax is only supported in Python 3.6 pass +[builtins fixtures/tuple.pyi] [case testNewNamedTupleNoUnderscoreFields] # flags: --python-version 3.6 @@ -13,6 +14,7 @@ class X(NamedTuple): x: int _y: int # E: NamedTuple field name cannot start with an underscore: _y _z: int # E: NamedTuple field name cannot start with an underscore: _z +[builtins fixtures/tuple.pyi] [case testNewNamedTupleAccessingAttributes] # flags: --python-version 3.6 @@ -26,6 +28,7 @@ x: X x.x x.y x.z # E: "X" has no attribute "z" +[builtins fixtures/tuple.pyi] [case testNewNamedTupleAttributesAreReadOnly] # flags: --python-version 3.6 @@ -41,6 +44,7 @@ x.y = 5 # E: "X" has no attribute "y" class A(X): pass a: A a.x = 5 # E: Property "x" defined in "X" is read-only +[builtins fixtures/tuple.pyi] [case testNewNamedTupleCreateWithPositionalArguments] # flags: --python-version 3.6 @@ -55,6 +59,7 @@ x.x x.z # E: "X" has no attribute "z" x = X(1) # E: Too few arguments for "X" x = X(1, '2', 3) # E: Too many arguments for "X" +[builtins fixtures/tuple.pyi] [case testNewNamedTupleShouldBeSingleBase] # flags: --python-version 3.6 @@ -63,6 +68,7 @@ from typing import NamedTuple class A: ... 
class X(NamedTuple, A): # E: NamedTuple should be a single base pass +[builtins fixtures/tuple.pyi] [case testCreateNewNamedTupleWithKeywordArguments] # flags: --python-version 3.6 @@ -76,6 +82,7 @@ x = X(x=1, y='x') x = X(1, y='x') x = X(x=1, z=1) # E: Unexpected keyword argument "z" for "X" x = X(y='x') # E: Missing positional argument "x" in call to "X" +[builtins fixtures/tuple.pyi] [case testNewNamedTupleCreateAndUseAsTuple] # flags: --python-version 3.6 @@ -88,6 +95,7 @@ class X(NamedTuple): x = X(1, 'x') a, b = x a, b, c = x # E: Need more than 2 values to unpack (3 expected) +[builtins fixtures/tuple.pyi] [case testNewNamedTupleWithItemTypes] # flags: --python-version 3.6 @@ -105,6 +113,7 @@ i: int = n.b # E: Incompatible types in assignment (expression has type "str", x, y = n if int(): x = y # E: Incompatible types in assignment (expression has type "str", variable has type "int") +[builtins fixtures/tuple.pyi] [case testNewNamedTupleConstructorArgumentTypes] # flags: --python-version 3.6 @@ -118,6 +127,7 @@ n = N('x', 'x') # E: Argument 1 to "N" has incompatible type "str"; expected "in n = N(1, b=2) # E: Argument "b" to "N" has incompatible type "int"; expected "str" N(1, 'x') N(b='x', a=1) +[builtins fixtures/tuple.pyi] [case testNewNamedTupleAsBaseClass] # flags: --python-version 3.6 @@ -138,6 +148,7 @@ if int(): i, s = x if int(): s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") +[builtins fixtures/tuple.pyi] [case testNewNamedTupleSelfTypeWithNamedTupleAsBase] # flags: --python-version 3.6 @@ -157,6 +168,7 @@ class B(A): i, s = self i, i = self # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") +[builtins fixtures/tuple.pyi] [out] [case testNewNamedTupleTypeReferenceToClassDerivedFrom] @@ -179,6 +191,7 @@ class B(A): variable has type "str") i, i = self # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") +[builtins 
fixtures/tuple.pyi] [case testNewNamedTupleSubtyping] # flags: --python-version 3.6 @@ -206,6 +219,7 @@ if int(): t = b if int(): a = b +[builtins fixtures/tuple.pyi] [case testNewNamedTupleSimpleTypeInference] # flags: --python-version 3.6 @@ -233,6 +247,7 @@ class MyNamedTuple(NamedTuple): b: str MyNamedTuple.x # E: "Type[MyNamedTuple]" has no attribute "x" +[builtins fixtures/tuple.pyi] [case testNewNamedTupleEmptyItems] # flags: --python-version 3.6 @@ -240,6 +255,7 @@ from typing import NamedTuple class A(NamedTuple): ... +[builtins fixtures/tuple.pyi] [case testNewNamedTupleForwardRef] # flags: --python-version 3.6 @@ -252,6 +268,7 @@ class B: ... a = A(B()) a = A(1) # E: Argument 1 to "A" has incompatible type "int"; expected "B" +[builtins fixtures/tuple.pyi] [case testNewNamedTupleProperty] # flags: --python-version 3.6 @@ -295,6 +312,7 @@ x: X reveal_type(x._replace()) # N: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]' x._replace(x=5) x._replace(y=5) # E: Argument "y" to "_replace" of "X" has incompatible type "int"; expected "str" +[builtins fixtures/tuple.pyi] [case testNewNamedTupleFields] # flags: --python-version 3.6 @@ -321,6 +339,7 @@ class X(NamedTuple): x: X = X() x._replace() x._fields[0] # E: Tuple index out of range +[builtins fixtures/tuple.pyi] [case testNewNamedTupleJoinNamedTuple] # flags: --python-version 3.6 @@ -358,6 +377,7 @@ class X(NamedTuple): x: int y = z = 2 # E: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" def f(self): pass +[builtins fixtures/tuple.pyi] [case testNewNamedTupleWithInvalidItems2] # flags: --python-version 3.6 @@ -386,6 +406,7 @@ from typing import NamedTuple class X(NamedTuple): x: int y = 2 # E: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" +[builtins fixtures/tuple.pyi] [case testTypeUsingTypeCNamedTuple] # flags: --python-version 3.6 @@ -467,13 +488,15 @@ from typing import NamedTuple class 
X(NamedTuple): x: int y: int = 'not an int' # E: Incompatible types in assignment (expression has type "str", variable has type "int") +[builtins fixtures/tuple.pyi] [case testNewNamedTupleErrorInDefault] # flags: --python-version 3.6 from typing import NamedTuple class X(NamedTuple): - x: int = 1 + '1' # E: Unsupported operand types for + ("int" and "str") + x: int = 1 + '1' # E: Unsupported left operand type for + ("int") +[builtins fixtures/tuple.pyi] [case testNewNamedTupleInheritance] # flags: --python-version 3.6 @@ -494,6 +517,7 @@ Y(y=1, x='1').method() class CallsBaseInit(X): def __init__(self, x: str) -> None: super().__init__(x) # E: Too many arguments for "__init__" of "object" +[builtins fixtures/tuple.pyi] [case testNewNamedTupleWithMethods] from typing import NamedTuple @@ -510,15 +534,16 @@ class XRepr(NamedTuple): y: int = 1 def __str__(self) -> str: return 'string' - def __add__(self, other: XRepr) -> int: + def __sub__(self, other: XRepr) -> int: return 0 reveal_type(XMeth(1).double()) # N: Revealed type is 'builtins.int' reveal_type(XMeth(1).asyncdouble()) # N: Revealed type is 'typing.Coroutine[Any, Any, builtins.int]' reveal_type(XMeth(42).x) # N: Revealed type is 'builtins.int' reveal_type(XRepr(42).__str__()) # N: Revealed type is 'builtins.str' -reveal_type(XRepr(1, 2).__add__(XRepr(3))) # N: Revealed type is 'builtins.int' +reveal_type(XRepr(1, 2).__sub__(XRepr(3))) # N: Revealed type is 'builtins.int' [typing fixtures/typing-async.pyi] +[builtins fixtures/tuple.pyi] [case testNewNamedTupleOverloading] from typing import NamedTuple, overload @@ -538,6 +563,7 @@ Overloader(1).method(('tuple',)) # E: No overload variant of "method" of "Overl # N: Possible overload variants: \ # N: def method(self, y: str) -> str \ # N: def method(self, y: int) -> int +[builtins fixtures/tuple.pyi] [case testNewNamedTupleMethodInheritance] from typing import NamedTuple, TypeVar @@ -642,6 +668,7 @@ class BadDoc(NamedTuple): return '' 
reveal_type(BadDoc(1).__doc__()) # N: Revealed type is 'builtins.str' +[builtins fixtures/tuple.pyi] [case testNewNamedTupleClassMethod] from typing import NamedTuple diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index be765be67bfe..290926c80466 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -1869,6 +1869,7 @@ class B: def __radd__(self, x: Callable[[], int]) -> int: pass class C: def __radd__(self, x: Any) -> int: pass +[builtins fixtures/tuple.pyi] [out] [case testReverseOperatorMethodInvalid] @@ -2106,6 +2107,7 @@ class A: class B: def __radd__(*self) -> int: pass def __rsub__(*self: 'B') -> int: pass +[builtins fixtures/tuple.pyi] [case testReverseOperatorTypeVar1] from typing import TypeVar, Any @@ -2466,6 +2468,7 @@ class B: pass a = a.foo b = a.bar +[builtins fixtures/tuple.pyi] [out] main:9: error: Incompatible types in assignment (expression has type "A", variable has type "B") @@ -2492,6 +2495,7 @@ class B: pass a = a.foo b = a.bar +[builtins fixtures/tuple.pyi] [out] main:9: error: Incompatible types in assignment (expression has type "A", variable has type "B") @@ -3036,6 +3040,7 @@ def foo(arg: Type[Any]): x.foo class X: pass foo(X) +[builtins fixtures/tuple.pyi] [out] [case testTypeUsingTypeCTypeAnyMember] @@ -3192,6 +3197,7 @@ def f(a: T): pass from typing import Type, Tuple def f(a: Type[Tuple[int, int]]): a() +[builtins fixtures/tuple.pyi] [out] main:2: error: Unsupported type Type["Tuple[int, int]"] @@ -4026,6 +4032,7 @@ class A: __slots__ = ("a") class B(A): __slots__ = ("a", "b") +[builtins fixtures/tuple.pyi] [case testClassOrderOfError] class A: @@ -4384,6 +4391,7 @@ from typing import NamedTuple N = NamedTuple('N', [('x', N)]) # E: Cannot resolve name "N" (possible cyclic definition) n: N reveal_type(n) # N: Revealed type is 'Tuple[Any, fallback=__main__.N]' +[builtins fixtures/tuple.pyi] [case testCrashOnSelfRecursiveTypedDictVar] from mypy_extensions import 
TypedDict @@ -4506,6 +4514,7 @@ class N(NamedTuple): x: NT = N(1) # E: Incompatible types in assignment (expression has type "N", variable has type "NT") x = NT(N(1)) +[builtins fixtures/tuple.pyi] [out] [case testNewTypeFromForwardTypedDict] @@ -4528,6 +4537,7 @@ def get_state(proc: 'Process') -> int: return proc.state class Process(NamedTuple): state: int +[builtins fixtures/tuple.pyi] [out] [case testCorrectItemTypeInForwardRefToTypedDict] @@ -4554,6 +4564,7 @@ class B(NamedTuple): y: A y = x reveal_type(x.one.attr) # N: Revealed type is 'builtins.str' +[builtins fixtures/tuple.pyi] [out] [case testCrashOnDoubleForwardTypedDict] @@ -4583,6 +4594,7 @@ def foo(node: Node) -> int: x = node reveal_type(node) # N: Revealed type is 'Union[Tuple[builtins.int, fallback=__main__.Foo], Tuple[builtins.int, fallback=__main__.Bar]]' return x.x +[builtins fixtures/tuple.pyi] [out] [case testCrashOnForwardUnionOfTypedDicts] @@ -4755,6 +4767,7 @@ class A(six.with_metaclass(M)): pass class B: pass reveal_type(type(A).x) # N: Revealed type is 'builtins.int' reveal_type(type(B).x) # N: Revealed type is 'builtins.int' +[builtins fixtures/tuple.pyi] [case testSixMetaclass_python2] import six @@ -4775,6 +4788,7 @@ class A(with_metaclass(M)): pass class B: pass reveal_type(type(A).x) # N: Revealed type is 'builtins.int' reveal_type(type(B).x) # N: Revealed type is 'builtins.int' +[builtins fixtures/tuple.pyi] [case testSixMetaclassImportFrom] import six @@ -4787,6 +4801,7 @@ reveal_type(type(B).x) # N: Revealed type is 'builtins.int' [file metadefs.py] class M(type): x = 5 +[builtins fixtures/tuple.pyi] [case testSixMetaclassImport] import six @@ -4799,6 +4814,7 @@ reveal_type(type(B).x) # N: Revealed type is 'builtins.int' [file metadefs.py] class M(type): x = 5 +[builtins fixtures/tuple.pyi] [case testSixMetaclassAndBase] from typing import Iterable, Iterator @@ -4832,6 +4848,7 @@ C2().bar() D2().bar() C2().baz() # E: "C2" has no attribute "baz" D2().baz() # E: "D2" has no attribute 
"baz" +[builtins fixtures/tuple.pyi] [case testSixMetaclassGenerics] from typing import Generic, GenericMeta, TypeVar @@ -4883,6 +4900,7 @@ class Q1(metaclass=M1): pass @six.add_metaclass(M) class CQA(Q1): pass # E: Inconsistent metaclass structure for 'CQA' class CQW(six.with_metaclass(M, Q1)): pass # E: Inconsistent metaclass structure for 'CQW' +[builtins fixtures/tuple.pyi] [case testSixMetaclassErrors_python2] # flags: --python-version 2.7 @@ -4898,6 +4916,7 @@ class E(metaclass=t.M): pass class F(six.with_metaclass(t.M)): pass @six.add_metaclass(t.M) class G: pass +[builtins fixtures/tuple.pyi] -- Special support for future.utils -- -------------------------------- @@ -4908,6 +4927,7 @@ class M(type): x = 5 class A(future.utils.with_metaclass(M)): pass reveal_type(type(A).x) # N: Revealed type is 'builtins.int' +[builtins fixtures/tuple.pyi] [case testFutureMetaclass_python2] import future.utils @@ -4922,6 +4942,7 @@ class M(type): x = 5 class A(with_metaclass(M)): pass reveal_type(type(A).x) # N: Revealed type is 'builtins.int' +[builtins fixtures/tuple.pyi] [case testFutureMetaclassImportFrom] import future.utils @@ -4931,6 +4952,7 @@ reveal_type(type(A).x) # N: Revealed type is 'builtins.int' [file metadefs.py] class M(type): x = 5 +[builtins fixtures/tuple.pyi] [case testFutureMetaclassImport] import future.utils @@ -4940,6 +4962,7 @@ reveal_type(type(A).x) # N: Revealed type is 'builtins.int' [file metadefs.py] class M(type): x = 5 +[builtins fixtures/tuple.pyi] [case testFutureMetaclassAndBase] from typing import Iterable, Iterator @@ -4962,6 +4985,7 @@ for x in C2: reveal_type(x) # N: Revealed type is 'builtins.int*' C2().foo() C2().bar() C2().baz() # E: "C2" has no attribute "baz" +[builtins fixtures/tuple.pyi] [case testFutureMetaclassGenerics] from typing import Generic, GenericMeta, TypeVar @@ -4995,6 +5019,7 @@ class C5(future.utils.with_metaclass(f())): pass # E: Dynamic metaclass not sup class M1(type): pass class Q1(metaclass=M1): pass class 
CQW(future.utils.with_metaclass(M, Q1)): pass # E: Inconsistent metaclass structure for 'CQW' +[builtins fixtures/tuple.pyi] [case testFutureMetaclassErrors_python2] # flags: --python-version 2.7 @@ -5011,6 +5036,7 @@ class F(future.utils.with_metaclass(t.M)): pass -- Misc -- ---- +[builtins fixtures/tuple.pyi] [case testCorrectEnclosingClassPushedInDeferred] class C: @@ -5132,6 +5158,7 @@ class C: __slots__ = ('x',) class D(B, C): __slots__ = ('aa', 'bb', 'cc') +[builtins fixtures/tuple.pyi] [case testRevealLocalsOnClassVars] class C1(object): @@ -5511,6 +5538,7 @@ class B(A): pass reveal_type(B) # N: Revealed type is 'def (x: builtins.int) -> __main__.B' +[builtins fixtures/tuple.pyi] [case testNewAndInit3] from typing import Any @@ -5522,6 +5550,7 @@ class A: pass reveal_type(A) # N: Revealed type is 'def (x: builtins.int) -> __main__.A' +[builtins fixtures/tuple.pyi] [case testCyclicDecorator] import b @@ -5543,6 +5572,7 @@ class B: from typing import TypeVar, Tuple, Callable T = TypeVar('T') def deco(f: Callable[..., T]) -> Callable[..., Tuple[T, int]]: ... +[builtins fixtures/tuple.pyi] [out] [case testCyclicOverload] @@ -5647,6 +5677,7 @@ class Base: from typing import TypeVar, Tuple, Callable T = TypeVar('T') def deco(f: Callable[..., T]) -> Callable[..., Tuple[T, int]]: ... +[builtins fixtures/tuple.pyi] [out] [case testCyclicOverrideCheckedDecoratorDeferred] @@ -5670,6 +5701,7 @@ def f() -> int: ... from typing import TypeVar, Tuple, Callable T = TypeVar('T') def deco(f: Callable[..., T]) -> Callable[..., Tuple[T, int]]: ... +[builtins fixtures/tuple.pyi] [out] [case testCyclicOverrideAnyDecoratorDeferred] @@ -5723,6 +5755,7 @@ class B: from typing import TypeVar, Tuple, Callable T = TypeVar('T') def deco(f: Callable[..., T]) -> Callable[..., Tuple[T, int]]: ... 
+[builtins fixtures/tuple.pyi] [out] [case testCyclicDecoratorSuper] @@ -5749,6 +5782,7 @@ class B: from typing import TypeVar, Tuple, Callable T = TypeVar('T') def deco(f: Callable[..., T]) -> Callable[..., Tuple[T, int]]: ... +[builtins fixtures/tuple.pyi] [out] [case testCyclicDecoratorBothDeferred] @@ -5780,6 +5814,7 @@ def f() -> int: ... from typing import TypeVar, Tuple, Callable T = TypeVar('T') def deco(f: Callable[..., T]) -> Callable[..., Tuple[T, int]]: ... +[builtins fixtures/tuple.pyi] [out] [case testCyclicDecoratorSuperDeferred] @@ -5813,6 +5848,7 @@ class B: from typing import TypeVar, Tuple, Callable T = TypeVar('T') def deco(f: Callable[..., T]) -> Callable[..., Tuple[T, int]]: ... +[builtins fixtures/tuple.pyi] [out] [case testOptionalDescriptorsBinder] @@ -6199,6 +6235,7 @@ N = NamedTuple('N', [('x', int)]) class B(A, N): pass reveal_type(A()) # N: Revealed type is 'Tuple[builtins.int, fallback=__main__.B]' +[builtins fixtures/tuple.pyi] [case testNewReturnType8] from typing import TypeVar, Any @@ -6283,6 +6320,7 @@ class C(B[int, T]): def __init__(self) -> None: # TODO: error message could be better. 
self.x: Tuple[str, T] # E: Incompatible types in assignment (expression has type "Tuple[str, T]", base class "A" defined the type as "Tuple[int, T]") +[builtins fixtures/tuple.pyi] [case testInitSubclassWrongType] class Base: diff --git a/test-data/unit/check-custom-plugin.test b/test-data/unit/check-custom-plugin.test index d2e2221ef5e3..16651e16efc1 100644 --- a/test-data/unit/check-custom-plugin.test +++ b/test-data/unit/check-custom-plugin.test @@ -437,6 +437,7 @@ for x in foo: [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/method_sig_hook.py +[builtins fixtures/tuple.pyi] [case testMethodSignatureHookNamesFullyQualified] # flags: --config-file tmp/mypy.ini diff --git a/test-data/unit/check-default-plugin.test b/test-data/unit/check-default-plugin.test index e479d6b58823..0b4de54dbe8b 100644 --- a/test-data/unit/check-default-plugin.test +++ b/test-data/unit/check-default-plugin.test @@ -22,6 +22,7 @@ f = yield_id def g(x, y): pass f = g # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], Any]", variable has type "Callable[[T], GeneratorContextManager[T]]") [typing fixtures/typing-medium.pyi] +[builtins fixtures/tuple.pyi] [case testContextManagerWithUnspecifiedArguments] from contextlib import contextmanager @@ -31,3 +32,4 @@ c: Callable[..., Iterator[int]] reveal_type(c) # N: Revealed type is 'def (*Any, **Any) -> typing.Iterator[builtins.int]' reveal_type(contextmanager(c)) # N: Revealed type is 'def (*Any, **Any) -> contextlib.GeneratorContextManager[builtins.int*]' [typing fixtures/typing-medium.pyi] +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-dynamic-typing.test b/test-data/unit/check-dynamic-typing.test index 99aed1b6faf5..98895ba3b302 100644 --- a/test-data/unit/check-dynamic-typing.test +++ b/test-data/unit/check-dynamic-typing.test @@ -38,6 +38,7 @@ s, t = d class A: pass class B: pass +[builtins fixtures/tuple.pyi] -- Expressions @@ -65,6 +66,7 @@ def f(x: Any) -> 'A': class A: pass class B: 
pass +[builtins fixtures/tuple.pyi] [case testCallingWithDynamicReturnType] from typing import Any @@ -80,6 +82,7 @@ def f(x: 'A') -> Any: class A: pass class B: pass +[builtins fixtures/tuple.pyi] [case testBinaryOperationsWithDynamicLeftOperand] from typing import Any @@ -445,6 +448,7 @@ def f13(x, y = b, z = b): pass class A: pass class B: pass +[builtins fixtures/tuple.pyi] [case testSkipTypeCheckingWithImplicitSignature] a = None # type: A @@ -672,6 +676,7 @@ class A(B): pass def g(self, x: Any) -> None: pass +[builtins fixtures/tuple.pyi] [case testOverridingMethodWithImplicitDynamicTypes] @@ -690,6 +695,7 @@ class A(B): pass def g(self, x): pass +[builtins fixtures/tuple.pyi] [case testOverridingMethodAcrossHierarchy] import typing diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test index 18130d2d818c..8984b68cbc24 100644 --- a/test-data/unit/check-enum.test +++ b/test-data/unit/check-enum.test @@ -24,6 +24,7 @@ reveal_type(Medal.bronze) # N: Revealed type is 'Literal[__main__.Medal.bronze] m = Medal.gold if int(): m = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Medal") +[builtins fixtures/tuple.pyi] [case testEnumFromEnumMetaSubclass] from enum import EnumMeta @@ -38,6 +39,7 @@ reveal_type(Medal.bronze) # N: Revealed type is 'Literal[__main__.Medal.bronze] m = Medal.gold if int(): m = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "Medal") +[builtins fixtures/tuple.pyi] [case testEnumFromEnumMetaGeneric] from enum import EnumMeta @@ -159,6 +161,7 @@ class E(N, Enum): def f(x: E) -> None: pass f(E.X) +[builtins fixtures/tuple.pyi] [case testEnumCall] from enum import IntEnum @@ -708,6 +711,7 @@ elif y2 is Bar.B: reveal_type(y2) # N: Revealed type is 'Literal[__main__.Bar.B]' else: reveal_type(y2) # No output here: this branch is unreachable +[builtins fixtures/tuple.pyi] [case testEnumReachabilityChecksIndirect] from enum import Enum diff --git 
a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index 89e777004551..f6886261570f 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -251,6 +251,7 @@ x: f # E: Function "__main__.f" is not valid as a type [valid-type] \ import sys y: sys # E: Module "sys" is not valid as a type [valid-type] z: y # E: Variable "__main__.y" is not valid as a type [valid-type] +[builtins fixtures/tuple.pyi] [case testErrorCodeNeedTypeAnnotation] from typing import TypeVar @@ -363,6 +364,7 @@ async def asyncf(): # E: Function is missing a return type annotation [no-unty async def asyncf2(x: int): # E: Function is missing a return type annotation [no-untyped-def] return 0 [typing fixtures/typing-async.pyi] +[builtins fixtures/tuple.pyi] [case testErrorCodeCallUntypedFunction] # flags: --disallow-untyped-calls @@ -432,6 +434,7 @@ B() + '' # E: Unsupported operand types for + ("B" and "str") [operator] '' in B() # E: Unsupported operand types for in ("str" and "B") [operator] 1() # E: "int" not callable [operator] +[builtins fixtures/tuple.pyi] [case testErrorCodeListOrDictItem] from typing import List, Dict @@ -636,6 +639,7 @@ def g(p: P) -> None: pass p: A g(p) # type: ignore[arg-type] +[builtins fixtures/tuple.pyi] [case testErrorCodeNoneReturnNoteIgnore] # flags: --disallow-untyped-defs diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index 5820e2b1653e..7eca7432aad0 100644 --- a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -140,6 +140,7 @@ class B: pass class C: pass +[builtins fixtures/tuple.pyi] [case testSub] a, b, c = None, None, None # type: (A, B, C) @@ -159,6 +160,7 @@ class B: pass class C: pass +[builtins fixtures/tuple.pyi] [case testMul] a, b, c = None, None, None # type: (A, B, C) @@ -178,6 +180,7 @@ class B: pass class C: pass +[builtins fixtures/tuple.pyi] [case testMatMul] a, b, c = None, None, None # type: (A, B, 
C) @@ -197,6 +200,7 @@ class B: pass class C: pass +[builtins fixtures/tuple.pyi] [case testDiv] a, b, c = None, None, None # type: (A, B, C) @@ -215,6 +219,7 @@ class B: pass class C: pass +[builtins fixtures/tuple.pyi] [case testIntDiv] a, b, c = None, None, None # type: (A, B, C) @@ -233,6 +238,7 @@ class B: pass class C: pass +[builtins fixtures/tuple.pyi] [case testMod] a, b, c = None, None, None # type: (A, B, C) @@ -252,6 +258,7 @@ class B: pass class C: pass +[builtins fixtures/tuple.pyi] [case testPow] a, b, c = None, None, None # type: (A, B, C) @@ -271,6 +278,7 @@ class B: pass class C: pass +[builtins fixtures/tuple.pyi] [case testMiscBinaryOperators] @@ -293,6 +301,7 @@ class A: def __lshift__(self, x: 'A') -> 'B': pass def __rshift__(self, x: 'B') -> 'B': pass class B: pass +[builtins fixtures/tuple.pyi] [out] main:3: error: Unsupported operand types for & ("A" and "A") main:4: error: Unsupported operand types for | ("A" and "B") @@ -813,6 +822,7 @@ class A: pass class B: pass +[builtins fixtures/tuple.pyi] [case testUnaryPlus] a, b = None, None # type: (A, B) @@ -828,6 +838,7 @@ class A: pass class B: pass +[builtins fixtures/tuple.pyi] [case testUnaryNot] a, b = None, None # type: (A, bool) @@ -859,6 +870,7 @@ class B: -- Indexing -- -------- +[builtins fixtures/tuple.pyi] [case testIndexing] @@ -878,6 +890,7 @@ class A: pass class B: pass class C: pass +[builtins fixtures/tuple.pyi] [case testIndexingAsLvalue] @@ -894,6 +907,7 @@ class B: pass class C: pass +[builtins fixtures/tuple.pyi] [out] main:3: error: Invalid index type "C" for "A"; expected type "B" main:4: error: Incompatible types in assignment (expression has type "A", target has type "C") @@ -931,6 +945,7 @@ class A: pass class B: pass class C: pass +[builtins fixtures/tuple.pyi] [out] @@ -961,6 +976,7 @@ if int(): if int(): a = cast(Any, b) b = cast(Any, a) +[builtins fixtures/tuple.pyi] [out] [case testAnyCast] @@ -971,6 +987,7 @@ a = cast(Any, b) b = cast(Any, a) class A: pass class 
B: pass +[builtins fixtures/tuple.pyi] [out] main:3: error: "A" not callable @@ -1002,6 +1019,7 @@ class A: pass def __call__(self) -> None: pass +[builtins fixtures/tuple.pyi] [case testNoneReturnTypeWithStatements] import typing @@ -1330,17 +1348,20 @@ main:4: error: Incompatible types in string interpolation (expression has type " def foo(a: bytes, b: bytes): b'%s:%s' % (a, b) foo(b'a', b'b') == b'a:b' +[builtins fixtures/tuple.pyi] [case testStringInterpolationStarArgs] x = (1, 2) "%d%d" % (*x,) [typing fixtures/typing-medium.pyi] +[builtins fixtures/tuple.pyi] [case testBytePercentInterpolationSupported] b'%s' % (b'xyz',) b'%(name)s' % {'name': b'jane'} # E: Dictionary keys in bytes formatting must be bytes, not strings b'%(name)s' % {b'name': 'jane'} # E: On Python 3 b'%s' requires bytes, not string b'%c' % (123) +[builtins fixtures/tuple.pyi] [case testUnicodeInterpolation_python2] u'%s' % (u'abc',) @@ -1365,6 +1386,7 @@ b: Union[Tuple[int, str], Tuple[int, int], Tuple[str, int]] = ('A', 1) c: Union[Tuple[str, int], Tuple[str, int, str]] = ('A', 1) '%s %s' % c # E: Not all arguments converted during string formatting +[builtins fixtures/tuple.pyi] -- str.format() calls -- ------------------ @@ -2035,6 +2057,7 @@ class B: pass class C(B): pass +[builtins fixtures/tuple.pyi] [out] diff --git a/test-data/unit/check-fastparse.test b/test-data/unit/check-fastparse.test index 6e81b6dd9403..1e7dba635440 100644 --- a/test-data/unit/check-fastparse.test +++ b/test-data/unit/check-fastparse.test @@ -113,6 +113,7 @@ with open('test', 'r') as f: # type: int # type: ignore [case testFastParseTypeWithIgnoreForStmt] for i in (1, 2, 3, 100): # type: str # type: ignore pass +[builtins fixtures/tuple.pyi] [case testFastParseVariableCommentThenIgnore] a="test" # type: int #comment # type: ignore # E: Incompatible types in assignment (expression has type "str", variable has type "int") @@ -343,6 +344,7 @@ assert (1, 2) # E: Assertion is always true, perhaps remove parentheses? 
assert (1, 2), 3 # E: Assertion is always true, perhaps remove parentheses? assert () assert (1,) # E: Assertion is always true, perhaps remove parentheses? +[builtins fixtures/tuple.pyi] [case testFastParseAssertMessage] diff --git a/test-data/unit/check-final.test b/test-data/unit/check-final.test index e3174aff04b1..40ed4f3a9a45 100644 --- a/test-data/unit/check-final.test +++ b/test-data/unit/check-final.test @@ -43,6 +43,7 @@ class C: self.y: Final[float] = 1 reveal_type(C((1, 2)).x) # N: Revealed type is 'Tuple[builtins.int, Any]' reveal_type(C((1, 2)).y) # N: Revealed type is 'builtins.float' +[builtins fixtures/tuple.pyi] [out] [case testFinalBadDefinitionTooManyArgs] @@ -182,6 +183,7 @@ reveal_type(C().g) # N: Revealed type is 'builtins.int' from typing import Final, Callable, Tuple, Any x: Tuple[Final] # E: Final can be only used as an outermost qualifier in a variable annotation y: Callable[[], Tuple[Final[int]]] # E: Final can be only used as an outermost qualifier in a variable annotation +[builtins fixtures/tuple.pyi] [out] [case testFinalDefiningNotInMethod] @@ -196,6 +198,7 @@ from typing_extensions import Final def f(x: Final[int]) -> int: ... # E: Final can be only used as an outermost qualifier in a variable annotation def g(x: int) -> Final[int]: ... 
# E: Final can be only used as an outermost qualifier in a variable annotation +[builtins fixtures/tuple.pyi] [out] [case testFinalDefiningNoRhs] @@ -234,6 +237,7 @@ d: Any class C(Generic[T]): x: Final[Tuple[T, T]] = d # E: Final name declared in class body cannot depend on type variables +[builtins fixtures/tuple.pyi] [out] [case testFinalDefiningTypevarsImplicit] @@ -250,6 +254,7 @@ reveal_type(C((1, 2)).x) # N: Revealed type is 'Tuple[builtins.int*, builtins.i C.x # E: Cannot access final instance attribute "x" on class object \ # E: Access to generic instance variables via class is ambiguous C.y # E: Cannot access final instance attribute "y" on class object +[builtins fixtures/tuple.pyi] [out] [case testFinalDefiningNotInOtherMethod] @@ -259,6 +264,7 @@ class C: def meth(self, x: Tuple[int, Any]) -> None: self.x: Final = x # E: Can only declare a final attribute in class body or __init__ self.y: Final[float] = 1 # E: Can only declare a final attribute in class body or __init__ +[builtins fixtures/tuple.pyi] [out] [case testFinalDefiningOnlyOnSelf] @@ -273,6 +279,7 @@ class C: slf.x: Final = x # E: Final can be only applied to a name or an attribute on self slf.y: Final[float] = 1 # E: Type cannot be declared in assignment to non-self attribute \ # E: Final can be only applied to a name or an attribute on self +[builtins fixtures/tuple.pyi] [out] [case testFinalNotInProtocol] @@ -1026,6 +1033,7 @@ class C(B): @final class F: ... class E(F): ... # E: Cannot inherit from final class "F" +[builtins fixtures/tuple.pyi] [out] [case testFinalCanUseTypingExtensionsAliased] @@ -1047,6 +1055,7 @@ class C(B): @f class D(C): ... class E(D): ... 
# E: Cannot inherit from final class "D" +[builtins fixtures/tuple.pyi] [out] [case testFinalMultiassignAllowed] @@ -1072,3 +1081,4 @@ class A: def __init__(self) -> None: self.x = 10 # type: Final undefined # type: ignore +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index 1aa5a020f32d..38fb0213bcfe 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -726,6 +726,7 @@ from missing import Unchecked foo: Unchecked = "" foo = "" x, y = 1, 2 # type: Unchecked, Unchecked +[builtins fixtures/tuple.pyi] [out] main:4: error: Type of variable becomes "Any" due to an unfollowed import main:6: error: A type on this line becomes "Any" due to an unfollowed import @@ -1083,6 +1084,7 @@ from typing import NamedTuple N = NamedTuple('N', [('x', N)]) # type: ignore n: N +[builtins fixtures/tuple.pyi] [out] [case testCheckDisallowAnyGenericsTypedDict] @@ -1180,6 +1182,7 @@ b = 6 [file other_module_2.py] from other_module_1 import a, b __all__ = ('b',) +[builtins fixtures/tuple.pyi] [out] main:2: error: Module 'other_module_2' has no attribute 'a' @@ -1203,6 +1206,7 @@ b = 6 [file other_module_2.py] from other_module_1 import * __all__ = ('b',) +[builtins fixtures/tuple.pyi] [out] main:2: error: Module 'other_module_2' has no attribute 'a' @@ -1298,6 +1302,7 @@ def foo(x: TupleAny[str]) -> None: # no error def goo(x: TupleAny[Any]) -> None: # E: Explicit "Any" is not allowed pass +[builtins fixtures/tuple.pyi] [case testDisallowAnyExplicitCast] # flags: --disallow-any-explicit @@ -1360,6 +1365,7 @@ def g(s) -> Tuple: # E: Missing type parameters for generic type "Tuple" def h(s) -> Tuple[str, str]: # no error return 'a', 'b' x: Tuple = () # E: Missing type parameters for generic type "Tuple" +[builtins fixtures/tuple.pyi] [case testDisallowAnyGenericsTupleWithNoTypeParamsGeneric] # flags: --disallow-any-generics @@ -1403,6 +1409,7 @@ def g(s) -> A: # E: Missing type parameters for generic 
type "A" def h(s) -> A[str]: # no error return 'a', 'b', 'c' x: A = ('a', 'b', 1) # E: Missing type parameters for generic type "A" +[builtins fixtures/tuple.pyi] [case testDisallowAnyGenericsPlainList] # flags: --python-version 3.6 --disallow-any-generics diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index 07999e630127..c37ca101de76 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -25,6 +25,7 @@ if int(): class A: pass class B: pass +[builtins fixtures/tuple.pyi] [case testKeywordOnlyArgumentOrderInsensitivity] import typing @@ -243,6 +244,7 @@ if int(): o = f class A: pass +[builtins fixtures/tuple.pyi] [case testFunctionSubtypingWithVoid] from typing import Callable @@ -407,6 +409,7 @@ def f(x: A) -> A: pass def f(x: B) -> B: pass @overload def f(x: C) -> C: pass +[builtins fixtures/tuple.pyi] [case testInferConstraintsUnequalLengths] from typing import Any, Callable, List @@ -459,6 +462,7 @@ def f(x: 'A' = None) -> 'B': pass class A: pass class AA(A): pass class B: pass +[builtins fixtures/tuple.pyi] [case testDefaultArgumentExpressions] import typing @@ -1552,6 +1556,7 @@ from contextlib import contextmanager def f(): yield [typing fixtures/typing-medium.pyi] +[builtins fixtures/tuple.pyi] -- Conditional method definition @@ -1713,6 +1718,7 @@ def a(f: F): from collections import namedtuple class C(namedtuple('t', 'x')): pass +[builtins fixtures/tuple.pyi] [case testCallableParsingSameName] from typing import Callable @@ -1978,9 +1984,11 @@ def g3(*x: T) -> T: pass f(g1) f(g2) f(g3) +[builtins fixtures/tuple.pyi] -- (...) -> T -- ---------------- + [case testEllipsisWithArbitraryArgsOnBareFunction] def f(x, y, z): # type: (...) 
-> None pass diff --git a/test-data/unit/check-generic-subtyping.test b/test-data/unit/check-generic-subtyping.test index 176d917b4f50..aca56a061e8c 100644 --- a/test-data/unit/check-generic-subtyping.test +++ b/test-data/unit/check-generic-subtyping.test @@ -137,6 +137,7 @@ class A(Generic[T]): class B(A[S], Generic[T, S]): pass class C: pass class D: pass +[builtins fixtures/tuple.pyi] [case testAccessingMethodInheritedFromGenericTypeInNonGenericType] from typing import TypeVar, Generic @@ -152,6 +153,7 @@ class A(Generic[T]): def f(self, a: T) -> None: pass class B(A[D]): pass +[builtins fixtures/tuple.pyi] [case testAccessingMemberVarInheritedFromGenericType] from typing import TypeVar, Generic @@ -170,6 +172,7 @@ b.a = d class B(A[S], Generic[T, S]): pass class C: pass class D: pass +[builtins fixtures/tuple.pyi] -- Overriding with generic types @@ -483,6 +486,7 @@ if int(): class C: pass class D: pass class E: pass +[builtins fixtures/tuple.pyi] [out] [case testSubtypingWithTypeImplementingGenericABCViaInheritance2-skip] @@ -516,6 +520,7 @@ class J(Generic[t]): pass class X(metaclass=ABCMeta): pass class I(X, J[t], Generic[t]): pass class A(I[t], Generic[t]): pass +[builtins fixtures/tuple.pyi] -- Subclassing a generic ABC @@ -569,6 +574,7 @@ class A(B): def f(self, a: 'C', b: 'C') -> None: pass class C: pass class D: pass +[builtins fixtures/tuple.pyi] [case testSubclassingGenericABCWithDeepHierarchy2] from typing import Any, TypeVar, Generic @@ -675,6 +681,7 @@ ia = None # type: I[A] ia.f(b) # E: Argument 1 to "f" of "I" has incompatible type "B"; expected "A" ia.f(a) +[builtins fixtures/tuple.pyi] [case testAccessingInheritedGenericABCMembers] from typing import TypeVar, Generic @@ -691,6 +698,7 @@ ia = None # type: I[A] ia.f(b) # E: Argument 1 to "f" of "J" has incompatible type "B"; expected "A" ia.f(a) +[builtins fixtures/tuple.pyi] -- Misc diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index 
63e3470c8f56..93714a97ddde 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -15,6 +15,7 @@ class A(Generic[T]): class B: pass class C: pass +[builtins fixtures/tuple.pyi] [case testGenericMethodArgument] from typing import TypeVar, Generic @@ -47,6 +48,7 @@ a.v = b class B: pass class C: pass +[builtins fixtures/tuple.pyi] [out] main:8: error: Incompatible types in assignment (expression has type "C", variable has type "B") @@ -61,6 +63,7 @@ class A(Generic[T]): v = None # type: T class B: pass class C: pass +[builtins fixtures/tuple.pyi] [out] main:4: error: Incompatible types in assignment (expression has type "C", variable has type "B") @@ -80,6 +83,7 @@ if int(): class A(Generic[T]): pass class B: pass class C(B): pass +[builtins fixtures/tuple.pyi] [case testGenericTypeCompatibilityWithAny] from typing import Any, TypeVar, Generic @@ -94,6 +98,7 @@ d = c class A(Generic[T]): pass class B: pass class C(B): pass +[builtins fixtures/tuple.pyi] [out] [case testTypeVariableAsTypeArgument] @@ -824,6 +829,7 @@ use_cb(1, 1) # E: Argument 2 to "use_cb" has incompatible type "int"; expected " my_cb = None # type: C2[int] use_cb('x', my_cb) # E: Argument 2 to "use_cb" has incompatible type "Callable[[int, int], Node[int]]"; expected "Callable[[str, str], Node[str]]" reveal_type(use_cb(1, my_cb)) # N: Revealed type is '__main__.Node[builtins.int]' +[builtins fixtures/tuple.pyi] [out] @@ -899,6 +905,7 @@ T = TypeVar('T') R = TypeVar('R') Transform = Callable[[T, int], Tuple[T, R]] +[builtins fixtures/tuple.pyi] [out] [case testGenericTypeAliasesImportingWithoutTypeVarError] @@ -1028,6 +1035,7 @@ reveal_type(us) # N: Revealed type is 'Any' xx = CA[str] + 1 # E: Type application is only supported for generic classes yy = TA[str]() # E: Type application is only supported for generic classes zz = UA[str].x # E: Type application is only supported for generic classes +[builtins fixtures/tuple.pyi] [out] @@ -1740,6 +1748,7 @@ T = 
TypeVar('T') def f(x: Container[T]) -> T: ... reveal_type(f((1, 2))) # N: Revealed type is 'builtins.int*' [typing fixtures/typing-full.pyi] +[builtins fixtures/tuple.pyi] [case testClassMethodInGenericClassWithGenericConstructorArg] from typing import TypeVar, Generic diff --git a/test-data/unit/check-incomplete-fixture.test b/test-data/unit/check-incomplete-fixture.test index e560f6294362..44683ae295cf 100644 --- a/test-data/unit/check-incomplete-fixture.test +++ b/test-data/unit/check-incomplete-fixture.test @@ -58,17 +58,24 @@ main:1: error: Name 'isinstance' is not defined main:1: note: Maybe your test fixture does not define "builtins.isinstance"? main:1: note: Consider adding [builtins fixtures/isinstancelist.pyi] to your test description -[case testInvalidTupleDefinitionFromStubs] +[case testTupleMissingFromStubs1] +tuple() +[out] +main:1: error: Name 'tuple' is not defined +main:1: note: Maybe your test fixture does not define "builtins.tuple"? +main:1: note: Consider adding [builtins fixtures/tuple.pyi] to your test description +main:1: note: Did you forget to import it from "typing"? (Suggestion: "from typing import Tuple") + +[case testTupleMissingFromStubs2] +tuple() from typing import Tuple -x: Tuple[int, ...] -x[0] -for y in x: - pass +x: Tuple[int, str] [out] --- These errors are pretty bad, but keeping this test anyway to --- avoid things getting worse. -main:3: error: Value of type "Tuple[int, ...]" is not indexable -main:4: error: "Tuple[int, ...]" has no attribute "__iter__" (not iterable) +main:1: error: Name 'tuple' is not defined +main:1: note: Maybe your test fixture does not define "builtins.tuple"? +main:1: note: Consider adding [builtins fixtures/tuple.pyi] to your test description +main:1: note: Did you forget to import it from "typing"? 
(Suggestion: "from typing import Tuple") +main:3: error: Name 'tuple' is not defined [case testClassmethodMissingFromStubs] class A: diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index 6439e32b678b..edf536ac9306 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -1637,6 +1637,7 @@ MyTuple = NamedTuple('MyTuple', [ [rechecked bar, mid, foo] [stale bar] +[builtins fixtures/tuple.pyi] [out2] tmp/foo.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int" @@ -1671,6 +1672,7 @@ class Outer: [rechecked bar, mid, foo] [stale bar] +[builtins fixtures/tuple.pyi] [out2] tmp/foo.py:3: error: Argument 1 to "accept_int" has incompatible type "str"; expected "int" @@ -1817,6 +1819,7 @@ from typing import NamedTuple class C: def f(self) -> None: A = NamedTuple('A', [('x', int), ('y', int)]) +[builtins fixtures/tuple.pyi] [out1] main:1: error: Module 'ntcrash' has no attribute 'nope' [out2] @@ -1830,6 +1833,7 @@ class C: class D: def f(self) -> None: A = NamedTuple('A', [('x', int), ('y', int)]) +[builtins fixtures/tuple.pyi] [out1] main:1: error: Module 'ntcrash' has no attribute 'nope' [out2] @@ -1844,6 +1848,7 @@ class C: class D: def f(self) -> None: A = NamedTuple('A', [('x', int), ('y', int)]) +[builtins fixtures/tuple.pyi] [out1] main:1: error: Module 'ntcrash' has no attribute 'nope' [out2] @@ -2409,6 +2414,7 @@ class Pair(NamedTuple): last: str Person(name=Pair(first="John", last="Doe")) +[builtins fixtures/tuple.pyi] [out] [case testNoCrashForwardRefToBrokenDoubleNewTypeIncremental] @@ -2463,6 +2469,7 @@ class N(NamedTuple): x: NT = N(1) # type: ignore x = NT(N(1)) +[builtins fixtures/tuple.pyi] [out] [case testNewTypeFromForwardTypedDictIncremental] @@ -2533,6 +2540,7 @@ A = Union[B, int] # type: ignore B = Callable[[C], int] # type: ignore class C(NamedTuple): # type: ignore x: A +[builtins fixtures/tuple.pyi] [out] [case 
testGenericTypeAliasesForwardAnyIncremental1] @@ -2593,6 +2601,7 @@ yg: G[M] z: int = G[M]().x.x z = G[M]().x[0] M = NamedTuple('M', [('x', int)]) +[builtins fixtures/tuple.pyi] [out] [case testSelfRefNTIncremental1] @@ -3650,6 +3659,7 @@ cache_fine_grained = False cache_fine_grained = True [rechecked a, builtins, typing] [stale a, builtins, typing] +[builtins fixtures/tuple.pyi] [case testIncrementalPackageNameOverload] # cmd: mypy -m main a @@ -3696,6 +3706,7 @@ import b -- Every file should get reloaded, since the cache was invalidated [stale a, b, builtins, typing] [rechecked a, b, builtins, typing] +[builtins fixtures/tuple.pyi] [case testIncrementalBustedFineGrainedCache2] # flags2: --cache-fine-grained @@ -3708,6 +3719,7 @@ import b -- Every file should get reloaded, since the settings changed [stale a, b, builtins, typing] [rechecked a, b, builtins, typing] +[builtins fixtures/tuple.pyi] [case testIncrementalBustedFineGrainedCache3] # flags: --cache-fine-grained --no-sqlite-cache @@ -3723,6 +3735,7 @@ import b -- Every file should get reloaded, since the cache was invalidated [stale a, b, builtins, typing] [rechecked a, b, builtins, typing] +[builtins fixtures/tuple.pyi] [case testIncrementalWorkingFineGrainedCache] # flags: --cache-fine-grained @@ -4987,6 +5000,7 @@ a = 1 [file mod.py.2] from typing_extensions import Literal a: Literal[2] = 2 +[builtins fixtures/tuple.pyi] [out] main:2: note: Revealed type is 'builtins.int' [out2] @@ -5019,6 +5033,7 @@ reveal_type(x) [file b.py] from typing import NamedTuple NT = NamedTuple('BadName', [('x', int)]) +[builtins fixtures/tuple.pyi] [out] [out2] tmp/a.py:3: note: Revealed type is 'Tuple[builtins.int, fallback=b.BadName@2]' @@ -5058,6 +5073,7 @@ class C: def __init__(self) -> None: self.h: Hidden Hidden = NamedTuple('Hidden', [('x', int)]) +[builtins fixtures/tuple.pyi] [out] [out2] tmp/a.py:3: note: Revealed type is 'Tuple[builtins.int, fallback=b.C.Hidden@5]' diff --git 
a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test index dfb56e79b056..bddf254c2721 100644 --- a/test-data/unit/check-inference-context.test +++ b/test-data/unit/check-inference-context.test @@ -649,6 +649,7 @@ reveal_type(lambda x: 1) # N: Revealed type is 'def (x: Any) -> Literal[1]?' from typing import Callable def f(t: Callable[[str], str]) -> str: '' f(lambda *_: '') +[builtins fixtures/tuple.pyi] [case testInvalidContextForLambda] from typing import Callable diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index d482e90e2fa4..e762de9be3e6 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -195,6 +195,7 @@ def f() -> None: class A: pass class B: pass +[builtins fixtures/tuple.pyi] [out] [case testInferringLvarTypesInNestedTupleAssignment1] @@ -212,6 +213,7 @@ def f() -> None: class A: pass class B: pass +[builtins fixtures/tuple.pyi] [out] [case testInferringLvarTypesInNestedTupleAssignment2] @@ -666,6 +668,7 @@ def f(a: T, b: T) -> T: pass class A: pass class B(A, I, J): pass class C(A, I, J): pass +[builtins fixtures/tuple.pyi] -- Generic function inference with function arguments @@ -1143,6 +1146,7 @@ def f(x: AnyStr) -> Tuple[AnyStr]: pass x = None (x,) = f('') reveal_type(x) # N: Revealed type is 'builtins.str' +[builtins fixtures/tuple.pyi] -- Inferring attribute types @@ -2174,6 +2178,7 @@ def f(): pass def g(x: Union[int, str]): pass c = a if f() else b g(c) # E: Argument 1 to "g" has incompatible type "Union[int, str, Tuple[Any, ...]]"; expected "Union[int, str]" +[builtins fixtures/tuple.pyi] [case testUnificationMultipleInheritance] class A: pass @@ -2291,6 +2296,7 @@ def f(x: T) -> Tuple[T]: x = None (x,) = f('') reveal_type(x) # N: Revealed type is 'builtins.str' +[builtins fixtures/tuple.pyi] [out] [case testNoCrashOnPartialVariable2] @@ -2302,6 +2308,7 @@ def f() -> Tuple[T]: x = None if int(): (x,) = f() +[builtins 
fixtures/tuple.pyi] [out] [case testNoCrashOnPartialVariable3] @@ -2313,6 +2320,7 @@ def f(x: T) -> Tuple[T, T]: x = None (x, x) = f('') reveal_type(x) # N: Revealed type is 'builtins.str' +[builtins fixtures/tuple.pyi] [out] [case testInferenceNestedTuplesFromGenericIterable] @@ -2688,6 +2696,7 @@ def bar() -> None: _, _ = t _ = 0 _ = '' +[builtins fixtures/tuple.pyi] [case testUnusedTargetMultipleTargets] def foo() -> None: @@ -2766,6 +2775,7 @@ def f() -> None: with C() as _: pass _ = 0 _ = '' +[builtins fixtures/tuple.pyi] [case testUnusedTargetNotExceptClause] # Things don't work for except clauses. @@ -2967,6 +2977,7 @@ def f() -> None: class C: def __init__(self, a: int) -> None: self.a = a +[builtins fixtures/tuple.pyi] [case testUnionGenericWithBoundedVariable] from typing import Generic, TypeVar, Union diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test index 9c78bb382cc8..09c174a5d41a 100644 --- a/test-data/unit/check-isinstance.test +++ b/test-data/unit/check-isinstance.test @@ -5,6 +5,7 @@ def f(): x, y # Prevent independent redefinition y = x # E: Incompatible types in assignment (expression has type "object", variable has type "int") x = 2 y = x +[builtins fixtures/tuple.pyi] [case testJoinAny] from typing import List, Any diff --git a/test-data/unit/check-kwargs.test b/test-data/unit/check-kwargs.test index 1574bb849e0a..1dd450caae1b 100644 --- a/test-data/unit/check-kwargs.test +++ b/test-data/unit/check-kwargs.test @@ -119,17 +119,20 @@ f(otter=x) # E: Unexpected keyword argument "otter" for "f"; did you mean "btter def f(other: 'A', *atter: 'A') -> None: pass # N: "f" defined here f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? 
class A: pass +[builtins fixtures/tuple.pyi] [case testKeywordMisspellingOnlyVarArgs] def f(*other: 'A') -> None: pass # N: "f" defined here f(otter=A()) # E: Unexpected keyword argument "otter" for "f" class A: pass +[builtins fixtures/tuple.pyi] [case testKeywordMisspellingVarArgsDifferentTypes] def f(other: 'B', *atter: 'A') -> None: pass # N: "f" defined here f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? class A: pass class B: pass +[builtins fixtures/tuple.pyi] [case testKeywordMisspellingVarKwargs] def f(other: 'A', **atter: 'A') -> None: pass diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test index 96a08f344c45..773a2e36f6a0 100644 --- a/test-data/unit/check-literal.test +++ b/test-data/unit/check-literal.test @@ -14,6 +14,7 @@ def f2(x: 'A B') -> None: pass # E: Invalid type comment or annotation def g2(x: Literal['A B']) -> None: pass reveal_type(f2) # N: Revealed type is 'def (x: Any)' reveal_type(g2) # N: Revealed type is 'def (x: Literal['A B'])' +[builtins fixtures/tuple.pyi] [out] [case testLiteralInvalidTypeComment] @@ -34,6 +35,7 @@ def g(x): reveal_type(f) # N: Revealed type is 'def (x: Any)' reveal_type(g) # N: Revealed type is 'def (x: Literal['A['])' +[builtins fixtures/tuple.pyi] [out] [case testLiteralFromTypingWorks] @@ -78,6 +80,7 @@ def bar(x: Tuple[Literal[2]]) -> None: ... 
reveal_type(x) # N: Revealed type is 'Tuple[Any]' reveal_type(y) # N: Revealed type is 'Tuple[Literal[2]]' reveal_type(bar) # N: Revealed type is 'def (x: Tuple[Literal[2]])' +[builtins fixtures/tuple.pyi] [out] [case testLiteralInsideOtherTypesPython2] @@ -116,6 +119,7 @@ def bar(x): reveal_type(x) # N: Revealed type is 'Union[Tuple[Any], None]' reveal_type(y) # N: Revealed type is 'Union[Tuple[Literal[2]], None]' reveal_type(bar) # N: Revealed type is 'def (x: Tuple[Literal[2]])' +[builtins fixtures/tuple.pyi] [out] [case testLiteralValidExpressionsInStringsPython3] @@ -286,6 +290,7 @@ accepts_bytes(c_hint) accepts_bytes(a_alias) # E: Argument 1 to "accepts_bytes" has incompatible type "Literal['foo']"; expected "Literal[b'foo']" accepts_bytes(b_alias) # E: Argument 1 to "accepts_bytes" has incompatible type "Literal['foo']"; expected "Literal[b'foo']" accepts_bytes(c_alias) +[builtins fixtures/tuple.pyi] [out] [case testLiteralMixingUnicodeAndBytesPython2] @@ -461,6 +466,7 @@ reveal_type(c_str_wrapper_alias) # N: Revealed type is '__main__.Wrap[Liter reveal_type(a_bytes_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal['foo']]' reveal_type(b_bytes_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal['foo']]' reveal_type(c_bytes_wrapper_alias) # N: Revealed type is '__main__.Wrap[Literal[b'foo']]' +[builtins fixtures/tuple.pyi] [out] [case testLiteralMixingUnicodeAndBytesPython2ForwardStrings] @@ -657,6 +663,7 @@ a1 = c2 # E: Incompatible types in assignment (expression has type "Literal['¬ a1 = a3 a1 = b3 a1 = c3 # E: Incompatible types in assignment (expression has type "Literal['¬b ∧ λ(p)']", variable has type "Literal['\x00¬b ∧ λ(p)']") +[builtins fixtures/tuple.pyi] [out skip-path-normalization] @@ -668,6 +675,7 @@ reveal_type(x) # N: Revealed type is 'Literal[3]' y: Foo["hello"] reveal_type(y) # N: Revealed type is 'Literal['hello']' +[builtins fixtures/tuple.pyi] [out] [case testLiteralRenamingImportViaAnotherImportWorks] @@ -682,6 +690,7 
@@ reveal_type(y) # N: Revealed type is 'Literal[4]' [file other_module.py] from typing_extensions import Literal as Foo Bar = Foo[4] +[builtins fixtures/tuple.pyi] [out] [case testLiteralRenamingImportNameConfusion] @@ -691,6 +700,7 @@ x: Foo["Foo"] reveal_type(x) # N: Revealed type is 'Literal['Foo']' y: Foo[Foo] # E: Literal[...] must have at least one parameter +[builtins fixtures/tuple.pyi] [out] [case testLiteralBadRawExpressionWithBadType] @@ -741,6 +751,7 @@ def f3(x: Literal[-300]) -> Literal[-300]: pass reveal_type(f1) # N: Revealed type is 'def (x: Literal[4]) -> Literal[4]' reveal_type(f2) # N: Revealed type is 'def (x: Literal[42]) -> Literal[42]' reveal_type(f3) # N: Revealed type is 'def (x: Literal[-300]) -> Literal[-300]' +[builtins fixtures/tuple.pyi] [out] [case testLiteralBasicBoolUsage] @@ -794,6 +805,7 @@ reveal_type(f2) # N: Revealed type is 'def (x: Literal[' foo bar ']) -> Liter reveal_type(f3) # N: Revealed type is 'def (x: Literal[' foo bar ']) -> Literal[' foo bar ']' reveal_type(f4) # N: Revealed type is 'def (x: Literal['foo']) -> Literal['foo']' reveal_type(f5) # N: Revealed type is 'def (x: Literal['foo']) -> Literal['foo']' +[builtins fixtures/tuple.pyi] [out] [case testLiteralBasicStrUsageSlashes] @@ -804,6 +816,7 @@ b: Literal["foo\nbar"] reveal_type(a) reveal_type(b) +[builtins fixtures/tuple.pyi] [out skip-path-normalization] main:6: note: Revealed type is 'Literal['foo\\nbar']' main:7: note: Revealed type is 'Literal['foo\nbar']' @@ -821,6 +834,7 @@ def f3(x: Literal[None]) -> Literal[None]: pass reveal_type(f1) # N: Revealed type is 'def (x: None)' reveal_type(f2) # N: Revealed type is 'def (x: None)' reveal_type(f3) # N: Revealed type is 'def (x: None)' +[builtins fixtures/tuple.pyi] [out] [case testLiteralCallingUnionFunction] @@ -846,6 +860,7 @@ func(c) func(d) func(e) func(f) # E: Argument 1 to "func" has incompatible type "Union[Literal['foo'], Literal['bar'], Literal['baz']]"; expected "Union[Literal['foo'], 
Literal['bar'], Literal[' foo ']]" +[builtins fixtures/tuple.pyi] [out] [case testLiteralDisallowAny] @@ -859,6 +874,7 @@ b: Literal[BadAlias] # E: Parameter 1 of Literal[...] cannot reveal_type(a) # N: Revealed type is 'Any' reveal_type(b) # N: Revealed type is 'Any' +[builtins fixtures/tuple.pyi] [out] [case testLiteralDisallowActualTypes] @@ -906,6 +922,7 @@ b: Literal[" foo ".trim()] # E: Invalid type: Literal[...] cannot contain a c: Literal[+42] # E: Invalid type: Literal[...] cannot contain arbitrary expressions d: Literal[~12] # E: Invalid type: Literal[...] cannot contain arbitrary expressions e: Literal[dummy()] # E: Invalid type: Literal[...] cannot contain arbitrary expressions +[builtins fixtures/tuple.pyi] [out] [case testLiteralDisallowCollections] @@ -914,6 +931,7 @@ a: Literal[{"a": 1, "b": 2}] # E: Invalid type: Literal[...] cannot contain a b: Literal[{1, 2, 3}] # E: Invalid type: Literal[...] cannot contain arbitrary expressions c: {"a": 1, "b": 2} # E: Invalid type comment or annotation d: {1, 2, 3} # E: Invalid type comment or annotation +[builtins fixtures/tuple.pyi] [case testLiteralDisallowCollections2] @@ -923,6 +941,7 @@ a: (1, 2, 3) # E: Syntax error in type annotation \ b: Literal[[1, 2, 3]] # E: Parameter 1 of Literal[...] is invalid c: [1, 2, 3] # E: Bracketed expression "[...]" is not valid as a type \ # N: Did you mean "List[...]"? +[builtins fixtures/tuple.pyi] [out] [case testLiteralDisallowCollectionsTypeAlias] @@ -955,6 +974,7 @@ at = Literal[T] # E: Parameter 1 of Literal[...] is invalid a: at def foo(b: Literal[T]) -> T: pass # E: Parameter 1 of Literal[...] 
is invalid +[builtins fixtures/tuple.pyi] [out] @@ -989,6 +1009,7 @@ a: Literal[1, 2, 3] b: Literal[(1, 2, 3)] reveal_type(a) # N: Revealed type is 'Union[Literal[1], Literal[2], Literal[3]]' reveal_type(b) # N: Revealed type is 'Union[Literal[1], Literal[2], Literal[3]]' +[builtins fixtures/tuple.pyi] [out] [case testLiteralNestedUsage] @@ -1010,6 +1031,7 @@ basic_mode = Literal["r", "w", "a"] basic_with_plus = Literal["r+", "w+", "a+"] combined: Literal[basic_mode, basic_with_plus] reveal_type(combined) # N: Revealed type is 'Union[Literal['r'], Literal['w'], Literal['a'], Literal['r+'], Literal['w+'], Literal['a+']]' +[builtins fixtures/tuple.pyi] [out] [case testLiteralBiasTowardsAssumingForwardReference] @@ -1027,6 +1049,7 @@ d: "Literal['Foo']" reveal_type(d) # N: Revealed type is 'Literal['Foo']' class Foo: pass +[builtins fixtures/tuple.pyi] [out] [case testLiteralBiasTowardsAssumingForwardReferenceForTypeAliases] @@ -1048,6 +1071,7 @@ e: Literal[Foo, 'Foo'] reveal_type(e) # N: Revealed type is 'Union[Literal[5], Literal['Foo']]' Foo = Literal[5] +[builtins fixtures/tuple.pyi] [out] [case testLiteralBiasTowardsAssumingForwardReferencesForTypeComments] @@ -1065,6 +1089,7 @@ reveal_type(c) # N: Revealed type is 'Literal['Foo']' d = None # type: Literal[Foo] # E: Parameter 1 of Literal[...] 
is invalid class Foo: pass +[builtins fixtures/tuple.pyi] [out] @@ -1083,6 +1108,7 @@ c: int foo(a) # E: Argument 1 to "foo" has incompatible type "Literal[1]"; expected "Literal[3]" foo(b) # E: Argument 1 to "foo" has incompatible type "Literal[2]"; expected "Literal[3]" foo(c) # E: Argument 1 to "foo" has incompatible type "int"; expected "Literal[3]" +[builtins fixtures/tuple.pyi] [out] [case testLiteralCallingFunctionWithUnionLiteral] @@ -1098,6 +1124,7 @@ foo(a) foo(b) foo(c) # E: Argument 1 to "foo" has incompatible type "Union[Literal[4], Literal[5]]"; expected "Union[Literal[1], Literal[2], Literal[3]]" foo(d) # E: Argument 1 to "foo" has incompatible type "int"; expected "Union[Literal[1], Literal[2], Literal[3]]" +[builtins fixtures/tuple.pyi] [out] [case testLiteralCallingFunctionWithStandardBase] @@ -1111,6 +1138,7 @@ c: Literal[4, 'foo'] foo(a) foo(b) foo(c) # E: Argument 1 to "foo" has incompatible type "Union[Literal[4], Literal['foo']]"; expected "int" +[builtins fixtures/tuple.pyi] [out] [case testLiteralCheckSubtypingStrictOptional] @@ -1136,6 +1164,7 @@ fc(lit) # E: Argument 1 to "fc" has incompatible type "Literal[1]"; expected "N f_lit(a) f_lit(b) f_lit(c) # E: Argument 1 to "f_lit" has incompatible type "None"; expected "Literal[1]" +[builtins fixtures/tuple.pyi] [out] [case testLiteralCheckSubtypingNoStrictOptional] @@ -1161,6 +1190,7 @@ fc(lit) # E: Argument 1 to "fc" has incompatible type "Literal[1]"; expected "N f_lit(a) f_lit(b) f_lit(c) +[builtins fixtures/tuple.pyi] [out] [case testLiteralCallingOverloadedFunction] @@ -1225,6 +1255,7 @@ c2: Contravariant[Literal[1, 2]] c3: Contravariant[Literal[1, 2, 3]] c2 = c1 # E: Incompatible types in assignment (expression has type "Contravariant[Literal[1]]", variable has type "Contravariant[Union[Literal[1], Literal[2]]]") c2 = c3 +[builtins fixtures/tuple.pyi] [out] [case testLiteralInListAndSequence] @@ -1264,6 +1295,7 @@ from typing_extensions import Literal Bar1 = Literal[15] Bar2 = 
Literal[14] c: Literal[15] +[builtins fixtures/tuple.pyi] -- @@ -1423,6 +1455,7 @@ def f4(x: Literal[1]) -> Literal[1]: def f5(x: Literal[2]) -> Literal[1]: return x # E: Incompatible return value type (got "Literal[2]", expected "Literal[1]") +[builtins fixtures/tuple.pyi] [out] @@ -1524,6 +1557,7 @@ reveal_type(func(b)) # N: Revealed type is 'builtins.int' # with the output we would have gotten if we replaced int and the # Literal types here with regular classes/subclasses. reveal_type(func(c)) # N: Revealed type is 'builtins.object' +[builtins fixtures/tuple.pyi] [out] [case testLiteralOverloadProhibitUnsafeOverlaps] @@ -1548,6 +1582,7 @@ def func3(x: Literal['a']) -> Literal[2]: ... @overload def func3(x: str) -> int: ... def func3(x): pass +[builtins fixtures/tuple.pyi] [out] [case testLiteralInferredInOverloadContextUnionMath] @@ -1592,6 +1627,7 @@ reveal_type(func(f)) # E: No overload variant of "func" matches argument type " # N: def func(x: Union[Literal[3], Literal[4], Literal[5], Literal[6]]) -> B \ # N: def func(x: Literal['foo']) -> C \ # N: Revealed type is 'Any' +[builtins fixtures/tuple.pyi] [out] [case testLiteralInferredInOverloadContextUnionMathOverloadingReturnsBestType] @@ -1615,6 +1651,7 @@ reveal_type(f(2)) # N: Revealed type is 'builtins.int' reveal_type(f(y)) # N: Revealed type is 'builtins.object' reveal_type(f(z)) # N: Revealed type is 'builtins.int' \ # E: Argument 1 to "f" has incompatible type "Union[Literal[1], Literal[2], Literal['three']]"; expected "Union[Literal[1], Literal[2]]" +[builtins fixtures/tuple.pyi] [out] [case testLiteralInferredInOverloadContextWithTypevars] @@ -1665,6 +1702,7 @@ reveal_type(f4("foo")) # N: Revealed type is 'builtins.str' # Note: first overload is selected and prevents the typevar from # ever inferring a Literal["something"]. 
reveal_type(f4(b)) # N: Revealed type is 'builtins.str' +[builtins fixtures/tuple.pyi] [out] [case testLiteralInferredInOverloadContextUnionMathTrickyOverload] @@ -1683,6 +1721,7 @@ x: Literal['a', 'b'] y: Literal['a', 'b'] f(x, y) # E: Argument 1 to "f" has incompatible type "Union[Literal['a'], Literal['b']]"; expected "Literal['a']" \ # E: Argument 2 to "f" has incompatible type "Union[Literal['a'], Literal['b']]"; expected "Literal['a']" \ +[builtins fixtures/tuple.pyi] [out] @@ -1793,6 +1832,7 @@ class Bad1(Literal[3]): pass # E: Invalid base class "Literal" class Bad2(Renamed[3]): pass # E: Invalid base class "Renamed" class Bad3(indirect.Literal[3]): pass # E: Invalid base class "indirect.Literal" class Bad4(Alias): pass # E: Invalid base class "Alias" +[builtins fixtures/tuple.pyi] [out] [case testLiteralErrorsWhenInvoked-skip] @@ -1850,6 +1890,7 @@ expects_literal(foo(foo(5))) # E: Argument 1 to "foo" has incompatible type " expects_int(a) expects_int(foo(a)) expects_int(foo(foo(a))) +[builtins fixtures/tuple.pyi] [out] [case testLiteralAndGenericWithUnion] @@ -1861,6 +1902,7 @@ def identity(x: T) -> T: return x a: Union[int, Literal['foo']] = identity('foo') b: Union[int, Literal['foo']] = identity('bar') # E: Argument 1 to "identity" has incompatible type "Literal['bar']"; expected "Union[int, Literal['foo']]" +[builtins fixtures/tuple.pyi] [out] [case testLiteralAndGenericsNoMatch] @@ -1914,6 +1956,7 @@ expects_literal(Wrapper(5).inner()) # E: Argument 1 to "expects_literal" has in expects_literal_wrapper(Wrapper(a)) expects_literal_wrapper(Wrapper(3)) expects_literal_wrapper(Wrapper(5)) # E: Argument 1 to "Wrapper" has incompatible type "Literal[5]"; expected "Literal[3]" +[builtins fixtures/tuple.pyi] [out] [case testLiteralAndGenericsRespectsUpperBound] @@ -1953,6 +1996,7 @@ reveal_type(func2(a)) # N: Revealed type is 'Literal[3]' reveal_type(func2(4)) # N: Revealed type is 'builtins.int*' reveal_type(func2(b)) # N: Revealed type is 'Literal[4]' 
reveal_type(func2(c)) # N: Revealed type is 'builtins.int*' +[builtins fixtures/tuple.pyi] [out] [case testLiteralAndGenericsRespectsValueRestriction] @@ -2009,6 +2053,7 @@ reveal_type(func2("foo")) # N: Revealed type is 'builtins.str*' reveal_type(func2(s1)) # N: Revealed type is 'builtins.str*' reveal_type(func2("bar")) # N: Revealed type is 'builtins.str*' reveal_type(func2(s2)) # N: Revealed type is 'builtins.str*' +[builtins fixtures/tuple.pyi] [out] [case testLiteralAndGenericsWithOverloads] @@ -2031,6 +2076,7 @@ reveal_type(func1(identity(4))) # N: Revealed type is 'Literal[19]' reveal_type(func1(identity(5))) # N: Revealed type is 'builtins.int' reveal_type(func1(identity(a))) # N: Revealed type is 'Literal[19]' reveal_type(func1(identity(b))) # N: Revealed type is 'builtins.int' +[builtins fixtures/tuple.pyi] -- -- Interactions with meets @@ -2694,6 +2740,7 @@ force_bytes(reveal_type(a)) # E: Argument 1 to "force_bytes" has incompatible force_bytes(reveal_type(b)) # E: Argument 1 to "force_bytes" has incompatible type "Literal['foo']"; expected "Literal[b'foo']" \ # N: Revealed type is 'Literal['foo']' force_bytes(reveal_type(c)) # N: Revealed type is 'Literal[b'foo']' +[builtins fixtures/tuple.pyi] [out] [case testLiteralFinalStringTypesPython2UnicodeLiterals] @@ -2804,6 +2851,7 @@ over_int(reveal_type(w3)) # E: Argument 1 to "over_int" ha over_literal(reveal_type(w3)) # N: Revealed type is '__main__.WrapperClass[Literal[99]]' over_int(reveal_type(WrapperClass(var3))) # N: Revealed type is '__main__.WrapperClass[builtins.int]' over_literal(reveal_type(WrapperClass(var3))) # N: Revealed type is '__main__.WrapperClass[Literal[99]]' +[builtins fixtures/tuple.pyi] [out] [case testLiteralFinalUsedInLiteralType] @@ -2827,6 +2875,7 @@ c_wrap: Literal[4, c] # E: Parameter 2 of Literal[...] is invalid \ # E: Variable "__main__.c" is not valid as a type d_wrap: Literal[4, d] # E: Parameter 2 of Literal[...] 
is invalid \ # E: Variable "__main__.d" is not valid as a type +[builtins fixtures/tuple.pyi] [out] [case testLiteralWithFinalPropagation] @@ -2840,6 +2889,7 @@ def expect_3(x: Literal[3]) -> None: pass expect_3(a) expect_3(b) expect_3(c) # E: Argument 1 to "expect_3" has incompatible type "int"; expected "Literal[3]" +[builtins fixtures/tuple.pyi] [out] [case testLiteralWithFinalPropagationIsNotLeaking] @@ -2907,6 +2957,7 @@ expects_red(b) # E: Argument 1 to "expects_red" has incompatible type "Literal[ reveal_type(expects_red) # N: Revealed type is 'def (x: Literal[__main__.Color.RED])' reveal_type(r) # N: Revealed type is 'Literal[__main__.Color.RED]' reveal_type(r.func()) # N: Revealed type is 'builtins.int' +[builtins fixtures/tuple.pyi] [out] [case testLiteralWithEnumsDefinedInClass] @@ -2929,6 +2980,7 @@ foo(g) # E: Argument 1 to "foo" has incompatible type "Literal[Color.GREEN]"; e reveal_type(foo) # N: Revealed type is 'def (x: Literal[__main__.Wrapper.Color.RED])' reveal_type(r) # N: Revealed type is 'Literal[__main__.Wrapper.Color.RED]' +[builtins fixtures/tuple.pyi] [out] [case testLiteralWithEnumsSimilarDefinitions] @@ -2963,6 +3015,7 @@ from enum import Enum class Test(Enum): FOO = 1 BAR = 2 +[builtins fixtures/tuple.pyi] [out] [case testLiteralWithEnumsDeclaredUsingCallSyntax] @@ -3013,6 +3066,7 @@ expects_int(a) # E: Argument 1 to "expects_int" has incompatible type "Literal[ expects_int(b) expects_int(c) expects_int(d) # E: Argument 1 to "expects_int" has incompatible type "Literal[D.FOO]"; expected "int" +[builtins fixtures/tuple.pyi] [out] [case testLiteralWithEnumsAliases] @@ -3027,6 +3081,7 @@ Alias = Test x: Literal[Alias.FOO] reveal_type(x) # N: Revealed type is 'Literal[__main__.Test.FOO]' +[builtins fixtures/tuple.pyi] [out] [case testLiteralUsingEnumAttributesInLiteralContexts] @@ -3060,6 +3115,7 @@ var2 = Test2.FOO final2: Final = Test2.FOO expects_test2_foo(var2) # E: Argument 1 to "expects_test2_foo" has incompatible type "Test2"; 
expected "Literal[Test2.FOO]" expects_test2_foo(final2) +[builtins fixtures/tuple.pyi] [out] [case testLiteralUsingEnumAttributeNamesInLiteralContexts] @@ -3093,6 +3149,7 @@ reveal_type(Test2.FOO.name) # N: Revealed type is 'Literal['FOO']?' reveal_type(Test3.FOO.name) # N: Revealed type is 'Literal['FOO']?' reveal_type(Test4.FOO.name) # N: Revealed type is 'Literal['FOO']?' reveal_type(Test5.FOO.name) # N: Revealed type is 'Literal['FOO']?' +[builtins fixtures/tuple.pyi] [out] [case testLiteralBinderLastValueErased] @@ -3159,3 +3216,4 @@ x: Literal[Foo.A] y: Literal[F.A] reveal_type(x) # N: Revealed type is 'Literal[__main__.Foo.A]' reveal_type(y) # N: Revealed type is 'Literal[__main__.Foo.A]' +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index bb14c1c007ba..b7f7c9c47036 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -2473,6 +2473,7 @@ x = m.One(name="Foo") reveal_type(x.name) class Two: pass +[builtins fixtures/tuple.pyi] [out] tmp/m/two.py:3: note: Revealed type is 'builtins.str' @@ -2490,6 +2491,7 @@ x = m.One(name="Foo") reveal_type(x.name) class Two: pass +[builtins fixtures/tuple.pyi] [out] tmp/m/two.py:3: note: Revealed type is 'builtins.str' @@ -2807,3 +2809,4 @@ CustomDict = TypedDict( }, ) [typing fixtures/typing-full.pyi] +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-multiple-inheritance.test b/test-data/unit/check-multiple-inheritance.test index 42e7b13ef34b..2605b8a1d340 100644 --- a/test-data/unit/check-multiple-inheritance.test +++ b/test-data/unit/check-multiple-inheritance.test @@ -644,6 +644,7 @@ class OrderedItemsView(ItemsView[K, V], Sequence[Tuple[K, V]]): class OrderedItemsViewDirect(ItemsView[K, V], Sequence[Tuple[K, V]]): pass +[builtins fixtures/tuple.pyi] [case testGenericMultipleOverrideReplace] from typing import TypeVar, Generic, Union diff --git a/test-data/unit/check-namedtuple.test 
b/test-data/unit/check-namedtuple.test index e19589fba35d..41dd49cd7626 100644 --- a/test-data/unit/check-namedtuple.test +++ b/test-data/unit/check-namedtuple.test @@ -8,6 +8,7 @@ b = x[0] a = x[1] a, b, c = x # E: Need more than 2 values to unpack (3 expected) x[2] # E: Tuple index out of range +[builtins fixtures/tuple.pyi] [case testNamedTupleWithTupleFieldNamesUsedAsTuple] from collections import namedtuple @@ -19,6 +20,7 @@ b = x[0] a = x[1] a, b, c = x # E: Need more than 2 values to unpack (3 expected) x[2] # E: Tuple index out of range +[builtins fixtures/tuple.pyi] [case testNamedTupleUnicode_python2] from __future__ import unicode_literals @@ -34,6 +36,7 @@ X = namedtuple('X', ('x', 'y')) # type: ignore from collections import namedtuple X = namedtuple('X', 'x, _y, _z') # E: namedtuple() field names cannot start with an underscore: _y, _z +[builtins fixtures/tuple.pyi] [case testNamedTupleAccessingAttributes] from collections import namedtuple @@ -43,6 +46,7 @@ x = None # type: X x.x x.y x.z # E: "X" has no attribute "z" +[builtins fixtures/tuple.pyi] [case testNamedTupleClassPython35] # flags: --python-version 3.5 @@ -50,6 +54,7 @@ from typing import NamedTuple class A(NamedTuple): x = 3 # type: int +[builtins fixtures/tuple.pyi] [out] main:4: error: NamedTuple class syntax is only supported in Python 3.6 @@ -62,6 +67,7 @@ from typing import NamedTuple class A(NamedTuple): x: int +[builtins fixtures/tuple.pyi] [case testNamedTupleAttributesAreReadOnly] from collections import namedtuple @@ -77,6 +83,7 @@ a = None # type: A a.x = 5 # E: Property "x" defined in "X" is read-only a.y = 5 # E: Property "y" defined in "X" is read-only -- a.z = 5 # not supported yet +[builtins fixtures/tuple.pyi] [case testTypingNamedTupleAttributesAreReadOnly] @@ -91,6 +98,7 @@ class A(NamedTuple): a: HasX = A("foo") a.x = "bar" +[builtins fixtures/tuple.pyi] [out] main:10: error: Incompatible types in assignment (expression has type "A", variable has type "HasX") main:10: 
note: Protocol member HasX.x expected settable variable, got read-only attribute @@ -105,6 +113,7 @@ x.x x.z # E: "X" has no attribute "z" x = X(1) # E: Too few arguments for "X" x = X(1, 2, 3) # E: Too many arguments for "X" +[builtins fixtures/tuple.pyi] [case testCreateNamedTupleWithKeywordArguments] from collections import namedtuple @@ -114,6 +123,7 @@ x = X(x=1, y='x') x = X(1, y='x') x = X(x=1, z=1) # E: Unexpected keyword argument "z" for "X" x = X(y=1) # E: Missing positional argument "x" in call to "X" +[builtins fixtures/tuple.pyi] [case testNamedTupleCreateAndUseAsTuple] from collections import namedtuple @@ -122,6 +132,7 @@ X = namedtuple('X', 'x y') x = X(1, 'x') a, b = x a, b, c = x # E: Need more than 2 values to unpack (3 expected) +[builtins fixtures/tuple.pyi] [case testNamedTupleAdditionalArgs] from collections import namedtuple @@ -169,6 +180,7 @@ x, y = n if int(): x = y # E: Incompatible types in assignment (expression has type "str", variable has type "int") [targets __main__, __main__.N.__new__, __main__.N._asdict, __main__.N._make, __main__.N._replace] +[builtins fixtures/tuple.pyi] [case testNamedTupleWithTupleFieldNamesWithItemTypes] @@ -183,6 +195,7 @@ i = n.b # type: int # E: Incompatible types in assignment (expression has type x, y = n if int(): x = y # E: Incompatible types in assignment (expression has type "str", variable has type "int") +[builtins fixtures/tuple.pyi] [case testNamedTupleConstructorArgumentTypes] @@ -193,6 +206,7 @@ n = N('x', 'x') # E: Argument 1 to "N" has incompatible type "str"; expected "in n = N(1, b=2) # E: Argument "b" to "N" has incompatible type "int"; expected "str" N(1, 'x') N(b='x', a=1) +[builtins fixtures/tuple.pyi] [case testNamedTupleAsBaseClass] from typing import NamedTuple @@ -209,6 +223,7 @@ if int(): i, s = x if int(): s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") +[builtins fixtures/tuple.pyi] [case testNamedTupleAsBaseClass2] from typing 
import NamedTuple @@ -224,6 +239,7 @@ if int(): i, s = x if int(): s, s = x # E: Incompatible types in assignment (expression has type "int", variable has type "str") +[builtins fixtures/tuple.pyi] [case testNamedTuplesTwoAsBaseClasses] @@ -232,6 +248,7 @@ A = NamedTuple('A', [('a', int)]) B = NamedTuple('B', [('a', int)]) class X(A, B): # E: Class has two incompatible bases derived from tuple pass +[builtins fixtures/tuple.pyi] [case testNamedTuplesTwoAsBaseClasses2] @@ -239,6 +256,7 @@ from typing import NamedTuple A = NamedTuple('A', [('a', int)]) class X(A, NamedTuple('B', [('a', int)])): # E: Class has two incompatible bases derived from tuple pass +[builtins fixtures/tuple.pyi] [case testNamedTupleSelfTypeWithNamedTupleAsBase] @@ -254,6 +272,7 @@ class B(A): i, s = self i, i = self # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") +[builtins fixtures/tuple.pyi] [out] @@ -273,6 +292,7 @@ class B(A): variable has type "str") i, i = self # E: Incompatible types in assignment (expression has type "str", \ variable has type "int") +[builtins fixtures/tuple.pyi] [out] @@ -297,6 +317,7 @@ if int(): t = b if int(): a = b +[builtins fixtures/tuple.pyi] [case testNamedTupleSimpleTypeInference] @@ -326,6 +347,7 @@ MyNamedTuple.x # E: "Type[MyNamedTuple]" has no attribute "x" [case testNamedTupleEmptyItems] from typing import NamedTuple A = NamedTuple('A', []) +[builtins fixtures/tuple.pyi] [case testNamedTupleProperty] @@ -383,6 +405,7 @@ x = None # type: X reveal_type(x._replace()) # N: Revealed type is 'Tuple[builtins.int, builtins.str, fallback=__main__.X]' x._replace(x=5) x._replace(y=5) # E: Argument "y" to "_replace" of "X" has incompatible type "int"; expected "str" +[builtins fixtures/tuple.pyi] [case testNamedTupleMake] from typing import NamedTuple @@ -403,6 +426,7 @@ from typing import NamedTuple X = NamedTuple('X', [('x', int), ('y', str)]) reveal_type(X._fields) # N: Revealed type is 'Tuple[builtins.str, 
builtins.str]' +[builtins fixtures/tuple.pyi] [case testNamedTupleSource] from typing import NamedTuple @@ -411,6 +435,7 @@ X = NamedTuple('X', [('x', int), ('y', str)]) reveal_type(X._source) # N: Revealed type is 'builtins.str' x = None # type: X reveal_type(x._source) # N: Revealed type is 'builtins.str' +[builtins fixtures/tuple.pyi] [case testNamedTupleUnit] from typing import NamedTuple @@ -419,6 +444,7 @@ X = NamedTuple('X', []) x = X() # type: X x._replace() x._fields[0] # E: Tuple index out of range +[builtins fixtures/tuple.pyi] [case testNamedTupleJoinNamedTuple] from typing import NamedTuple @@ -469,6 +495,7 @@ g(D()) # E: Argument 1 to "g" has incompatible type "D"; expected "C" y = None # type: C if int(): y = D() # E: Incompatible types in assignment (expression has type "D", variable has type "C") +[builtins fixtures/tuple.pyi] [case testNamedTupleSelfTypeMethod] from typing import TypeVar, NamedTuple @@ -488,6 +515,7 @@ b = None # type: B b = B('').member() a = B('') a = B('').member() +[builtins fixtures/tuple.pyi] [case testNamedTupleSelfTypeReplace] from typing import NamedTuple, TypeVar @@ -502,6 +530,7 @@ class B(A): reveal_type(B('hello')._replace(x='')) # N: Revealed type is 'Tuple[builtins.str, fallback=__main__.B]' b = None # type: B b = B('hello')._replace(x='') +[builtins fixtures/tuple.pyi] [case testNamedTupleSelfTypeMake] from typing import NamedTuple, TypeVar @@ -520,7 +549,7 @@ b = B._make(['']) # type: B [case testNamedTupleIncompatibleRedefinition] from typing import NamedTuple class Crash(NamedTuple): - count: int # E: Incompatible types in assignment (expression has type "int", base class "tuple" defined the type as "Callable[[Tuple[int, ...], Any], int]") + count: int # E: Incompatible types in assignment (expression has type "int", base class "tuple" defined the type as "Callable[[Tuple[int, ...], object], int]") [builtins fixtures/tuple.pyi] [case testNamedTupleInClassNamespace] @@ -532,12 +561,14 @@ class C: def g(self): A = 
NamedTuple('A', [('y', int)]) C.A # E: "Type[C]" has no attribute "A" +[builtins fixtures/tuple.pyi] [case testNamedTupleInFunction] from typing import NamedTuple def f() -> None: A = NamedTuple('A', [('x', int)]) A # E: Name 'A' is not defined +[builtins fixtures/tuple.pyi] [case testNamedTupleForwardAsUpperBound] from typing import NamedTuple, TypeVar, Generic @@ -551,6 +582,7 @@ reveal_type(G[M]().x.x) # N: Revealed type is 'builtins.int' reveal_type(G[M]().x[0]) # N: Revealed type is 'builtins.int' M = NamedTuple('M', [('x', int)]) +[builtins fixtures/tuple.pyi] [out] [case testNamedTupleWithImportCycle] @@ -569,6 +601,7 @@ def f(x: a.X) -> None: reveal_type(x) x = a.X(1) reveal_type(x) +[builtins fixtures/tuple.pyi] [out] tmp/b.py:4: note: Revealed type is 'Tuple[Any, fallback=a.X]' tmp/b.py:6: note: Revealed type is 'Tuple[Any, fallback=a.X]' @@ -588,6 +621,7 @@ def f(x: a.N) -> None: if int(): x = a.N(1) reveal_type(x) +[builtins fixtures/tuple.pyi] [out] tmp/b.py:4: note: Revealed type is 'Tuple[Any, fallback=a.N]' tmp/b.py:7: note: Revealed type is 'Tuple[Any, fallback=a.N]' @@ -603,6 +637,7 @@ def bar(nt: MyNamedTuple) -> MyNamedTuple: x: MyNamedTuple reveal_type(x.parent) # N: Revealed type is 'Any' +[builtins fixtures/tuple.pyi] -- Some crazy self-referential named tuples and types dicts -- to be sure that everything works @@ -626,6 +661,7 @@ class B: return 'b' def aWithTuple(self, atuple: 'a.ATuple') -> str: return 'a' +[builtins fixtures/tuple.pyi] [out] [case testSelfRefNT1] @@ -751,6 +787,7 @@ from a import C from typing import NamedTuple tp = NamedTuple('tp', [('x', int)]) +[builtins fixtures/tuple.pyi] [out] [case testSubclassOfRecursiveNamedTuple] @@ -775,6 +812,7 @@ class Real(NamedTuple): def __sub__(self, other: Real) -> str: return "" class Fraction(Real): def __rsub__(self, other: Real) -> Real: return other # E: Signatures of "__rsub__" of "Fraction" and "__sub__" of "Real" are unsafely overlapping +[builtins fixtures/tuple.pyi] [case 
testForwardReferenceInNamedTuple] from typing import NamedTuple @@ -785,6 +823,7 @@ class A(NamedTuple): class B: pass +[builtins fixtures/tuple.pyi] [case testTypeNamedTupleClassmethod] from typing import Type, NamedTuple @@ -809,6 +848,7 @@ class CallableTuple(Thing): o = CallableTuple('hello ', 12) o() +[builtins fixtures/tuple.pyi] [case testNamedTupleSubclassMulti] from typing import NamedTuple @@ -837,6 +877,7 @@ class Child(Base): Base(param=10) Child(param=10) +[builtins fixtures/tuple.pyi] [case testNamedTupleClassMethodWithGenericReturnValue] from typing import TypeVar, Type, NamedTuple @@ -896,6 +937,7 @@ reveal_type(u.field_1) # N: Revealed type is 'typing.Mapping[Tuple[builtins.int reveal_type(u.field_2) # N: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.MyBaseTuple]' reveal_type(u[0]) # N: Revealed type is 'typing.Mapping[Tuple[builtins.int, builtins.int, fallback=__main__.MyBaseTuple], builtins.int]' reveal_type(u[1]) # N: Revealed type is 'Tuple[builtins.int, builtins.int, fallback=__main__.MyBaseTuple]' +[builtins fixtures/tuple.pyi] [case testAssignNamedTupleAsAttribute] from typing import NamedTuple @@ -905,3 +947,4 @@ class A: self.b = NamedTuple('x', [('s', str), ('n', int)]) # E: NamedTuple type as an attribute is not supported reveal_type(A().b) # N: Revealed type is 'Any' +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test index 6c64b241eaaa..d1bfda860a21 100644 --- a/test-data/unit/check-narrowing.test +++ b/test-data/unit/check-narrowing.test @@ -168,6 +168,7 @@ if x5["key"] is Key.A: reveal_type(x5) # N: Revealed type is 'TypedDict('__main__.TypedDict1', {'key': Literal[__main__.Key.A], 'foo': builtins.int})' else: reveal_type(x5) # N: Revealed type is 'TypedDict('__main__.TypedDict2', {'key': Literal[__main__.Key.B], 'foo': builtins.str})' +[builtins fixtures/tuple.pyi] [case testNarrowingParentWithIsInstanceBasic] from dataclasses import dataclass 
@@ -266,6 +267,7 @@ if x.key is Key.D: reveal_type(x) # E: Statement is unreachable else: reveal_type(x) # N: Revealed type is 'Union[__main__.Object1, __main__.Object2]' +[builtins fixtures/tuple.pyi] [case testNarrowingTypedDictParentMultipleKeys] # flags: --warn-unreachable @@ -492,6 +494,7 @@ else: # TODO: Is this a bug? Should we skip inferring Any for singleton types? reveal_type(x.key) # N: Revealed type is 'Union[Any, Literal[__main__.Key.B]]' reveal_type(x) # N: Revealed type is 'Union[__main__.Object1, __main__.Object2, Any]' +[builtins fixtures/tuple.pyi] [case testNarrowingParentsHierarchy] from typing import Union @@ -549,6 +552,7 @@ if y.child.same_for_1_and_2 is Key.A: else: reveal_type(y) # N: Revealed type is '__main__.Parent2' reveal_type(y.child) # N: Revealed type is '__main__.Child3' +[builtins fixtures/tuple.pyi] [case testNarrowingParentsHierarchyGenerics] from typing import Generic, TypeVar, Union @@ -610,6 +614,7 @@ if y["model"]["key"] is Key.C: else: reveal_type(y) # N: Revealed type is 'Union[TypedDict('__main__.Parent1', {'model': TypedDict('__main__.Model1', {'key': Literal[__main__.Key.A]}), 'foo': builtins.int}), TypedDict('__main__.Parent2', {'model': TypedDict('__main__.Model2', {'key': Literal[__main__.Key.B]}), 'bar': builtins.str})]' reveal_type(y["model"]) # N: Revealed type is 'Union[TypedDict('__main__.Model1', {'key': Literal[__main__.Key.A]}), TypedDict('__main__.Model2', {'key': Literal[__main__.Key.B]})]' +[builtins fixtures/tuple.pyi] [case testNarrowingParentsHierarchyTypedDictWithStr] # flags: --warn-unreachable diff --git a/test-data/unit/check-newsemanal.test b/test-data/unit/check-newsemanal.test index a51ef3d4d00c..58153c58c37d 100644 --- a/test-data/unit/check-newsemanal.test +++ b/test-data/unit/check-newsemanal.test @@ -246,6 +246,7 @@ reveal_type(T2(x=2)) # E [file b.py] from a import TypedDict as TD1 from a import TD2 as TD3 +[builtins fixtures/tuple.pyi] [out] tmp/a.py:5: note: Revealed type is 
'TypedDict('a.T2', {'x': builtins.int})' @@ -274,6 +275,7 @@ x: T2 reveal_type(x) # N: Revealed type is 'TypedDict('__main__.T2', {'x': builtins.str, 'y': builtins.int})' y: T4 reveal_type(y) # N: Revealed type is 'TypedDict('__main__.T4', {'x': builtins.str, 'y': __main__.A})' +[builtins fixtures/tuple.pyi] [case testNewAnalyzerRedefinitionAndDeferral1a] import a @@ -864,6 +866,7 @@ reveal_type(i.t) # N: Revealed type is '__main__.Other' In = NamedTuple('In', [('s', str), ('t', Other)]) class Other: pass +[builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleClass] from typing import NamedTuple @@ -885,6 +888,7 @@ class In(NamedTuple): s: str t: Other class Other: pass +[builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleCallNested] from typing import NamedTuple @@ -902,6 +906,7 @@ class C: In = NamedTuple('In', [('s', str), ('t', Other)]) Out = NamedTuple('Out', [('x', In), ('y', Other)]) class Other: pass +[builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleClassNested] @@ -924,6 +929,7 @@ class C: s: str t: C.Other class Other: pass +[builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleCallNestedMethod] from typing import NamedTuple @@ -938,6 +944,7 @@ class C: Out = NamedTuple('Out', [('x', In), ('y', Other)]) In = NamedTuple('In', [('s', str), ('t', Other)]) class Other: pass +[builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleClassNestedMethod] from typing import NamedTuple @@ -958,6 +965,7 @@ class C: s: str t: Other class Other: pass +[builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleClassForwardMethod] from typing import NamedTuple @@ -973,6 +981,7 @@ class NT(NamedTuple): class Other(NamedTuple): s: str +[builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleSpecialMethods] from typing import NamedTuple @@ -987,6 +996,7 @@ class SubO(Out): pass Out = NamedTuple('Out', [('x', In), ('y', Other)]) In = NamedTuple('In', [('s', str), ('t', Other)]) class Other: pass +[builtins fixtures/tuple.pyi] 
[case testNewAnalyzerNamedTupleBaseClass] from typing import NamedTuple @@ -1004,6 +1014,7 @@ class In(NamedTuple): s: str t: Other class Other: pass +[builtins fixtures/tuple.pyi] [case testNewAnalyzerIncompleteRefShadowsBuiltin1] import a @@ -1158,6 +1169,7 @@ class B(type): return 0 reveal_type(A.f()) # N: Revealed type is 'builtins.int' +[builtins fixtures/tuple.pyi] [case testNewAnalyzerMetaclassSix2] import six @@ -1171,6 +1183,7 @@ class B(type): return 0 reveal_type(A.f()) # N: Revealed type is 'builtins.int' +[builtins fixtures/tuple.pyi] [case testNewAnalyzerMetaclassSix3] import six @@ -1187,6 +1200,7 @@ class Defer: reveal_type(A.f()) # N: Revealed type is 'builtins.int' reveal_type(A.x) # N: Revealed type is 'builtins.str' +[builtins fixtures/tuple.pyi] [case testNewAnalyzerMetaclassSix4] import six @@ -1203,6 +1217,7 @@ class A(six.with_metaclass(B, Defer)): class Defer: x: str +[builtins fixtures/tuple.pyi] [case testNewAnalyzerMetaclassFuture1] import future.utils @@ -1215,6 +1230,7 @@ class B(type): return 0 reveal_type(A.f()) # N: Revealed type is 'builtins.int' +[builtins fixtures/tuple.pyi] [case testNewAnalyzerMetaclassFuture3] import future.utils @@ -1231,6 +1247,7 @@ class Defer: reveal_type(A.f()) # N: Revealed type is 'builtins.int' reveal_type(A.x) # N: Revealed type is 'builtins.str' +[builtins fixtures/tuple.pyi] [case testNewAnalyzerMetaclassFuture4] import future.utils @@ -1247,6 +1264,7 @@ class A(future.utils.with_metaclass(B, Defer)): class Defer: x: str +[builtins fixtures/tuple.pyi] [case testNewAnalyzerMetaclass1_python2] class A: @@ -1716,6 +1734,7 @@ def g(x: int) -> int: ... 
def g(x: Union[C[str], int]) -> int: # E: Type argument "builtins.str" of "C" must be a subtype of "builtins.int" y: C[object] # E: Type argument "builtins.object" of "C" must be a subtype of "builtins.int" return 0 +[builtins fixtures/tuple.pyi] [case testNewAnalyzerTypeArgBoundCheckWithStrictOptional] # flags: --config-file tmp/mypy.ini @@ -1818,6 +1837,7 @@ reveal_type(x) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' [file b.py] import a x = (1, 2) +[builtins fixtures/tuple.pyi] [case testNewAnalyzerImportPriosA] import a @@ -1827,6 +1847,7 @@ reveal_type(x) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' [file b.py] import a x = (1, 2) +[builtins fixtures/tuple.pyi] [case testNewAnalyzerConditionalFunc] if int(): @@ -2015,6 +2036,7 @@ from typing import Tuple c: C class C(Tuple[int, str]): def __init__(self) -> None: pass +[builtins fixtures/tuple.pyi] [case testNewAnalyzerNotAnAlias] class Meta(type): @@ -2268,6 +2290,7 @@ reveal_type(x) # N: Revealed type is 'Tuple[builtins.int, fallback=__main__.C]' reveal_type(x.x) # N: Revealed type is 'builtins.int' C = NamedTuple('C', [('x', int)]) +[builtins fixtures/tuple.pyi] [case testNewAnalyzerApplicationForward1] from typing import Generic, TypeVar @@ -2487,6 +2510,7 @@ var1: Final = 1 def force1(x: Literal[1]) -> None: pass force1(reveal_type(var1)) # N: Revealed type is 'Literal[1]' +[builtins fixtures/tuple.pyi] [case testNewAnalyzerReportLoopInMRO] class A(A): ... # E: Cannot resolve name "A" (possible cyclic definition) @@ -2748,6 +2772,7 @@ def force(x: Literal[42]) -> None: pass force(reveal_type(var)) # N: Revealed type is 'Literal[42]' class Yes: ... 
+[builtins fixtures/tuple.pyi] [case testNewAnalyzerImportCycleWithIgnoreMissingImports] # flags: --ignore-missing-imports diff --git a/test-data/unit/check-optional.test b/test-data/unit/check-optional.test index 14f022c13f44..74a27093a22b 100644 --- a/test-data/unit/check-optional.test +++ b/test-data/unit/check-optional.test @@ -234,6 +234,7 @@ class C: def __init__(self) -> None: self.x = None # E: Incompatible types in assignment (expression has type "None", variable has type "int") self.y = None # E: Incompatible types in assignment (expression has type "None", variable has type "str") +[builtins fixtures/tuple.pyi] [out] [case testOverloadWithNone] diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index ae2ef0c07bd3..dc9a94b4a28f 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -471,6 +471,7 @@ def f(x: 'A') -> 'A': pass def f(x: 'B') -> 'B': pass class A: pass class B: pass +[builtins fixtures/tuple.pyi] [case testCallToOverloadedMethod] from foo import * @@ -512,6 +513,7 @@ class A: @overload def f(self, x: 'B') -> 'B': pass class B: pass +[builtins fixtures/tuple.pyi] [case testOverloadsWithDifferentArgumentCounts] from foo import * @@ -545,6 +547,7 @@ def f(x: 'A') -> 'A': pass def f(x: 'B', y: 'A') -> 'B': pass class A: pass class B: pass +[builtins fixtures/tuple.pyi] [case testGenericOverloadVariant] from foo import * @@ -565,6 +568,7 @@ def f(x: 'B') -> 'B': pass class A(Generic[t]): pass class B: pass class C: pass +[builtins fixtures/tuple.pyi] [case testOverloadedInit] from foo import * @@ -584,6 +588,7 @@ class A: @overload def __init__(self, b: 'B') -> None: pass class B: pass +[builtins fixtures/tuple.pyi] [case testIntersectionTypeCompatibility] from foo import * @@ -619,6 +624,7 @@ class A: @overload def __init__(self, a: 'B') -> None: pass class B: pass +[builtins fixtures/tuple.pyi] [case testOverloadedGetitem] from foo import * @@ -639,6 +645,7 @@ 
class A: def __getitem__(self, a: int) -> int: pass @overload def __getitem__(self, b: str) -> str: pass +[builtins fixtures/tuple.pyi] [case testOverloadedGetitemWithGenerics] from foo import * @@ -660,6 +667,7 @@ class C(Generic[t]): def __getitem__(self, b: 'B') -> t: pass class A: pass class B: pass +[builtins fixtures/tuple.pyi] [case testImplementingOverloadedMethod] from foo import * @@ -774,6 +782,7 @@ class A: def __init__(self, a: 'A') -> None: pass class B: pass +[builtins fixtures/tuple.pyi] [case testOverlappingErasedSignatures] from foo import * @@ -1174,6 +1183,7 @@ f(*(1, '', 1))() # E: No overload variant of "f" matches argument type "Tuple[in # N: Possible overload variant: \ # N: def f(*x: str) -> str \ # N: <1 more non-matching overload not shown> +[builtins fixtures/tuple.pyi] [case testPreferExactSignatureMatchInOverload] from foo import * @@ -1428,6 +1438,7 @@ class A(Generic[T]): b = A() # type: A[Tuple[int, int]] b.f((0, 0)) b.f((0, '')) # E: Argument 1 to "f" of "A" has incompatible type "Tuple[int, str]"; expected "Tuple[int, int]" +[builtins fixtures/tuple.pyi] [case testSingleOverloadStub] from foo import * @@ -1834,6 +1845,7 @@ class MyInt: def __init__(self, x: str) -> None: pass @overload def __init__(self, x: str, y: int) -> None: pass +[builtins fixtures/tuple.pyi] [out] [case testOverloadTupleInstance] @@ -1858,6 +1870,7 @@ def f(x: Tuple[A, int]) -> D: ... @overload def f(x: Tuple[()]) -> D: ... def f(x: Any) -> Any:... +[builtins fixtures/tuple.pyi] [case testOverloadTupleEllipsisNumargs] from typing import overload, Tuple, Any @@ -1953,7 +1966,7 @@ class Child4(Parent): @overload def f(self, arg: str) -> str: ... def f(self, arg: Union[int, str]) -> Union[int, str]: - return True # E: Incompatible return value type (got "bool", expected "Union[int, str]") + return b'' # E: Incompatible return value type (got "bytes", expected "Union[int, str]") [builtins fixtures/tuple.pyi] @@ -2082,6 +2095,7 @@ def bar1(*x: int) -> int: ... 
def bar2(x: int, y: str, z: int) -> str: ... @overload def bar2(*x: int) -> int: ... +[builtins fixtures/tuple.pyi] [case testOverloadDetectsPossibleMatchesWithGenerics] from typing import overload, TypeVar, Generic @@ -2169,6 +2183,7 @@ def foo2(*args2: str) -> int: ... def foo3(*args: int) -> str: ... @overload def foo3(*args: str) -> int: ... +[builtins fixtures/tuple.pyi] [case testOverloadPossibleOverlapWithVarargs2] from wrapper import * @@ -2194,6 +2209,7 @@ def foo3(x: int, *args2: int) -> str: ... def foo4(x: int, *args: int) -> str: ... # E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo4(*args2: int) -> int: ... +[builtins fixtures/tuple.pyi] [case testOverloadPossibleOverlapWithVarargs3] from wrapper import * @@ -2221,6 +2237,7 @@ def foo3(*args: str) -> int: ... def foo4(*args: int) -> str: ... @overload def foo4(x: Other = ..., *args: str) -> int: ... +[builtins fixtures/tuple.pyi] [case testOverloadPossibleOverlapWithVarargs4] from typing import overload @@ -2236,6 +2253,7 @@ def foo2(*xs: int) -> str: ... # E: Overloaded function signatures 1 and 2 over @overload def foo2(x: int = 0, y: int = 0) -> int: ... def foo2(*args): pass +[builtins fixtures/tuple.pyi] [case testOverloadPossibleOverlapWithKwargs] from wrapper import * @@ -2272,6 +2290,7 @@ def foo1(*x: str) -> int: ... def foo2(*x: str) -> int: ... @overload def foo2(x: str, *, y: str) -> str: ... +[builtins fixtures/tuple.pyi] [case testOverloadPossibleOverlapMixingOptionalArgsWithVarargs] from wrapper import * @@ -2292,6 +2311,7 @@ def foo2(x: str, y: str = ..., z: str = ...) -> str: ... def foo3(x: int, y: str = ..., z: str = ...) -> str: ... @overload def foo3(*x: str) -> int: ... +[builtins fixtures/tuple.pyi] [case testOverloadPossibleOverlapMixingOptionalArgsWithVarargs2] from wrapper import * @@ -2307,6 +2327,7 @@ def foo1(*x: str) -> int: ... def foo2(x: str, y: str = ..., z: int = ...) -> str: ... 
# E: Overloaded function signatures 1 and 2 overlap with incompatible return types @overload def foo2(*x: str) -> int: ... +[builtins fixtures/tuple.pyi] [case testOverloadPossibleOverlapMixingNamedArgsWithKwargs] from wrapper import * @@ -3153,6 +3174,7 @@ def f(*args): ... x: Union[A, B] reveal_type(f(x)) # N: Revealed type is '__main__.Parent' f(x, B()) # E: Argument 1 to "f" has incompatible type "Union[A, B]"; expected "B" +[builtins fixtures/tuple.pyi] [case testOverloadInferUnionWithMixOfPositionalAndOptionalArgs] # flags: --strict-optional @@ -3174,6 +3196,7 @@ reveal_type(f(x)) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(f(y)) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(f(z)) # N: Revealed type is 'Union[builtins.int, builtins.str]' reveal_type(f()) # N: Revealed type is 'builtins.str' +[builtins fixtures/tuple.pyi] [case testOverloadingInferUnionReturnWithTypevarWithValueRestriction] from typing import overload, Union, TypeVar, Generic @@ -4562,6 +4585,7 @@ def f(*args): x: Union[int, str] f(x, x, x, x, x, x, x, x) +[builtins fixtures/tuple.pyi] [out] main:11: error: Not all union combinations were tried because there are too many unions main:11: error: Argument 1 to "f" has incompatible type "Union[int, str]"; expected "int" @@ -4665,6 +4689,7 @@ g(3) # E: No overload variant of "g" matches argument type "int" \ # N: def g(x: A) -> None \ # N: def g(x: B) -> None \ # N: def g(x: C) -> None +[builtins fixtures/tuple.pyi] [case testOverloadedInIter] from lib import f, g @@ -5027,6 +5052,7 @@ def asdf() -> None: @dec def lol(x: int, y: int) -> int: pass +[builtins fixtures/tuple.pyi] [case testVeryBrokenOverload] import lib @@ -5072,3 +5098,4 @@ def foo(x: Literal[0]) -> None: ... # E: Overloaded function signatures 1 and 2 def foo(x: MyInt) -> int: ... def foo(x): ... 
+[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index 02e3b8d4c869..0081394541b0 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -2134,6 +2134,7 @@ class MockDict(MockMapping[T]): def f(x: MockMapping[int]) -> None: pass x: MockDict[str] f(x) # E: Argument 1 to "f" has incompatible type "MockDict[str]"; expected "MockMapping[int]" +[builtins fixtures/tuple.pyi] [case testProtocolNotesForComplexSignatures] from typing import Protocol, Optional @@ -2238,6 +2239,7 @@ def func(caller: Caller) -> None: func(call) func(bad) # E: Argument 1 to "func" has incompatible type "Callable[[int, VarArg(str)], None]"; expected "Caller" +[builtins fixtures/tuple.pyi] [out] [case testCallableImplementsProtocolGeneric] @@ -2334,6 +2336,7 @@ def bad(x: int, *args: str) -> None: cb: Caller = bad # E: Incompatible types in assignment (expression has type "Callable[[int, VarArg(str)], None]", variable has type "Caller") \ # N: "Caller.__call__" has type "Callable[[Arg(str, 'x'), VarArg(int)], None]" +[builtins fixtures/tuple.pyi] [out] [case testCallableImplementsProtocolArgName] diff --git a/test-data/unit/check-python38.test b/test-data/unit/check-python38.test index a4388aeb0299..f0a346a40c4c 100644 --- a/test-data/unit/check-python38.test +++ b/test-data/unit/check-python38.test @@ -77,6 +77,7 @@ def g(x: int): ... g("IGNORE"), # type: ignore f("ERROR"), # E: Argument 1 to "f" has incompatible type "str"; expected "int" ) +[builtins fixtures/tuple.pyi] [case testIgnoreScopeNestedOverlapping] def f(x: int): ... @@ -86,6 +87,7 @@ def g(x: int): ... 
"IGNORE" # type: ignore ), f("ERROR"), # E: Argument 1 to "f" has incompatible type "str"; expected "int" ) +[builtins fixtures/tuple.pyi] [case testIgnoreScopeUnused1] # flags: --warn-unused-ignores diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index d99ad2282735..8b806a3ddebc 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -523,6 +523,7 @@ ci: C[int] cs: C[str] reveal_type(ci.from_item) # N: Revealed type is 'def (item: Tuple[builtins.int])' reveal_type(cs.from_item) # N: Revealed type is 'def (item: builtins.str)' +[builtins fixtures/tuple.pyi] [case testSelfTypeRestrictedMethodOverloadFallback] from typing import TypeVar, Generic, overload, Callable @@ -675,6 +676,7 @@ b.atomic_close() # E: Invalid self argument "Bad" to attribute function "atomic reveal_type(f.copy()) # N: Revealed type is '__main__.File*' b.copy() # E: Invalid self argument "Bad" to attribute function "copy" with type "Callable[[T], T]" +[builtins fixtures/tuple.pyi] [case testBadClassLevelDecoratorHack] from typing_extensions import Protocol @@ -692,6 +694,7 @@ class Test: reveal_type(Test().meth) # N: Revealed type is 'def (x: builtins.str) -> builtins.int' Test()._deco # E: Invalid self argument "Test" to attribute function "_deco" with type "Callable[[F], F]" +[builtins fixtures/tuple.pyi] [case testSelfTypeTrickyExample] from typing import * @@ -742,6 +745,7 @@ c: Lnk[int, float] = Lnk() d: Lnk[str, float] = b >> c # OK e: Lnk[str, Tuple[int, float]] = a >> (b, c) # OK f: Lnk[str, Tuple[float, int]] = a >> (c, b) # E: Unsupported operand types for >> ("Lnk[str, Tuple[str, int]]" and "Tuple[Lnk[int, float], Lnk[str, int]]") +[builtins fixtures/tuple.pyi] [case testSelfTypeMutuallyExclusiveRestrictions] from typing import Generic, TypeVar @@ -863,6 +867,7 @@ class C(Generic[T]): def magic(self: C[Tuple[S, U]]) -> Tuple[T, S, U]: ... 
reveal_type(C[Tuple[int, str]]().magic()) # N: Revealed type is 'Tuple[Tuple[builtins.int, builtins.str], builtins.int, builtins.str]' +[builtins fixtures/tuple.pyi] [case testSelfTypeOnUnion] from typing import TypeVar, Union diff --git a/test-data/unit/check-semanal-error.test b/test-data/unit/check-semanal-error.test index 87cc0aed4cce..d47674a13475 100644 --- a/test-data/unit/check-semanal-error.test +++ b/test-data/unit/check-semanal-error.test @@ -112,4 +112,5 @@ class C: attr: int x: P[int] = C() +[builtins fixtures/tuple.pyi] [out] diff --git a/test-data/unit/check-serialize.test b/test-data/unit/check-serialize.test index 7c747534128d..88549ea4b146 100644 --- a/test-data/unit/check-serialize.test +++ b/test-data/unit/check-serialize.test @@ -710,6 +710,7 @@ class C: self.a = A(0) self.b = A(0) # type: A self.c = A +[builtins fixtures/tuple.pyi] [out1] main:2: note: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A@4]' main:3: note: Revealed type is 'Tuple[builtins.int, fallback=ntcrash.C.A@4]' @@ -924,6 +925,7 @@ b.N(x='') from typing import NamedTuple N = NamedTuple('N', [('x', int)]) x: N +[builtins fixtures/tuple.pyi] [out2] tmp/a.py:5: error: Incompatible types in assignment (expression has type "Tuple[int]", variable has type "N") tmp/a.py:6: error: Incompatible types in assignment (expression has type "Tuple[int]", variable has type "N") diff --git a/test-data/unit/check-statements.test b/test-data/unit/check-statements.test index 82d1b45e19a3..530588575f97 100644 --- a/test-data/unit/check-statements.test +++ b/test-data/unit/check-statements.test @@ -224,6 +224,7 @@ class B: def __add__(self, x: A) -> 'C': pass class C: pass +[builtins fixtures/tuple.pyi] [out] main:3: error: Unsupported operand types for + ("A" and "B") main:4: error: Incompatible types in assignment (expression has type "C", variable has type "B") @@ -244,6 +245,7 @@ class B: def __sub__(self, x: A) -> 'C': pass class C: pass +[builtins fixtures/tuple.pyi] [out] main:3: 
error: Unsupported operand types for - ("A" and "B") main:4: error: Incompatible types in assignment (expression has type "C", variable has type "B") @@ -260,6 +262,7 @@ class A: def __mul__(self, x: 'C') -> 'A': pass class C: pass +[builtins fixtures/tuple.pyi] [out] main:3: error: Unsupported operand types for * ("A" and "A") main:4: error: Unsupported left operand type for * ("C") @@ -274,6 +277,7 @@ class A: def __matmul__(self, x: 'C') -> 'A': pass class C: pass +[builtins fixtures/tuple.pyi] [case testDivAssign] @@ -286,6 +290,7 @@ class A: def __truediv__(self, x: 'C') -> 'A': pass class C: pass +[builtins fixtures/tuple.pyi] [out] main:3: error: Unsupported operand types for / ("A" and "A") main:4: error: Unsupported left operand type for / ("C") @@ -301,6 +306,7 @@ class A: def __pow__(self, x: 'C') -> 'A': pass class C: pass +[builtins fixtures/tuple.pyi] [out] main:3: error: Unsupported operand types for ** ("A" and "A") main:4: error: Unsupported left operand type for ** ("C") @@ -316,6 +322,7 @@ class A: def __add__(self, x: 'A') -> 'B': pass class B(A): pass +[builtins fixtures/tuple.pyi] [out] [case testAdditionalOperatorsInOpAssign] @@ -332,6 +339,7 @@ class A: def __rshift__(self, x: 'C') -> 'A': pass def __floordiv__(self, x: 'C') -> 'A': pass class C: pass +[builtins fixtures/tuple.pyi] [out] main:3: error: Unsupported operand types for & ("A" and "A") main:4: error: Unsupported operand types for >> ("A" and "A") @@ -1018,6 +1026,7 @@ del a[b] # E: "A" has no attribute "__delitem__" class B: def __delitem__(self, index: 'A'): pass class A: pass +[builtins fixtures/tuple.pyi] [case testDelStmtWithAttribute] class A: @@ -1033,6 +1042,7 @@ class A: x = 0 a = A() del a.x, a.y # E: "A" has no attribute "y" +[builtins fixtures/tuple.pyi] [case testDelStatementWithAssignmentSimple] diff --git a/test-data/unit/check-super.test b/test-data/unit/check-super.test index b7c2ca3df897..2cccfd3d6127 100644 --- a/test-data/unit/check-super.test +++ 
b/test-data/unit/check-super.test @@ -17,6 +17,7 @@ class A(B): a = super().g() # E: "g" undefined in superclass b = super().f() return a +[builtins fixtures/tuple.pyi] [out] [case testAccessingSuperTypeMethodWithArgs] @@ -30,6 +31,7 @@ class A(B): super().f(a) self.f(b) self.f(a) +[builtins fixtures/tuple.pyi] [out] [case testAccessingSuperInit] diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index 646fee10d5cf..5a792a77d856 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -456,6 +456,7 @@ if int(): if int(): a = 1 b = '' +[builtins fixtures/tuple.pyi] [case testMultipleAssignmentWithExtraParentheses] @@ -496,12 +497,14 @@ if int(): a = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") if int(): b = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "str") +[builtins fixtures/tuple.pyi] [case testMultipleAssignmentWithMixedVariables] a = b, c = 1, 1 x, y = p, q = 1, 1 u, v, w = r, s = 1, 1 # E: Need more than 2 values to unpack (3 expected) d, e = f, g, h = 1, 1 # E: Need more than 2 values to unpack (3 expected) +[builtins fixtures/tuple.pyi] -- Assignment to starred expressions @@ -697,6 +700,7 @@ if int(): class A: pass class B: pass class C: pass +[builtins fixtures/tuple.pyi] [case testNestedTupleAssignment2] @@ -728,6 +732,7 @@ if int(): class A: pass class B: pass class C: pass +[builtins fixtures/tuple.pyi] -- Error messages @@ -888,6 +893,7 @@ B()[100] [case testValidTupleBaseClass] from typing import Tuple class A(tuple): pass +[builtins fixtures/tuple.pyi] [out] [case testTupleBaseClass2] @@ -960,18 +966,21 @@ from typing import Container a = None # type: Container[str] a = () [typing fixtures/typing-full.pyi] +[builtins fixtures/tuple.pyi] [case testSubtypingTupleIsSized] from typing import Sized a = None # type: Sized a = () [typing fixtures/typing-medium.pyi] +[builtins fixtures/tuple.pyi] [case 
testTupleWithStarExpr1] a = (1, 2) b = (*a, '') reveal_type(b) # N: Revealed type is 'Tuple[builtins.int, builtins.int, builtins.str]' +[builtins fixtures/tuple.pyi] [case testTupleWithStarExpr2] a = [1] @@ -1194,6 +1203,7 @@ x: Iterable[int] = () y: Tuple[int, int] = (1, 2) x = y reveal_type(x) # N: Revealed type is 'Tuple[builtins.int, builtins.int]' +[builtins fixtures/tuple.pyi] [case testTupleOverlapDifferentTuples] from typing import Optional, Tuple diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test index 1c0803511be5..1e3c6f10a37b 100644 --- a/test-data/unit/check-type-aliases.test +++ b/test-data/unit/check-type-aliases.test @@ -14,6 +14,7 @@ f(1) f('') f(()) # E: Argument 1 to "f" has incompatible type "Tuple[]"; expected "Union[int, str]" [targets __main__, __main__.f] +[builtins fixtures/tuple.pyi] [case testTupleTypeAlias] from typing import Tuple @@ -22,6 +23,7 @@ def f(x: T) -> None: pass f((1, 'x')) f(1) # E: Argument 1 to "f" has incompatible type "int"; expected "Tuple[int, str]" [targets __main__, __main__.f] +[builtins fixtures/tuple.pyi] [case testCallableTypeAlias] from typing import Callable @@ -86,6 +88,7 @@ if int(): A = Union[T, int] # E: Cannot assign multiple types to name "A" without an explicit "Type[...]" annotation \ # E: Value of type "int" is not indexable # the second error is because of `Union = 0` in lib-stub/typing.pyi +[builtins fixtures/tuple.pyi] [out] [case testProhibitUsingVariablesAsTypesAndAllowAliasesAsTypes] @@ -180,6 +183,7 @@ def f(p: 'Alias[str]') -> None: reveal_type(f) # N: Revealed type is 'def (p: Tuple[builtins.int, builtins.str])' T = TypeVar('T') Alias = Tuple[int, T] +[builtins fixtures/tuple.pyi] [out] [case testRecursiveAliasesErrors1] diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index e87d20cf61a9..0a6b57d5cafa 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -1472,6 +1472,7 @@ 
f4(**a) # E: Extra argument "y" from **args for "f4" f5(**a) # E: Too few arguments for "f5" f6(**a) # E: Extra argument "y" from **args for "f6" f1(1, **a) # E: "f1" gets multiple values for keyword argument "x" +[builtins fixtures/tuple.pyi] [case testTypedDictAsStarStarArgConstraints] from typing import TypeVar, Union @@ -1484,6 +1485,7 @@ def f1(x: T, y: S) -> Union[T, S]: ... A = TypedDict('A', {'y': int, 'x': str}) a: A reveal_type(f1(**a)) # N: Revealed type is 'Union[builtins.str*, builtins.int*]' +[builtins fixtures/tuple.pyi] [case testTypedDictAsStarStarArgCalleeKwargs] from mypy_extensions import TypedDict @@ -1528,6 +1530,7 @@ f1(**a, **c) # E: "f1" gets multiple values for keyword argument "x" \ # E: Argument "x" to "f1" has incompatible type "str"; expected "int" f1(**c, **a) # E: "f1" gets multiple values for keyword argument "x" \ # E: Argument "x" to "f1" has incompatible type "str"; expected "int" +[builtins fixtures/tuple.pyi] [case testTypedDictNonMappingMethods] from typing import List diff --git a/test-data/unit/check-typevar-values.test b/test-data/unit/check-typevar-values.test index 80b764141e47..d70f7b240333 100644 --- a/test-data/unit/check-typevar-values.test +++ b/test-data/unit/check-typevar-values.test @@ -586,6 +586,7 @@ from typing import Sequence, Iterable, TypeVar S = TypeVar('S', Sequence, Iterable) def my_len(s: S) -> None: pass def crash() -> None: my_len((0,)) +[builtins fixtures/tuple.pyi] [case testReferenceToDecoratedFunctionAndTypeVarValues] from typing import TypeVar, Callable diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test index 4ebc82568cfb..ffb162494c48 100644 --- a/test-data/unit/check-unions.test +++ b/test-data/unit/check-unions.test @@ -523,6 +523,7 @@ reveal_type(a1) # N: Revealed type is 'builtins.float' b: Union[Tuple[int], Tuple[str]] (b1,) = b reveal_type(b1) # N: Revealed type is 'Union[builtins.int, builtins.str]' +[builtins fixtures/tuple.pyi] [case 
testUnionMultiassignDouble] from typing import Union, Tuple @@ -531,6 +532,7 @@ c: Union[Tuple[int, int], Tuple[int, float]] (c1, c2) = c reveal_type(c1) # N: Revealed type is 'builtins.int' reveal_type(c2) # N: Revealed type is 'builtins.float' +[builtins fixtures/tuple.pyi] [case testUnionMultiassignGeneric] from typing import Union, Tuple, TypeVar @@ -543,6 +545,7 @@ def pack_two(x: T, y: S) -> Union[Tuple[T, T], Tuple[S, S]]: (x, y) = pack_two(1, 'a') reveal_type(x) # N: Revealed type is 'Union[builtins.int*, builtins.str*]' reveal_type(y) # N: Revealed type is 'Union[builtins.int*, builtins.str*]' +[builtins fixtures/tuple.pyi] [case testUnionMultiassignAny] from typing import Union, Tuple, Any @@ -554,6 +557,7 @@ reveal_type(d2) # N: Revealed type is 'Union[Any, builtins.float]' e: Union[Any, Tuple[float, float], int] (e1, e2) = e # E: 'builtins.int' object is not iterable +[builtins fixtures/tuple.pyi] [case testUnionMultiassignNotJoin] from typing import Union, List @@ -609,6 +613,7 @@ d1: object (d1, d2) = d reveal_type(d1) # N: Revealed type is 'builtins.int' reveal_type(d2) # N: Revealed type is 'builtins.float' +[builtins fixtures/tuple.pyi] [case testUnionMultiassignIndexed] from typing import Union, Tuple, List diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test index 7eb23fe16d33..262ac86e49ad 100644 --- a/test-data/unit/check-unreachable-code.test +++ b/test-data/unit/check-unreachable-code.test @@ -1033,6 +1033,7 @@ def f_no_suppress_5() -> int: noop() # E: Statement is unreachable [typing fixtures/typing-medium.pyi] +[builtins fixtures/tuple.pyi] [case testUnreachableFlagContextManagersSuppressed] # flags: --warn-unreachable @@ -1079,6 +1080,7 @@ def f_mix() -> int: # E: Missing return statement return 3 noop() [typing fixtures/typing-medium.pyi] +[builtins fixtures/tuple.pyi] [case testUnreachableFlagContextManagersSuppressedNoStrictOptional] # flags: --warn-unreachable --no-strict-optional @@ 
-1120,6 +1122,7 @@ def f_suppress() -> int: # E: Missing return statement return 3 noop() [typing fixtures/typing-medium.pyi] +[builtins fixtures/tuple.pyi] [case testUnreachableFlagContextAsyncManagersNoSuppress] # flags: --warn-unreachable --python-version 3.7 @@ -1185,6 +1188,7 @@ async def f_no_suppress_5() -> int: noop() # E: Statement is unreachable [typing fixtures/typing-full.pyi] +[builtins fixtures/tuple.pyi] [case testUnreachableFlagContextAsyncManagersSuppressed] # flags: --warn-unreachable --python-version 3.7 @@ -1231,6 +1235,7 @@ async def f_mix() -> int: # E: Missing return statement return 3 noop() [typing fixtures/typing-full.pyi] +[builtins fixtures/tuple.pyi] [case testUnreachableFlagContextAsyncManagersAbnormal] # flags: --warn-unreachable --python-version 3.7 @@ -1282,3 +1287,4 @@ async def f_malformed_2() -> int: noop() # E: Statement is unreachable [typing fixtures/typing-full.pyi] +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test index f9072a492587..3a21423b057c 100644 --- a/test-data/unit/check-varargs.test +++ b/test-data/unit/check-varargs.test @@ -462,6 +462,7 @@ def foo() -> None: pass foo(*()) +[builtins fixtures/tuple.pyi] -- Overloads + varargs -- ------------------- diff --git a/test-data/unit/check-warnings.test b/test-data/unit/check-warnings.test index 99b611529980..b4fb28905e52 100644 --- a/test-data/unit/check-warnings.test +++ b/test-data/unit/check-warnings.test @@ -173,6 +173,7 @@ typ = Tuple[int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int] def g() -> Any: pass def f() -> typ: return g() +[builtins fixtures/tuple.pyi] [out] main:11: error: Returning Any from function declared to return diff --git a/test-data/unit/deps-classes.test b/test-data/unit/deps-classes.test index 63823556577f..e8b2aaa7dcca 100644 --- a/test-data/unit/deps-classes.test +++ b/test-data/unit/deps-classes.test 
@@ -16,6 +16,7 @@ def f(a: Any) -> None: n.a [file a.py] class A: pass +[builtins fixtures/tuple.pyi] [out] -> m.f -> m.f @@ -35,6 +36,7 @@ def f(a: Any) -> None: [file a.py] class A: pass class B: pass +[builtins fixtures/tuple.pyi] [out] -> m.f -> m.f @@ -50,6 +52,7 @@ N = NamedTuple('N', [('x', int)]) x = N(1) M = NamedTuple('M', [('z', 'N')]) y = M(x) +[builtins fixtures/tuple.pyi] [out] -> m -> m @@ -71,6 +74,7 @@ def f(a: Any) -> None: n.a [file a.py] class A: pass +[builtins fixtures/tuple.pyi] [out] -> m.f -> m.f diff --git a/test-data/unit/deps-expressions.test b/test-data/unit/deps-expressions.test index 94a44cabbe21..dccae38de300 100644 --- a/test-data/unit/deps-expressions.test +++ b/test-data/unit/deps-expressions.test @@ -459,6 +459,7 @@ b = a def f(x: Alias) -> None: pass def g() -> Literal[1]: return b +[builtins fixtures/tuple.pyi] [out] -> , m, m.f -> m diff --git a/test-data/unit/deps-generics.test b/test-data/unit/deps-generics.test index e50497daab41..c78f3fad90c0 100644 --- a/test-data/unit/deps-generics.test +++ b/test-data/unit/deps-generics.test @@ -126,6 +126,7 @@ T = TypeVar('T', bound=Tuple[A, B]) def f(x: T) -> T: return x +[builtins fixtures/tuple.pyi] [out] -> , , m, m.A, m.f -> , , m, m.B, m.f diff --git a/test-data/unit/deps-types.test b/test-data/unit/deps-types.test index 36cffe2ec306..b13f33fe7017 100644 --- a/test-data/unit/deps-types.test +++ b/test-data/unit/deps-types.test @@ -20,6 +20,7 @@ class B: pass def f(x: Tuple[A, B]) -> None: pass +[builtins fixtures/tuple.pyi] [out] -> , m.A, m.f -> , m.B, m.f @@ -844,6 +845,7 @@ class P(NamedTuple): x: A [file mod.py] class I: pass +[builtins fixtures/tuple.pyi] [out] -> m -> m.P @@ -862,6 +864,7 @@ from mod import I A = I [file mod.py] class I: pass +[builtins fixtures/tuple.pyi] [out] -> m -> m diff --git a/test-data/unit/diff.test b/test-data/unit/diff.test index 53bbf55ba2a6..ee3519478c45 100644 --- a/test-data/unit/diff.test +++ b/test-data/unit/diff.test @@ -273,6 +273,7 @@ M 
= NamedTuple('M', [('x', int), ('y', str)]) from typing import NamedTuple N = NamedTuple('N', [('x', int), ('y', int)]) M = NamedTuple('M', [('x', int), ('y', str)]) +[builtins fixtures/tuple.pyi] [out] __main__.A __main__.N @@ -1180,6 +1181,7 @@ class C: self.y_instance: Literal[1] = 1 self.z_instance: Literal[2] = 2 self.same_instance: Literal[1] = 1 +[builtins fixtures/tuple.pyi] [out] __main__.C.x_class __main__.C.x_instance @@ -1440,6 +1442,7 @@ class C: def method_same(self, x: Literal[1]) -> int: ... def method_same(self, x): pass +[builtins fixtures/tuple.pyi] [out] __main__.C.method __main__.func @@ -1450,6 +1453,7 @@ x: Literal[1, '2'] [file next.py] from typing_extensions import Literal x: Literal[1, 2] +[builtins fixtures/tuple.pyi] [out] __main__.x @@ -1463,5 +1467,6 @@ from typing import Callable, Union from mypy_extensions import Arg x: Union[Callable[[Arg(int, 'y')], None], Callable[[int], None]] +[builtins fixtures/tuple.pyi] [out] __main__.x diff --git a/test-data/unit/fine-grained-blockers.test b/test-data/unit/fine-grained-blockers.test index 787bbce9d505..2b02dd689705 100644 --- a/test-data/unit/fine-grained-blockers.test +++ b/test-data/unit/fine-grained-blockers.test @@ -405,6 +405,7 @@ import blocker [file a.py.4] import sys 1() +[builtins fixtures/tuple.pyi] [out] == a.py:1: error: invalid syntax diff --git a/test-data/unit/fine-grained-modules.test b/test-data/unit/fine-grained-modules.test index 7c836aa46227..6fb947eb511a 100644 --- a/test-data/unit/fine-grained-modules.test +++ b/test-data/unit/fine-grained-modules.test @@ -744,6 +744,7 @@ a.x = 0 [file a.py.2] import sys x = sys.platform +[builtins fixtures/tuple.pyi] [out] main:1: error: Cannot find implementation or library stub for module named 'a' main:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports @@ -808,6 +809,7 @@ class Bar: class Baz: pass [delete c.py.2] +[builtins fixtures/tuple.pyi] [out] == diff --git 
a/test-data/unit/fine-grained-suggest.test b/test-data/unit/fine-grained-suggest.test index 816bd5787a9c..34bf0ff1ccf7 100644 --- a/test-data/unit/fine-grained-suggest.test +++ b/test-data/unit/fine-grained-suggest.test @@ -141,6 +141,7 @@ No guesses that match criteria! [file foo.py] def foo(): return 1, "1" +[builtins fixtures/tuple.pyi] [out] () -> Tuple[int, str] == @@ -152,6 +153,7 @@ from typing import NamedTuple N = NamedTuple('N', [('x', int)]) def foo(): return N(1) +[builtins fixtures/tuple.pyi] [out] () -> foo.N == @@ -200,6 +202,7 @@ class B: ... from foo import foo from baz import B foo(B()) +[builtins fixtures/tuple.pyi] [out] (baz.B) -> Tuple[foo.A, foo:A.C] diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index fb77f534b075..4e2309b3c5cf 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -1717,6 +1717,7 @@ N = NamedTuple('N', [('x', int)]) [file a.py] def f() -> None: pass [file a.py.2] +[builtins fixtures/tuple.pyi] [out] == main:2: error: Module 'a' has no attribute 'f' @@ -3302,6 +3303,7 @@ class C(N): x = 0 [file m.py.2] x = '' +[builtins fixtures/tuple.pyi] [out] == @@ -3410,6 +3412,7 @@ a: A def g() -> None: x = L(A()) x.f(a) +[builtins fixtures/tuple.pyi] [out] == @@ -3477,6 +3480,7 @@ import a def f(x: a.N) -> None: pass f(a.x) +[builtins fixtures/tuple.pyi] [out] == @@ -3495,6 +3499,7 @@ import a def f(x: a.N) -> None: pass f(a.x) +[builtins fixtures/tuple.pyi] [out] == @@ -3523,6 +3528,7 @@ def f(x: b.M) -> None: lol(x) f(b.x) lol(b.x) +[builtins fixtures/tuple.pyi] [out] == c.py:7: error: Argument 1 to "lol" has incompatible type "M"; expected "Tuple[Tuple[int]]" @@ -3545,6 +3551,7 @@ import a def f(x: a.N) -> None: pass f(a.x) +[builtins fixtures/tuple.pyi] [out] == @@ -4364,6 +4371,7 @@ def f() -> None: x = 0 [file b.py.2] x = '' +[builtins fixtures/tuple.pyi] [out] == @@ -4384,9 +4392,9 @@ x = 0 x = '' [builtins fixtures/tuple.pyi] [out] -b.py:5: error: Incompatible 
types in assignment (expression has type "int", base class "tuple" defined the type as "Callable[[Tuple[int, ...], Any], int]") +b.py:5: error: Incompatible types in assignment (expression has type "int", base class "tuple" defined the type as "Callable[[Tuple[int, ...], object], int]") == -b.py:5: error: Incompatible types in assignment (expression has type "int", base class "tuple" defined the type as "Callable[[Tuple[int, ...], Any], int]") +b.py:5: error: Incompatible types in assignment (expression has type "int", base class "tuple" defined the type as "Callable[[Tuple[int, ...], object], int]") [case testReprocessEllipses1] import a @@ -4440,6 +4448,7 @@ def unchecked(): def inner(): # type: () -> (str, int) return 'lol', 10 +[builtins fixtures/tuple.pyi] [out] == @@ -4454,6 +4463,7 @@ def inner(): def inner(): # type: () -> (str, int) return 'lol', 10 +[builtins fixtures/tuple.pyi] [out] a.py:1: error: Syntax error in type annotation a.py:1: note: Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn) @@ -4468,6 +4478,7 @@ import a [file a.py.2] # dummy change (x, y) = 1, 'hi' # type: (int, str) +[builtins fixtures/tuple.pyi] [out] == @@ -5814,6 +5825,7 @@ def f() -> Tuple[int, object]: pass [file b.py.3] from typing import Tuple def f() -> Tuple[str, int]: pass +[builtins fixtures/tuple.pyi] [out] == a.py:10: error: Argument 1 to "h" has incompatible type "int"; expected "str" @@ -8002,6 +8014,7 @@ def deco(func: F) -> F: # type: ignore def test(x: int, y: int) -> str: pass x = 1 +[builtins fixtures/tuple.pyi] [out] == @@ -8406,6 +8419,7 @@ NT = NamedTuple('NT', [('x', B)]) [file b.py.2] def func(x): pass B = func +[builtins fixtures/tuple.pyi] [out] == main:5: error: Variable "b.B" is not valid as a type @@ -8422,6 +8436,7 @@ A = B [file b.py.2] def func(x): pass B = func +[builtins fixtures/tuple.pyi] [out] == main:5: error: Variable "a.A" is not valid as a type @@ -8448,6 +8463,7 @@ A = B [file b.py.2] def func(x): pass B = func +[builtins 
fixtures/tuple.pyi] [out] == m.py:4: error: Variable "a.A" is not valid as a type @@ -8496,6 +8512,7 @@ x: Literal[1] = 1 [file mod.py.3] from typing_extensions import Literal x: Literal[1] = 2 +[builtins fixtures/tuple.pyi] [out] main:2: note: Revealed type is 'builtins.int' == @@ -8515,6 +8532,7 @@ def foo(x: Literal[3]) -> None: pass [file mod.py.3] from typing_extensions import Literal def foo(x: Literal[4]) -> None: pass +[builtins fixtures/tuple.pyi] [out] == == @@ -8531,6 +8549,7 @@ Alias = Literal[1] [file mod.py.3] from typing_extensions import Literal Alias = Literal[2] +[builtins fixtures/tuple.pyi] [out] == == @@ -8557,6 +8576,7 @@ def foo(x: int) -> str: ... @overload def foo(x: Literal['bar']) -> int: ... def foo(x): pass +[builtins fixtures/tuple.pyi] [out] main:2: note: Revealed type is 'builtins.str' == @@ -8578,6 +8598,7 @@ qux: Literal[3] [file mod3.py.2] from typing_extensions import Literal qux: Literal[4] +[builtins fixtures/tuple.pyi] [out] == main:4: error: Argument 1 to "expect_3" has incompatible type "Literal[4]"; expected "Literal[3]" @@ -8600,6 +8621,7 @@ Alias3 = Literal[3] [file mod3.py.2] from typing_extensions import Literal Alias3 = Literal[4] +[builtins fixtures/tuple.pyi] [out] == main:5: error: Argument 1 to "expect_3" has incompatible type "Literal[4]"; expected "Literal[3]" @@ -8620,6 +8642,7 @@ def func3() -> Literal[3]: pass [file mod3.py.2] from typing_extensions import Literal def func3() -> Literal[4]: pass +[builtins fixtures/tuple.pyi] [out] == main:4: error: Argument 1 to "expect_3" has incompatible type "Literal[4]"; expected "Literal[3]" @@ -8638,6 +8661,7 @@ bar = 3 [file mod2.py.2] from typing_extensions import Literal bar: Literal[3] = 3 +[builtins fixtures/tuple.pyi] [out] main:2: note: Revealed type is 'builtins.int*' == @@ -8663,6 +8687,7 @@ qux: Final = 4 [file mod3.py.3] from typing_extensions import Final qux: Final[int] = 4 +[builtins fixtures/tuple.pyi] [out] == main:4: error: Argument 1 to "expect_3" has 
incompatible type "Literal[4]"; expected "Literal[3]" @@ -8684,6 +8709,7 @@ def bar() -> Literal[u"foo"]: pass [file mod2.py.3] from typing_extensions import Literal def bar() -> Literal[b"foo"]: pass +[builtins fixtures/tuple.pyi] [out] main:2: note: Revealed type is 'Literal['foo']' == @@ -8765,6 +8791,7 @@ def f(x: Union[int, str]) -> None: ... [targets2 c, b] [targets3 a] +[builtins fixtures/tuple.pyi] [out] == == @@ -8830,6 +8857,7 @@ B().x [targets2 c, b] [targets3 a] +[builtins fixtures/tuple.pyi] [out] == == @@ -8868,6 +8896,7 @@ B().x [targets2 c, b] [targets3 a] +[builtins fixtures/tuple.pyi] [out] == == diff --git a/test-data/unit/fixtures/tuple.pyi b/test-data/unit/fixtures/tuple.pyi index 686e2dd55818..a101595c6f30 100644 --- a/test-data/unit/fixtures/tuple.pyi +++ b/test-data/unit/fixtures/tuple.pyi @@ -5,27 +5,29 @@ from typing import Iterable, Iterator, TypeVar, Generic, Sequence, Any, overload Tco = TypeVar('Tco', covariant=True) class object: - def __init__(self): pass + def __init__(self) -> None: pass class type: - def __init__(self, *a) -> None: pass - def __call__(self, *a) -> object: pass + def __init__(self, *a: object) -> None: pass + def __call__(self, *a: object) -> object: pass class tuple(Sequence[Tco], Generic[Tco]): def __iter__(self) -> Iterator[Tco]: pass def __contains__(self, item: object) -> bool: pass def __getitem__(self, x: int) -> Tco: pass - def __rmul__(self, n: int) -> tuple: pass + def __rmul__(self, n: int) -> Tuple[Tco, ...]: pass def __add__(self, x: Tuple[Tco, ...]) -> Tuple[Tco, ...]: pass - def count(self, obj: Any) -> int: pass + def count(self, obj: object) -> int: pass class function: pass class ellipsis: pass # We need int and slice for indexing tuples. 
class int: def __neg__(self) -> 'int': pass +class float: pass class slice: pass -class bool: pass +class bool(int): pass class str: pass # For convenience +class bytes: pass class unicode: pass T = TypeVar('T') @@ -35,6 +37,8 @@ class list(Sequence[T], Generic[T]): def __getitem__(self, i: int) -> T: ... @overload def __getitem__(self, s: slice) -> list[T]: ... + def __contains__(self, item: object) -> bool: ... + def __iter__(self) -> Iterator[T]: ... def isinstance(x: object, t: type) -> bool: pass diff --git a/test-data/unit/lib-stub/builtins.pyi b/test-data/unit/lib-stub/builtins.pyi index de1266e3a284..7ba4002ed4ac 100644 --- a/test-data/unit/lib-stub/builtins.pyi +++ b/test-data/unit/lib-stub/builtins.pyi @@ -2,9 +2,6 @@ # # Use [builtins fixtures/...pyi] if you need more features. -from typing import Generic, TypeVar -_T = TypeVar('_T') - class object: def __init__(self) -> None: pass @@ -19,7 +16,6 @@ class float: pass class str: pass class bytes: pass -class tuple(Generic[_T]): pass class function: pass class ellipsis: pass diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test index df621cccbe81..407262a99262 100644 --- a/test-data/unit/merge.test +++ b/test-data/unit/merge.test @@ -656,6 +656,7 @@ N = NamedTuple('N', [('x', A)]) from typing import NamedTuple class A: pass N = NamedTuple('N', [('x', A), ('y', A)]) +[builtins fixtures/tuple.pyi] [out] TypeInfo<0>( Name(target.A) @@ -665,20 +666,20 @@ TypeInfo<0>( TypeInfo<2>( Name(target.N) Bases(builtins.tuple[target.A<0>]<3>) - Mro(target.N<2>, builtins.tuple<3>, builtins.object<1>) + Mro(target.N<2>, builtins.tuple<3>, typing.Sequence<4>, typing.Iterable<5>, builtins.object<1>) Names( - _NT<4> - __annotations__<5> (builtins.object<1>) - __doc__<6> (builtins.str<7>) - __new__<8> - _asdict<9> - _field_defaults<10> (builtins.object<1>) - _field_types<11> (builtins.object<1>) - _fields<12> (Tuple[builtins.str<7>]) - _make<13> - _replace<14> - _source<15> (builtins.str<7>) - x<16> 
(target.A<0>))) + _NT<6> + __annotations__<7> (builtins.object<1>) + __doc__<8> (builtins.str<9>) + __new__<10> + _asdict<11> + _field_defaults<12> (builtins.object<1>) + _field_types<13> (builtins.object<1>) + _fields<14> (Tuple[builtins.str<9>]) + _make<15> + _replace<16> + _source<17> (builtins.str<9>) + x<18> (target.A<0>))) ==> TypeInfo<0>( Name(target.A) @@ -688,21 +689,21 @@ TypeInfo<0>( TypeInfo<2>( Name(target.N) Bases(builtins.tuple[target.A<0>]<3>) - Mro(target.N<2>, builtins.tuple<3>, builtins.object<1>) + Mro(target.N<2>, builtins.tuple<3>, typing.Sequence<4>, typing.Iterable<5>, builtins.object<1>) Names( - _NT<4> - __annotations__<5> (builtins.object<1>) - __doc__<6> (builtins.str<7>) - __new__<8> - _asdict<9> - _field_defaults<10> (builtins.object<1>) - _field_types<11> (builtins.object<1>) - _fields<12> (Tuple[builtins.str<7>, builtins.str<7>]) - _make<13> - _replace<14> - _source<15> (builtins.str<7>) - x<16> (target.A<0>) - y<17> (target.A<0>))) + _NT<6> + __annotations__<7> (builtins.object<1>) + __doc__<8> (builtins.str<9>) + __new__<10> + _asdict<11> + _field_defaults<12> (builtins.object<1>) + _field_types<13> (builtins.object<1>) + _fields<14> (Tuple[builtins.str<9>, builtins.str<9>]) + _make<15> + _replace<16> + _source<17> (builtins.str<9>) + x<18> (target.A<0>) + y<19> (target.A<0>))) [case testUnionType_types] import target @@ -1093,6 +1094,7 @@ N = NamedTuple('N', [('x', int)]) [file target.py] f = 1 [file target.py.next] +[builtins fixtures/tuple.pyi] [out] __main__: N: TypeInfo<0> @@ -1454,6 +1456,7 @@ bar: Literal[4] = 4 from typing_extensions import Literal def foo(x: Literal['3']) -> Literal['b']: pass bar: Literal[5] = 5 +[builtins fixtures/tuple.pyi] [out] MypyFile:1<0>( tmp/main diff --git a/test-data/unit/semanal-classvar.test b/test-data/unit/semanal-classvar.test index d39ee221efa2..8add559bdd27 100644 --- a/test-data/unit/semanal-classvar.test +++ b/test-data/unit/semanal-classvar.test @@ -132,6 +132,7 @@ main:2: error: 
Invalid type: ClassVar nested inside other type [case testTupleClassVar] from typing import ClassVar, Tuple x = None # type: Tuple[ClassVar, int] +[builtins fixtures/tuple.pyi] [out] main:2: error: Invalid type: ClassVar nested inside other type diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test index 042b39658df0..144218df6f58 100644 --- a/test-data/unit/semanal-errors.test +++ b/test-data/unit/semanal-errors.test @@ -116,6 +116,7 @@ def f() -> A[B[int]]: pass # E: "B" expects no type arguments, but 1 given from typing import Tuple class A: pass x = None # type: Tuple[A[int]] # E: "A" expects no type arguments, but 1 given +[builtins fixtures/tuple.pyi] [out] [case testInvalidNumberOfGenericArgsInFunctionType] @@ -908,26 +909,32 @@ A[1] # E: Invalid type: try using Literal[1] instead? [case testVariableDeclWithInvalidNumberOfTypes] x, y = 1, 2 # type: int, str, int # E: Incompatible number of tuple items +[builtins fixtures/tuple.pyi] [out] [case testVariableDeclWithInvalidNumberOfTypesNested] x, (y, z) = 1, (2, 3) # type: int, (str, int, int) # E: Incompatible number of tuple items +[builtins fixtures/tuple.pyi] [out] [case testVariableDeclWithInvalidNumberOfTypesNested2] x, (y, z) = 1, (2, 3) # type: int, (str, ) # E: Incompatible number of tuple items +[builtins fixtures/tuple.pyi] [out] [case testVariableDeclWithInvalidNumberOfTypesNested3] x, (y, z) = 1, (2, 3) # type: int, str # E: Tuple type expected for multiple variables +[builtins fixtures/tuple.pyi] [out] [case testVariableDeclWithInvalidNumberOfTypesNested4] x, (y, z) = 1, (2, 3) # type: int, str, int # E: Incompatible number of tuple items +[builtins fixtures/tuple.pyi] [out] [case testVariableDeclWithInvalidNumberOfTypesNested5] x, (y, ) = 1, (2, ) # type: int, str # E: Tuple type expected for multiple variables +[builtins fixtures/tuple.pyi] [out] [case testVariableDeclWithInvalidType] @@ -952,6 +959,7 @@ a.y, a.x = None, None # type: int, int \ # E: Type cannot be 
declared in assignment to non-self attribute a[1], a[2] = None, None # type: int, int \ # E: Unexpected type declaration +[builtins fixtures/tuple.pyi] [out] [case testMissingGenericImport] @@ -1318,6 +1326,7 @@ from typing import Tuple a = 1 # type: int a = 's' # type: str a = ('spam', 'spam', 'eggs', 'spam') # type: Tuple[str] +[builtins fixtures/tuple.pyi] [out] main:3: error: Name 'a' already defined on line 2 @@ -1367,6 +1376,7 @@ N = NamedTuple('N', [('a', int), class N: # E: Name 'N' already defined on line 2 pass +[builtins fixtures/tuple.pyi] [out] [case testDuplicateDefTypedDict] diff --git a/test-data/unit/semanal-namedtuple.test b/test-data/unit/semanal-namedtuple.test index 2ca12a21e6a8..b352e2d5fc6f 100644 --- a/test-data/unit/semanal-namedtuple.test +++ b/test-data/unit/semanal-namedtuple.test @@ -4,6 +4,7 @@ from collections import namedtuple N = namedtuple('N', ['a']) def f() -> N: pass +[builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(collections, [namedtuple]) @@ -20,6 +21,7 @@ MypyFile:1( from collections import namedtuple N = namedtuple('N', ['a', 'xyz']) def f() -> N: pass +[builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(collections, [namedtuple]) @@ -36,6 +38,7 @@ MypyFile:1( from collections import namedtuple N = namedtuple('N', ('a', 'xyz')) def f() -> N: pass +[builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(collections, [namedtuple]) @@ -52,6 +55,7 @@ MypyFile:1( from collections import namedtuple N = namedtuple('N', ' a xyz ') def f() -> N: pass +[builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(collections, [namedtuple]) @@ -68,6 +72,7 @@ MypyFile:1( from typing import NamedTuple N = NamedTuple('N', [('a', int), ('b', str)]) +[builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(typing, [NamedTuple]) @@ -79,6 +84,7 @@ MypyFile:1( from typing import NamedTuple N = NamedTuple('N', (('a', int), ('b', str))) +[builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(typing, 
[NamedTuple]) @@ -90,6 +96,7 @@ MypyFile:1( from collections import namedtuple N = namedtuple('N', ['x']) class A(N): pass +[builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(collections, [namedtuple]) @@ -107,6 +114,7 @@ MypyFile:1( [case testNamedTupleBaseClass2] from collections import namedtuple class A(namedtuple('N', ['x'])): pass +[builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(collections, [namedtuple]) @@ -121,6 +129,7 @@ MypyFile:1( [case testNamedTupleBaseClassWithItemTypes] from typing import NamedTuple class A(NamedTuple('N', [('x', int)])): pass +[builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(typing, [NamedTuple]) @@ -137,33 +146,40 @@ MypyFile:1( [case testNamedTupleWithTooFewArguments] from collections import namedtuple N = namedtuple('N') # E: Too few arguments for namedtuple() +[builtins fixtures/tuple.pyi] [case testNamedTupleWithInvalidName] from collections import namedtuple N = namedtuple(1, ['x']) # E: namedtuple() expects a string literal as the first argument +[builtins fixtures/tuple.pyi] [case testNamedTupleWithInvalidItems] from collections import namedtuple N = namedtuple('N', 1) # E: List or tuple literal expected as the second argument to namedtuple() +[builtins fixtures/tuple.pyi] [case testNamedTupleWithInvalidItems2] from collections import namedtuple N = namedtuple('N', ['x', 1]) # E: String literal expected as namedtuple() item +[builtins fixtures/tuple.pyi] [case testNamedTupleWithUnderscoreItemName] from collections import namedtuple N = namedtuple('N', ['_fallback']) # E: namedtuple() field names cannot start with an underscore: _fallback +[builtins fixtures/tuple.pyi] -- NOTE: The following code works at runtime but is not yet supported by mypy. -- Keyword arguments may potentially be supported in the future. 
[case testNamedTupleWithNonpositionalArgs] from collections import namedtuple N = namedtuple(typename='N', field_names=['x']) # E: Unexpected arguments to namedtuple() +[builtins fixtures/tuple.pyi] [case testInvalidNamedTupleBaseClass] from typing import NamedTuple class A(NamedTuple('N', [1])): pass # E: Tuple expected as NamedTuple() field class B(A): pass +[builtins fixtures/tuple.pyi] [case testInvalidNamedTupleBaseClass2] diff --git a/test-data/unit/semanal-typealiases.test b/test-data/unit/semanal-typealiases.test index c72cb9d08842..7230580c40a6 100644 --- a/test-data/unit/semanal-typealiases.test +++ b/test-data/unit/semanal-typealiases.test @@ -171,6 +171,7 @@ MypyFile:1( from typing import Tuple T = Tuple[int, str] def f(x: T) -> None: pass +[builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(typing, [Tuple]) @@ -430,6 +431,7 @@ MypyFile:1( from typing import Union, Tuple, Any A = Union['int', Tuple[int, Any]] a = 1 # type: A +[builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(typing, [Union, Tuple, Any]) diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test index 359eb292746c..64b2110db4d6 100644 --- a/test-data/unit/semanal-types.test +++ b/test-data/unit/semanal-types.test @@ -147,6 +147,7 @@ class A: pass class B: pass a, b = None, None # type: (A, B) x = a, b +[builtins fixtures/tuple.pyi] [out] MypyFile:1( ClassDef:2( @@ -358,6 +359,7 @@ MypyFile:1( [case testCastToTupleType] from typing import Tuple, cast cast(Tuple[int, str], None) +[builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(typing, [Tuple, cast]) @@ -464,6 +466,7 @@ from typing import TypeVar, Tuple, Generic t = TypeVar('t') class A(Generic[t]): pass def f(x: Tuple[int, t]) -> None: pass +[builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(typing, [TypeVar, Tuple, Generic]) @@ -701,6 +704,7 @@ MypyFile:1( [case testInvalidTupleType] from typing import Tuple t = None # type: Tuple[int, str, ...] # E: Unexpected '...' 
+[builtins fixtures/tuple.pyi] [out] [case testFunctionTypes] diff --git a/test-data/unit/typexport-basic.test b/test-data/unit/typexport-basic.test index be446e2c80e2..deb43f6d316f 100644 --- a/test-data/unit/typexport-basic.test +++ b/test-data/unit/typexport-basic.test @@ -906,6 +906,7 @@ class A: def f(self, *args) -> None: pass A.f +[builtins fixtures/tuple.pyi] [out] MemberExpr(10) : Overload(def (self: A), def (self: A, builtins.object)) @@ -920,6 +921,7 @@ class A: def f(self, *args): pass A.f +[builtins fixtures/tuple.pyi] [out] MemberExpr(10) : Overload(def (self: A) -> Any, def (self: A, Any) -> Any) @@ -974,6 +976,7 @@ class A(Generic[t]): ab, b = None, None # type: (A[B], B) A.f(ab, b) +[builtins fixtures/tuple.pyi] [out] CallExpr(13) : B From 55fcb524ad9a1fccdc70941274fbeb9b4d379b5a Mon Sep 17 00:00:00 2001 From: "Michael J. Sullivan" Date: Fri, 10 Jan 2020 03:11:18 -0800 Subject: [PATCH 043/117] Make format_type_distinctly n-ary instead of binary (#8270) --- mypy/messages.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/mypy/messages.py b/mypy/messages.py index 3f5cbca99ebd..a07616f22e2f 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1736,8 +1736,8 @@ def format_type_bare(typ: Type, return format_type_inner(typ, verbosity, find_type_overlaps(typ)) -def format_type_distinctly(type1: Type, type2: Type, bare: bool = False) -> Tuple[str, str]: - """Jointly format a pair of types to distinct strings. +def format_type_distinctly(*types: Type, bare: bool = False) -> Tuple[str, ...]: + """Jointly format types to distinct strings. Increase the verbosity of the type strings until they become distinct while also requiring that distinct types with the same short name are @@ -1748,16 +1748,18 @@ def format_type_distinctly(type1: Type, type2: Type, bare: bool = False) -> Tupl be quoted; callers who need to do post-processing of the strings before quoting them (such as prepending * or **) should use this. 
""" - overlapping = find_type_overlaps(type1, type2) + overlapping = find_type_overlaps(*types) for verbosity in range(2): - str1 = format_type_inner(type1, verbosity=verbosity, fullnames=overlapping) - str2 = format_type_inner(type2, verbosity=verbosity, fullnames=overlapping) - if str1 != str2: + strs = [ + format_type_inner(type, verbosity=verbosity, fullnames=overlapping) + for type in types + ] + if len(set(strs)) == len(strs): break if bare: - return (str1, str2) + return tuple(strs) else: - return (quote_type_string(str1), quote_type_string(str2)) + return tuple(quote_type_string(s) for s in strs) def pretty_callable(tp: CallableType) -> str: From 974a58d3cb3029044b481f9e3154fa3a185cf0f3 Mon Sep 17 00:00:00 2001 From: PattenR Date: Tue, 14 Jan 2020 13:00:54 +0000 Subject: [PATCH 044/117] Typeddict context returned correctly when unambiguous (#8212) Fixes #8156 This is a suggestion as to how we could handle getting the Typeddict context from a Union with more than one Typeddict. The idea is that we check each of the Typeddict types to find one who's keys match that of the expression, if more than one match it's then ambiguous. The slightly weird thing is that this matching is only applied in the Union case, if there is only a single Typeddict it's returned as before. 
Co-authored-by: rhys-carbon <47317532+rhys-carbon@users.noreply.github.com> --- mypy/checkexpr.py | 47 ++++++++++++++++++++++------- mypy/messages.py | 8 +++++ test-data/unit/check-typeddict.test | 21 +++++++++++++ 3 files changed, 65 insertions(+), 11 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 9a0815060bca..310aac82b13c 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -487,9 +487,8 @@ def check_typeddict_call(self, callee: TypedDictType, self.chk.fail(message_registry.INVALID_TYPEDDICT_ARGS, context) return AnyType(TypeOfAny.from_error) - def check_typeddict_call_with_dict(self, callee: TypedDictType, - kwargs: DictExpr, - context: Context) -> Type: + def validate_typeddict_kwargs( + self, kwargs: DictExpr) -> 'Optional[OrderedDict[str, Expression]]': item_args = [item[1] for item in kwargs.items] item_names = [] # List[str] @@ -504,12 +503,32 @@ def check_typeddict_call_with_dict(self, callee: TypedDictType, key_context = item_name_expr or item_arg self.chk.fail(message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, key_context) - return AnyType(TypeOfAny.from_error) + return None else: item_names.append(literal_value) + return OrderedDict(zip(item_names, item_args)) + + def match_typeddict_call_with_dict(self, callee: TypedDictType, + kwargs: DictExpr, + context: Context) -> bool: + validated_kwargs = self.validate_typeddict_kwargs(kwargs=kwargs) + if validated_kwargs is not None: + return (callee.required_keys <= set(validated_kwargs.keys()) + <= set(callee.items.keys())) + else: + return False - return self.check_typeddict_call_with_kwargs( - callee, OrderedDict(zip(item_names, item_args)), context) + def check_typeddict_call_with_dict(self, callee: TypedDictType, + kwargs: DictExpr, + context: Context) -> Type: + validated_kwargs = self.validate_typeddict_kwargs(kwargs=kwargs) + if validated_kwargs is not None: + return self.check_typeddict_call_with_kwargs( + callee, + kwargs=validated_kwargs, + context=context) + else: + 
return AnyType(TypeOfAny.from_error) def check_typeddict_call_with_kwargs(self, callee: TypedDictType, kwargs: 'OrderedDict[str, Expression]', @@ -3229,7 +3248,7 @@ def visit_dict_expr(self, e: DictExpr) -> Type: # an error, but returns the TypedDict type that matches the literal it found # that would cause a second error when that TypedDict type is returned upstream # to avoid the second error, we always return TypedDict type that was requested - typeddict_context = self.find_typeddict_context(self.type_context[-1]) + typeddict_context = self.find_typeddict_context(self.type_context[-1], e) if typeddict_context: self.check_typeddict_call_with_dict( callee=typeddict_context, @@ -3295,19 +3314,25 @@ def visit_dict_expr(self, e: DictExpr) -> Type: assert rv is not None return rv - def find_typeddict_context(self, context: Optional[Type]) -> Optional[TypedDictType]: + def find_typeddict_context(self, context: Optional[Type], + dict_expr: DictExpr) -> Optional[TypedDictType]: context = get_proper_type(context) if isinstance(context, TypedDictType): return context elif isinstance(context, UnionType): items = [] for item in context.items: - item_context = self.find_typeddict_context(item) - if item_context: + item_context = self.find_typeddict_context(item, dict_expr) + if (item_context is not None + and self.match_typeddict_call_with_dict( + item_context, dict_expr, dict_expr)): items.append(item_context) if len(items) == 1: - # Only one union item is TypedDict, so use the context as it's unambiguous. + # Only one union item is valid TypedDict for the given dict_expr, so use the + # context as it's unambiguous. return items[0] + if len(items) > 1: + self.msg.typeddict_context_ambiguous(items, dict_expr) # No TypedDict type in context. 
return None diff --git a/mypy/messages.py b/mypy/messages.py index a07616f22e2f..011527b00fd7 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1128,6 +1128,14 @@ def typeddict_key_not_found( else: self.fail("TypedDict {} has no key '{}'".format(format_type(typ), item_name), context) + def typeddict_context_ambiguous( + self, + types: List[TypedDictType], + context: Context) -> None: + formatted_types = ', '.join(list(format_type_distinctly(*types))) + self.fail('Type of TypedDict is ambiguous, could be any of ({})'.format( + formatted_types), context) + def typeddict_key_cannot_be_deleted( self, typ: TypedDictType, diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index 0a6b57d5cafa..3e6d6537b604 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -822,6 +822,27 @@ reveal_type(u(c, m_i_i)) # N: Revealed type is 'Union[typing.Mapping*[builtins.i reveal_type(u(c, m_s_a)) # N: Revealed type is 'Union[typing.Mapping*[builtins.str, Any], TypedDict('__main__.C', {'a': builtins.int, 'b': builtins.int})]' [builtins fixtures/dict.pyi] +[case testTypedDictUnionUnambiguousCase] +from typing import Union, Mapping, Any, cast +from typing_extensions import TypedDict, Literal + +A = TypedDict('A', {'@type': Literal['a-type'], 'a': str}) +B = TypedDict('B', {'@type': Literal['b-type'], 'b': int}) + +c: Union[A, B] = {'@type': 'a-type', 'a': 'Test'} +reveal_type(c) # N: Revealed type is 'Union[TypedDict('__main__.A', {'@type': Literal['a-type'], 'a': builtins.str}), TypedDict('__main__.B', {'@type': Literal['b-type'], 'b': builtins.int})]' +[builtins fixtures/tuple.pyi] + +[case testTypedDictUnionAmbiguousCase] +from typing import Union, Mapping, Any, cast +from typing_extensions import TypedDict, Literal + +A = TypedDict('A', {'@type': Literal['a-type'], 'a': str}) +B = TypedDict('B', {'@type': Literal['a-type'], 'a': str}) + +c: Union[A, B] = {'@type': 'a-type', 'a': 'Test'} # E: Type of 
TypedDict is ambiguous, could be any of ("A", "B") \ + # E: Incompatible types in assignment (expression has type "Dict[str, str]", variable has type "Union[A, B]") +[builtins fixtures/dict.pyi] -- Use dict literals From 07a20a6a5a0a76e9d0f53d1909eb630b4312582e Mon Sep 17 00:00:00 2001 From: "Michael J. Sullivan" Date: Thu, 16 Jan 2020 03:22:18 -0800 Subject: [PATCH 045/117] Fix some crashes in dataclasses (#8271) Store the type of an attribute in `DataclassAttribute` so we don't need to pull it out of `TypeInfo` in a fragile way (since the child might try to override it in a way that breaks things). This also allows us to get rid of some pretty dodgy code having to do with InitVars (that could cause an incremental mode crash.) Fixes #6809 and an unreported incremental bug. --- mypy/plugins/common.py | 19 +++++-- mypy/plugins/dataclasses.py | 72 ++++++++++--------------- test-data/unit/check-attr.test | 15 ++++++ test-data/unit/check-dataclasses.test | 75 +++++++++++++++++++++++++++ 4 files changed, 132 insertions(+), 49 deletions(-) diff --git a/mypy/plugins/common.py b/mypy/plugins/common.py index 6f2f5845cbeb..66105f96a3eb 100644 --- a/mypy/plugins/common.py +++ b/mypy/plugins/common.py @@ -1,15 +1,18 @@ -from typing import List, Optional +from typing import List, Optional, Union from mypy.nodes import ( ARG_POS, MDEF, Argument, Block, CallExpr, Expression, SYMBOL_FUNCBASE_TYPES, - FuncDef, PassStmt, RefExpr, SymbolTableNode, Var + FuncDef, PassStmt, RefExpr, SymbolTableNode, Var, JsonDict, ) -from mypy.plugin import ClassDefContext +from mypy.plugin import ClassDefContext, SemanticAnalyzerPluginInterface from mypy.semanal import set_callable_name -from mypy.types import CallableType, Overloaded, Type, TypeVarDef, get_proper_type +from mypy.types import ( + CallableType, Overloaded, Type, TypeVarDef, deserialize_type, get_proper_type, +) from mypy.typevars import fill_typevars from mypy.util import get_unique_redefinition_name from mypy.typeops import 
try_getting_str_literals # noqa: F401 # Part of public API +from mypy.fixup import TypeFixer def _get_decorator_bool_argument( @@ -128,3 +131,11 @@ def add_method( info.names[name] = SymbolTableNode(MDEF, func, plugin_generated=True) info.defn.defs.body.append(func) + + +def deserialize_and_fixup_type( + data: Union[str, JsonDict], api: SemanticAnalyzerPluginInterface +) -> Type: + typ = deserialize_type(data) + typ.accept(TypeFixer(api.modules, allow_missing=False)) + return typ diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 86b24f0a58fb..81f50d1a4c4d 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -5,12 +5,14 @@ from mypy.nodes import ( ARG_OPT, ARG_POS, MDEF, Argument, AssignmentStmt, CallExpr, - Context, Expression, FuncDef, JsonDict, NameExpr, RefExpr, + Context, Expression, JsonDict, NameExpr, RefExpr, SymbolTableNode, TempNode, TypeInfo, Var, TypeVarExpr, PlaceholderNode ) -from mypy.plugin import ClassDefContext -from mypy.plugins.common import add_method, _get_decorator_bool_argument -from mypy.types import Instance, NoneType, TypeVarDef, TypeVarType, get_proper_type +from mypy.plugin import ClassDefContext, SemanticAnalyzerPluginInterface +from mypy.plugins.common import ( + add_method, _get_decorator_bool_argument, deserialize_and_fixup_type, +) +from mypy.types import Type, Instance, NoneType, TypeVarDef, TypeVarType, get_proper_type from mypy.server.trigger import make_wildcard_trigger # The set of decorators that generate dataclasses. 
@@ -31,6 +33,7 @@ def __init__( has_default: bool, line: int, column: int, + type: Optional[Type], ) -> None: self.name = name self.is_in_init = is_in_init @@ -38,19 +41,21 @@ def __init__( self.has_default = has_default self.line = line self.column = column + self.type = type - def to_argument(self, info: TypeInfo) -> Argument: + def to_argument(self) -> Argument: return Argument( - variable=self.to_var(info), - type_annotation=info[self.name].type, + variable=self.to_var(), + type_annotation=self.type, initializer=None, kind=ARG_OPT if self.has_default else ARG_POS, ) - def to_var(self, info: TypeInfo) -> Var: - return Var(self.name, info[self.name].type) + def to_var(self) -> Var: + return Var(self.name, self.type) def serialize(self) -> JsonDict: + assert self.type return { 'name': self.name, 'is_in_init': self.is_in_init, @@ -58,11 +63,16 @@ def serialize(self) -> JsonDict: 'has_default': self.has_default, 'line': self.line, 'column': self.column, + 'type': self.type.serialize(), } @classmethod - def deserialize(cls, info: TypeInfo, data: JsonDict) -> 'DataclassAttribute': - return cls(**data) + def deserialize( + cls, info: TypeInfo, data: JsonDict, api: SemanticAnalyzerPluginInterface + ) -> 'DataclassAttribute': + data = data.copy() + typ = deserialize_and_fixup_type(data.pop('type'), api) + return cls(type=typ, **data) class DataclassTransformer: @@ -81,12 +91,7 @@ def transform(self) -> None: # Some definitions are not ready, defer() should be already called. return for attr in attributes: - node = info.get(attr.name) - if node is None: - # Nodes of superclass InitVars not used in __init__ cannot be reached. 
- assert attr.is_init_var and not attr.is_in_init - continue - if node.type is None: + if attr.type is None: ctx.api.defer() return decorator_arguments = { @@ -106,7 +111,7 @@ def transform(self) -> None: add_method( ctx, '__init__', - args=[attr.to_argument(info) for attr in attributes if attr.is_in_init], + args=[attr.to_argument() for attr in attributes if attr.is_in_init], return_type=NoneType(), ) @@ -191,7 +196,7 @@ def reset_init_only_vars(self, info: TypeInfo, attributes: List[DataclassAttribu del info.names[attr.name] else: # Nodes of superclass InitVars not used in __init__ cannot be reached. - assert attr.is_init_var and not attr.is_in_init + assert attr.is_init_var for stmt in info.defn.defs.body: if isinstance(stmt, AssignmentStmt) and stmt.unanalyzed_type: lvalue = stmt.lvalues[0] @@ -282,6 +287,7 @@ def collect_attributes(self) -> Optional[List[DataclassAttribute]]: has_default=has_default, line=stmt.line, column=stmt.column, + type=sym.type, )) # Next, collect attributes belonging to any class in the MRO @@ -301,23 +307,7 @@ def collect_attributes(self) -> Optional[List[DataclassAttribute]]: for data in info.metadata['dataclass']['attributes']: name = data['name'] # type: str if name not in known_attrs: - attr = DataclassAttribute.deserialize(info, data) - if attr.is_init_var: - # InitVars are removed from classes so, in order for them to be inherited - # properly, we need to re-inject them into subclasses' sym tables here. - # To do that, we look 'em up from the parents' __init__. These variables - # are subsequently removed from the sym table at the end of - # DataclassTransformer.transform. - superclass_init = info.get_method('__init__') - if isinstance(superclass_init, FuncDef): - attr_node = _get_arg_from_init(superclass_init, attr.name) - if attr_node is None: - # Continue the loop: we will look it up in the next MRO entry. 
- # Don't add it to the known or super attrs because we don't know - # anything about it yet - continue - else: - cls.info.names[attr.name] = attr_node + attr = DataclassAttribute.deserialize(info, data, ctx.api) known_attrs.add(name) super_attrs.append(attr) elif all_attrs: @@ -365,21 +355,13 @@ def _freeze(self, attributes: List[DataclassAttribute]) -> None: assert isinstance(var, Var) var.is_property = True else: - var = attr.to_var(info) + var = attr.to_var() var.info = info var.is_property = True var._fullname = info.fullname + '.' + var.name info.names[var.name] = SymbolTableNode(MDEF, var) -def _get_arg_from_init(init_method: FuncDef, attr_name: str) -> Optional[SymbolTableNode]: - """Given an init method and an attribute name, find the Var in the init method's args.""" - for arg, arg_name in zip(init_method.arguments, init_method.arg_names): - if arg_name == attr_name: - return SymbolTableNode(MDEF, arg.variable) - return None - - def dataclass_class_maker_callback(ctx: ClassDefContext) -> None: """Hooks into the class typechecking process to add support for dataclasses. 
""" diff --git a/test-data/unit/check-attr.test b/test-data/unit/check-attr.test index 9748ab8942bc..fd6fc2b3755e 100644 --- a/test-data/unit/check-attr.test +++ b/test-data/unit/check-attr.test @@ -1244,3 +1244,18 @@ class A2: b: int = attr.ib() [builtins fixtures/list.pyi] + +[case testAttrsInheritanceNoAnnotation] +import attr + +@attr.s +class A: + foo = attr.ib() # type: int + +x = 0 +@attr.s +class B(A): + foo = x + +reveal_type(B) # N: Revealed type is 'def (foo: builtins.int) -> __main__.B' +[builtins fixtures/bool.pyi] diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 36b62d53dbf1..97bb9954ec29 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -723,6 +723,51 @@ C(1, 'a') # E: Argument 2 to "C" has incompatible type "str"; expected "int" [builtins fixtures/primitives.pyi] +[case testDataclassesInitVarIncremental] +import a + +[file a.py] +import dataclasses +from b import A + +@dataclasses.dataclass +class B(A): + b: int = dataclasses.field(init=False) + + def __post_init__(self, a: int) -> None: + super().__post_init__(a) + self.b = a + 1 + +[file a.py.2] +import dataclasses +from b import A + +@dataclasses.dataclass +class B(A): + b: int = dataclasses.field(init=False) + + def __post_init__(self, a: int) -> None: + super().__post_init__(a) + self.b = a + 2 + +reveal_type(B) + +[file b.py] +import dataclasses + +@dataclasses.dataclass +class A: + a: dataclasses.InitVar[int] + _a: int = dataclasses.field(init=False) + + def __post_init__(self, a: int) -> None: + self._a = a +[out2] +tmp/a.py:12: note: Revealed type is 'def (a: builtins.int) -> a.B' + +[builtins fixtures/primitives.pyi] + + [case testNoComplainFieldNone] # flags: --python-version 3.6 # flags: --no-strict-optional @@ -957,3 +1002,33 @@ class A: # E: Name 'x' already defined (possibly by an import) @dataclass class B(A): pass + +[case testDataclassInheritanceNoAnnotation] +from dataclasses import 
dataclass + +@dataclass +class A: + foo: int + +x = 0 +@dataclass +class B(A): + foo = x + +reveal_type(B) # N: Revealed type is 'def (foo: builtins.int) -> __main__.B' + +[case testDataclassInheritanceNoAnnotation2] +from dataclasses import dataclass + +@dataclass(frozen=True) +class A: + foo: int + +@dataclass +class B(A): + @property + def foo(self) -> int: pass # E: Signature of "foo" incompatible with supertype "A" + +reveal_type(B) # N: Revealed type is 'def (foo: builtins.int) -> __main__.B' + +[builtins fixtures/property.pyi] From e9b7db7badaeec1b31dc650700bd743cca295dc0 Mon Sep 17 00:00:00 2001 From: Xuanda Yang Date: Thu, 16 Jan 2020 21:22:58 +0800 Subject: [PATCH 046/117] [mypyc] Implement str.split primitive (#8153) Work on mypyc/mypyc#644 --- mypyc/lib-rt/CPy.h | 10 ++++++++++ mypyc/ops_str.py | 17 ++++++++++++++++- mypyc/test-data/fixtures/ir.py | 1 + mypyc/test-data/run.test | 22 ++++++++++++++++++++++ 4 files changed, 49 insertions(+), 1 deletion(-) diff --git a/mypyc/lib-rt/CPy.h b/mypyc/lib-rt/CPy.h index fd0995149d21..8dcf6bd6f894 100644 --- a/mypyc/lib-rt/CPy.h +++ b/mypyc/lib-rt/CPy.h @@ -930,6 +930,16 @@ static PyObject *CPyDict_FromAny(PyObject *obj) { } } +static PyObject *CPyStr_Split(PyObject *str, PyObject *sep, CPyTagged max_split) +{ + Py_ssize_t temp_max_split = CPyTagged_AsSsize_t(max_split); + if (temp_max_split == -1 && PyErr_Occurred()) { + PyErr_SetString(PyExc_OverflowError, "Python int too large to convert to C ssize_t"); + return NULL; + } + return PyUnicode_Split(str, sep, temp_max_split); +} + static PyObject *CPyIter_Next(PyObject *iter) { return (*iter->ob_type->tp_iternext)(iter); diff --git a/mypyc/ops_str.py b/mypyc/ops_str.py index 887c3406780f..f3bedcc1ed13 100644 --- a/mypyc/ops_str.py +++ b/mypyc/ops_str.py @@ -1,7 +1,8 @@ from typing import List, Callable from mypyc.ops import ( - object_rprimitive, str_rprimitive, bool_rprimitive, ERR_MAGIC, ERR_NEVER, EmitterInterface + object_rprimitive, str_rprimitive, 
bool_rprimitive, ERR_MAGIC, ERR_NEVER, EmitterInterface, + RType, int_rprimitive, list_rprimitive, EmitCallback ) from mypyc.ops_primitive import func_op, binary_op, simple_emit, name_ref_op, method_op @@ -31,6 +32,20 @@ error_kind=ERR_MAGIC, emit=simple_emit('{dest} = PyUnicode_Join({args[0]}, {args[1]});')) + +str_split_types = [str_rprimitive, str_rprimitive, int_rprimitive] # type: List[RType] +str_split_emits = [simple_emit('{dest} = PyUnicode_Split({args[0]}, NULL, -1);'), + simple_emit('{dest} = PyUnicode_Split({args[0]}, {args[1]}, -1);'), + simple_emit('{dest} = CPyStr_Split({args[0]}, {args[1]}, {args[2]});')] \ + # type: List[EmitCallback] +for i in range(len(str_split_types)): + method_op( + name='split', + arg_types=str_split_types[0:i+1], + result_type=list_rprimitive, + error_kind=ERR_MAGIC, + emit=str_split_emits[i]) + # PyUnicodeAppend makes an effort to reuse the LHS when the refcount # is 1. This is super dodgy but oh well, the interpreter does it. binary_op(op='+=', diff --git a/mypyc/test-data/fixtures/ir.py b/mypyc/test-data/fixtures/ir.py index 323800429522..bcd9ca5ebb2d 100644 --- a/mypyc/test-data/fixtures/ir.py +++ b/mypyc/test-data/fixtures/ir.py @@ -54,6 +54,7 @@ def __le__(self, x: str) -> bool: ... def __gt__(self, x: str) -> bool: ... def __ge__(self, x: str) -> bool: ... def __contains__(self, item: str) -> bool: pass + def split(self, sep: Optional[str] = None, max: Optional[int] = None) -> List[str]: pass def strip (self, item: str) -> str: pass def join(self, x: Iterable[str]) -> str: pass def format(self, *args: Any, **kwargs: Any) -> str: ... 
diff --git a/mypyc/test-data/run.test b/mypyc/test-data/run.test index d867791d6bdf..ba2915d7696f 100644 --- a/mypyc/test-data/run.test +++ b/mypyc/test-data/run.test @@ -4701,3 +4701,25 @@ import b assert f(20) == 61 assert isinstance(whatever, b.A) + +[case testStrSplit] +from typing import List, Optional +def f(s: str, sep: Optional[str] = None, max_split: Optional[int] = None) -> List[str]: + if sep is not None: + if max_split is not None: + return s.split(sep, max_split) + else: + return s.split(sep) + return s.split() + +[file driver.py] +from native import f +s = "abc abcd abcde abcdef" + +assert f(s) == ["abc", "abcd", "abcde", "abcdef"] +assert f(s, " ") == ["abc", "abcd", "abcde", "abcdef"] +assert f(s, "-") == ["abc abcd abcde abcdef"] +assert f(s, " ", -1) == ["abc", "abcd", "abcde", "abcdef"] +assert f(s, " ", 0) == ["abc abcd abcde abcdef"] +assert f(s, " ", 1) == ["abc", "abcd abcde abcdef"] +assert f(s, " ", 2) == ["abc", "abcd", "abcde abcdef"] From 4731070f690c9c2c98b3d38f9575646be97d5a06 Mon Sep 17 00:00:00 2001 From: Shantanu Date: Thu, 16 Jan 2020 06:11:12 -0800 Subject: [PATCH 047/117] stubtest: various fixes (#8288) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is enough to get stubtest working again, e.g., with `python3 stubtest.py zipfile`. I'm not familiar enough with the code to know precisely why we need to disable incremental, but from tracing the code we seemed to run into issues with mypy.Build.State.meta – let me know if there's a better fix. 
--- scripts/stubtest.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/scripts/stubtest.py b/scripts/stubtest.py index 5bbadb2f4cbe..048075f1445e 100644 --- a/scripts/stubtest.py +++ b/scripts/stubtest.py @@ -181,6 +181,13 @@ def verify_decorator(node: nodes.Decorator, yield None +@verify.register(nodes.TypeAlias) +def verify_typealias(node: nodes.TypeAlias, + module_node: Optional[DumpNode]) -> Iterator[ErrorParts]: + if False: + yield None + + def dump_module(name: str) -> DumpNode: mod = importlib.import_module(name) return {'type': 'file', 'names': module_to_json(mod)} @@ -191,8 +198,7 @@ def build_stubs(options: Options, mod: str) -> Dict[str, nodes.MypyFile]: sources = find_module_cache.find_modules_recursive(mod) try: - res = build.build(sources=sources, - options=options) + res = build.build(sources=sources, options=options) messages = res.errors except CompileError as error: messages = error.messages @@ -212,7 +218,7 @@ def main(args: List[str]) -> Iterator[Error]: modules = args[1:] options = Options() - options.python_version = (3, 6) + options.incremental = False data_dir = default_data_dir() search_path = compute_search_paths([], options, data_dir) find_module_cache = FindModuleCache(search_path) From e355c28c6c37801ab911b15d8c6da95df805202b Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 16 Jan 2020 14:26:18 +0000 Subject: [PATCH 048/117] Include --strict-equality in --strict (#8290) Fixes #7910. The stub changes are needed for clean output when using --strict. 
--- mypy/main.py | 2 +- test-data/unit/check-flags.test | 8 ++++++++ test-data/unit/fixtures/ops.pyi | 13 +++++++------ 3 files changed, 16 insertions(+), 7 deletions(-) diff --git a/mypy/main.py b/mypy/main.py index 61f069a79950..4b8d9c5f7b0d 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -602,7 +602,7 @@ def add_invertible_flag(flag: str, help="Treat imports as private unless aliased", group=strictness_group) - add_invertible_flag('--strict-equality', default=False, strict_flag=False, + add_invertible_flag('--strict-equality', default=False, strict_flag=True, help="Prohibit equality, identity, and container checks for" " non-overlapping types", group=strictness_group) diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index 38fb0213bcfe..cf6d810d7357 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -1149,6 +1149,14 @@ def f(c: A) -> None: # E: Missing type parameters for generic type "A" pass [out] +[case testStrictAndStrictEquality] +# flags: --strict +x = 0 +y = '' +if x == y: # E: Non-overlapping equality check (left operand type: "int", right operand type: "str") + int() +[builtins fixtures/ops.pyi] + [case testStrictEqualityPerFile] # flags: --config-file tmp/mypy.ini import b diff --git a/test-data/unit/fixtures/ops.pyi b/test-data/unit/fixtures/ops.pyi index 0c3497b1667f..d5845aba43c6 100644 --- a/test-data/unit/fixtures/ops.pyi +++ b/test-data/unit/fixtures/ops.pyi @@ -13,14 +13,14 @@ class type: pass class slice: pass -class tuple(Sequence[Tco], Generic[Tco]): +class tuple(Sequence[Tco]): def __getitem__(self, x: int) -> Tco: pass def __eq__(self, x: object) -> bool: pass def __ne__(self, x: object) -> bool: pass - def __lt__(self, x: 'tuple') -> bool: pass - def __le__(self, x: 'tuple') -> bool: pass - def __gt__(self, x: 'tuple') -> bool: pass - def __ge__(self, x: 'tuple') -> bool: pass + def __lt__(self, x: Tuple[Tco, ...]) -> bool: pass + def __le__(self, x: Tuple[Tco, ...]) -> bool: 
pass + def __gt__(self, x: Tuple[Tco, ...]) -> bool: pass + def __ge__(self, x: Tuple[Tco, ...]) -> bool: pass class function: pass @@ -70,6 +70,7 @@ class complex: class BaseException: pass -def __print(a1=None, a2=None, a3=None, a4=None): pass +def __print(a1: object = None, a2: object = None, a3: object = None, + a4: object = None) -> None: pass class ellipsis: pass From 6e567a655a0d13f5d2cc3bc9531d06a0e5bd0c5b Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 16 Jan 2020 14:26:33 +0000 Subject: [PATCH 049/117] Strict equality: detect always false container check against tuple type (#8291) Tuple types weren't detected as containers. Fixes #8286. --- mypy/checker.py | 2 ++ test-data/unit/check-expressions.test | 7 +++++++ 2 files changed, 9 insertions(+) diff --git a/mypy/checker.py b/mypy/checker.py index fdd1c2422b06..534c4bed24b9 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -3461,6 +3461,8 @@ def analyze_container_item_type(self, typ: Type) -> Optional[Type]: super_instance = map_instance_to_supertype(typ, supertype) assert len(super_instance.args) == 1 return super_instance.args[0] + if isinstance(typ, TupleType): + return self.analyze_container_item_type(tuple_fallback(typ)) return None def analyze_index_variables(self, index: Expression, item_type: Type, diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index 7eca7432aad0..67ad5aecc221 100644 --- a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -2754,6 +2754,13 @@ assert u'foo' == 'foo' assert u'foo' == u'bar' # E: Non-overlapping equality check (left operand type: "Literal[u'foo']", right operand type: "Literal[u'bar']") [builtins_py2 fixtures/python2.pyi] +[case testStrictEqualityWithFixedLengthTupleInCheck] +# flags: --strict-equality +if 1 in ('x', 'y'): # E: Non-overlapping container check (element type: "int", container item type: "str") + pass +[builtins fixtures/tuple.pyi] +[typing 
fixtures/typing-full.pyi] + [case testUnimportedHintAny] def f(x: Any) -> None: # E: Name 'Any' is not defined \ # N: Did you forget to import it from "typing"? (Suggestion: "from typing import Any") From 0b9089e9c9be2ad0b345b1d3743f808365d4aefc Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Thu, 16 Jan 2020 17:31:13 +0000 Subject: [PATCH 050/117] Sync typeshed (#8297) --- mypy/typeshed | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/typeshed b/mypy/typeshed index a06abc5dff29..375e8c112728 160000 --- a/mypy/typeshed +++ b/mypy/typeshed @@ -1 +1 @@ -Subproject commit a06abc5dff2928b7111452fe9f44a922585ebde0 +Subproject commit 375e8c11272889cbb653a9cf680ff9b889309105 From 74953402c639f5ca343e06492343ff0b111c5291 Mon Sep 17 00:00:00 2001 From: Cohen Karnell Date: Fri, 17 Jan 2020 06:19:47 -0500 Subject: [PATCH 051/117] Allow redefining TypedDict keys while still throwing an error (#8109) Creating a TypedDict type that is similar to an existing one but has different types for one or two keys was impossible and required redefining the TypedDict completely, which can be very verbose in the case of complex types. Throwing a type error in the same way as before, but allowing the overwrite to go through, seems much more reasonable for quality of life purposes. 
--- mypy/semanal_typeddict.py | 10 +++++----- test-data/unit/check-typeddict.test | 12 ++++++------ 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py index 855d0e1b6a1d..b8f0308d552b 100644 --- a/mypy/semanal_typeddict.py +++ b/mypy/semanal_typeddict.py @@ -72,7 +72,9 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> Tuple[bool, Optional[Typ keys = [] # type: List[str] types = [] required_keys = set() - for base in typeddict_bases: + + # Iterate over bases in reverse order so that leftmost base class' keys take precedence + for base in reversed(typeddict_bases): assert isinstance(base, RefExpr) assert isinstance(base.node, TypeInfo) assert isinstance(base.node.typeddict_type, TypedDictType) @@ -81,9 +83,8 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> Tuple[bool, Optional[Typ valid_items = base_items.copy() for key in base_items: if key in keys: - self.fail('Cannot overwrite TypedDict field "{}" while merging' + self.fail('Overwriting TypedDict field "{}" while merging' .format(key), defn) - valid_items.pop(key) keys.extend(valid_items.keys()) types.extend(valid_items.values()) required_keys.update(base_typed_dict.required_keys) @@ -132,9 +133,8 @@ def analyze_typeddict_classdef_fields( else: name = stmt.lvalues[0].name if name in (oldfields or []): - self.fail('Cannot overwrite TypedDict field "{}" while extending' + self.fail('Overwriting TypedDict field "{}" while extending' .format(name), stmt) - continue if name in fields: self.fail('Duplicate TypedDict field "{}"'.format(name), stmt) continue diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index 3e6d6537b604..aef1fe2de87b 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -202,7 +202,7 @@ p: Point reveal_type(p) # N: Revealed type is 'TypedDict('__main__.Point', {'x': builtins.int, '_y': builtins.int})' [builtins fixtures/dict.pyi] -[case 
testCannotCreateTypedDictWithClassOverwriting] +[case testCannotCreateTypedDictWithDuplicateField] # flags: --python-version 3.6 from mypy_extensions import TypedDict @@ -214,7 +214,7 @@ b: Bad reveal_type(b) # N: Revealed type is 'TypedDict('__main__.Bad', {'x': builtins.int})' [builtins fixtures/dict.pyi] -[case testCannotCreateTypedDictWithClassOverwriting2] +[case testCanCreateTypedDictWithClassOverwriting] # flags: --python-version 3.6 from mypy_extensions import TypedDict @@ -222,24 +222,24 @@ class Point1(TypedDict): x: int class Point2(TypedDict): x: float -class Bad(Point1, Point2): # E: Cannot overwrite TypedDict field "x" while merging +class Bad(Point1, Point2): # E: Overwriting TypedDict field "x" while merging pass b: Bad reveal_type(b) # N: Revealed type is 'TypedDict('__main__.Bad', {'x': builtins.int})' [builtins fixtures/dict.pyi] -[case testCannotCreateTypedDictWithClassOverwriting2] +[case testCanCreateTypedDictWithClassOverwriting2] # flags: --python-version 3.6 from mypy_extensions import TypedDict class Point1(TypedDict): x: int class Point2(Point1): - x: float # E: Cannot overwrite TypedDict field "x" while extending + x: float # E: Overwriting TypedDict field "x" while extending p2: Point2 -reveal_type(p2) # N: Revealed type is 'TypedDict('__main__.Point2', {'x': builtins.int})' +reveal_type(p2) # N: Revealed type is 'TypedDict('__main__.Point2', {'x': builtins.float})' [builtins fixtures/dict.pyi] From c784e3b9b7fa4176c229fb3d65d886c0fbd9767a Mon Sep 17 00:00:00 2001 From: Xuanda Yang Date: Fri, 17 Jan 2020 20:22:40 +0800 Subject: [PATCH 052/117] Add docs for --local-partial-types option (#8201) Resolves #8046. --- docs/source/command_line.rst | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst index 0cfcbe98cb86..d0fe5430c4e0 100644 --- a/docs/source/command_line.rst +++ b/docs/source/command_line.rst @@ -482,6 +482,33 @@ of the above sections. 
# 'items' now has type List[List[str]] ... +.. option:: --local-partial-types + + In mypy, the most common cases for partial types are variables initialized using ``None``, + but without explicit ``Optional`` annotations. By default, mypy won't check partial types + spanning module top level or class top level. This flag changes the behavior to only allow + partial types at local level, therefore it disallows inferring variable type for ``None`` + from two assignments in different scopes. For example: + + .. code-block:: python + + from typing import Optional + + a = None # Need type annotation here if using --local-partial-types + b = None # type: Optional[int] + + class Foo: + bar = None # Need type annotation here if using --local-partial-types + baz = None # type: Optional[int] + + def __init__(self) -> None + self.bar = 1 + + reveal_type(Foo().bar) # Union[int, None] without --local-partial-types + + Note: this option is always implicitly enabled in mypy daemon and will become + enabled by default for mypy in a future release. + .. option:: --no-implicit-reexport By default, imported values to a module are treated as exported and mypy allows From 861f01c3be99b9faf2e8d42e316aee4ec6e571c4 Mon Sep 17 00:00:00 2001 From: Xuanda Yang Date: Fri, 17 Jan 2020 21:23:59 +0800 Subject: [PATCH 053/117] Don't make underscored attributes as enum members (#8302) Fixes #5312. From @JelleZijlstra 's description, for underscored attributes, we don't make them as enum members. 
--- mypy/checkmember.py | 38 ++++++++++++++++++++++------------ test-data/unit/check-enum.test | 26 +++++++++++++++++++++++ 2 files changed, 51 insertions(+), 13 deletions(-) diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 65c84793eee3..a80db832bece 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -705,19 +705,9 @@ def analyze_class_attribute_access(itype: Instance, check_final_member(name, info, mx.msg, mx.context) if info.is_enum and not (mx.is_lvalue or is_decorated or is_method): - # Skip "_order_" and "__order__", since Enum will remove it - if name in ("_order_", "__order__"): - return mx.msg.has_no_attr( - mx.original_type, itype, name, mx.context, mx.module_symbol_table - ) - - enum_literal = LiteralType(name, fallback=itype) - # When we analyze enums, the corresponding Instance is always considered to be erased - # due to how the signature of Enum.__new__ is `(cls: Type[_T], value: object) -> _T` - # in typeshed. However, this is really more of an implementation detail of how Enums - # are typed, and we really don't want to treat every single Enum value as if it were - # from type variable substitution. So we reset the 'erased' field here. 
- return itype.copy_modified(erased=False, last_known_value=enum_literal) + enum_class_attribute_type = analyze_enum_class_attribute_access(itype, name, mx) + if enum_class_attribute_type: + return enum_class_attribute_type t = node.type if t: @@ -815,6 +805,28 @@ def analyze_class_attribute_access(itype: Instance, return typ +def analyze_enum_class_attribute_access(itype: Instance, + name: str, + mx: MemberContext, + ) -> Optional[Type]: + # Skip "_order_" and "__order__", since Enum will remove it + if name in ("_order_", "__order__"): + return mx.msg.has_no_attr( + mx.original_type, itype, name, mx.context, mx.module_symbol_table + ) + # For other names surrendered by underscores, we don't make them Enum members + if name.startswith('__') and name.endswith("__") and name.replace('_', '') != '': + return None + + enum_literal = LiteralType(name, fallback=itype) + # When we analyze enums, the corresponding Instance is always considered to be erased + # due to how the signature of Enum.__new__ is `(cls: Type[_T], value: object) -> _T` + # in typeshed. However, this is really more of an implementation detail of how Enums + # are typed, and we really don't want to treat every single Enum value as if it were + # from type variable substitution. So we reset the 'erased' field here. 
+ return itype.copy_modified(erased=False, last_known_value=enum_literal) + + def add_class_tvars(t: ProperType, isuper: Optional[Instance], is_classmethod: bool, original_type: Type, diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test index 8984b68cbc24..cf9bb55a946c 100644 --- a/test-data/unit/check-enum.test +++ b/test-data/unit/check-enum.test @@ -1136,3 +1136,29 @@ else: reveal_type(x4) # N: Revealed type is '__main__.Foo' reveal_type(x5) # N: Revealed type is '__main__.Foo' [builtins fixtures/primitives.pyi] + +[case testPrivateAttributeNotAsEnumMembers] +import enum + +class Comparator(enum.Enum): + LessThan = "<" + LessThanOrEqualTo = "<=" + EqualTo = "==" + NotEqualTo = "!=" + GreaterThanOrEqualTo = ">=" + GreaterThan = ">" + + __foo__ = { + LessThan: 1, + LessThanOrEqualTo: 2, + EqualTo: 3, + NotEqualTo: 4, + GreaterThanOrEqualTo: 5, + GreaterThan: 6, + } + + def foo(self) -> int: + return Comparator.__foo__[self.value] + +reveal_type(Comparator.__foo__) # N: Revealed type is 'builtins.dict[builtins.str, builtins.int]' +[builtins fixtures/dict.pyi] From 5d8c0cd28f8c038e853731d2cd12578412945beb Mon Sep 17 00:00:00 2001 From: Ethan Smith Date: Mon, 20 Jan 2020 18:30:00 -0800 Subject: [PATCH 054/117] Pin pytest (#8308) According to https://github.com/pytest-dev/pytest/issues/6492 pytest 5.3.2 broke some things, so we should pin for now. 
--- test-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test-requirements.txt b/test-requirements.txt index f741bc52e133..649438743053 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -5,7 +5,7 @@ flake8-bugbear; python_version >= '3.5' flake8-pyi; python_version >= '3.6' lxml>=4.4.0 psutil>=4.0 -pytest>=5.1.1 +pytest==5.3.2 pytest-xdist>=1.22 # pytest-xdist depends on pytest-forked and 1.1.0 doesn't install clean on macOS 3.5 pytest-forked>=1.0.0,<1.1.0 From 0e6e9c6c9686820c40bd48bd211bf0051c40eb72 Mon Sep 17 00:00:00 2001 From: Xuanda Yang Date: Tue, 21 Jan 2020 23:49:31 +0800 Subject: [PATCH 055/117] replace @abstractproperty usage in mypy (#8310) --- mypy/plugin.py | 8 +++++--- mypy/semanal_shared.py | 8 +++++--- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/mypy/plugin.py b/mypy/plugin.py index 74dc99b7bd98..ed2d80cfaf29 100644 --- a/mypy/plugin.py +++ b/mypy/plugin.py @@ -119,7 +119,7 @@ class C: pass semantic analyzer is enabled (it's always true in mypy 0.730 and later). 
""" -from abc import abstractmethod, abstractproperty +from abc import abstractmethod from typing import Any, Callable, List, Tuple, Optional, NamedTuple, TypeVar, Dict from mypy_extensions import trait, mypyc_attr @@ -214,7 +214,8 @@ class CheckerPluginInterface: path = None # type: str # Type context for type inference - @abstractproperty + @property + @abstractmethod def type_context(self) -> List[Optional[Type]]: """Return the type context of the plugin""" raise NotImplementedError @@ -348,7 +349,8 @@ def defer(self) -> None: """ raise NotImplementedError - @abstractproperty + @property + @abstractmethod def final_iteration(self) -> bool: """Is this the final iteration of semantic analysis?""" raise NotImplementedError diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py index c040fee4e7d7..44262371253c 100644 --- a/mypy/semanal_shared.py +++ b/mypy/semanal_shared.py @@ -1,6 +1,6 @@ """Shared definitions used by different parts of semantic analysis.""" -from abc import abstractmethod, abstractproperty +from abc import abstractmethod from typing import Optional, List, Callable from typing_extensions import Final @@ -67,7 +67,8 @@ def is_incomplete_namespace(self, fullname: str) -> bool: """Is a module or class namespace potentially missing some definitions?""" raise NotImplementedError - @abstractproperty + @property + @abstractmethod def final_iteration(self) -> bool: """Is this the final iteration of semantic analysis?""" raise NotImplementedError @@ -156,7 +157,8 @@ def parse_bool(self, expr: Expression) -> Optional[bool]: def qualified_name(self, n: str) -> str: raise NotImplementedError - @abstractproperty + @property + @abstractmethod def is_typeshed_stub_file(self) -> bool: raise NotImplementedError From 4f3c9cdb8a0b943dda7124710163deef5674861a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ville=20Skytt=C3=A4?= Date: Tue, 21 Jan 2020 23:12:07 +0200 Subject: [PATCH 056/117] Automatically write .gitignore to cache dir, ignoring everything (#8193) --- 
mypy/build.py | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/mypy/build.py b/mypy/build.py index 0c8e05f11556..8d6636048e51 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -1080,12 +1080,31 @@ def _cache_dir_prefix(options: Options) -> str: return base +def add_catch_all_gitignore(target_dir: str) -> None: + """Add catch-all .gitignore to an existing directory. + + No-op if the .gitignore already exists. + """ + gitignore = os.path.join(target_dir, ".gitignore") + try: + with open(gitignore, "x") as f: + print("# Automatically created by mypy", file=f) + print("*", file=f) + except FileExistsError: + pass + + def create_metastore(options: Options) -> MetadataStore: """Create the appropriate metadata store.""" + # Add catch-all .gitignore to cache dir if we created it + cache_dir_existed = os.path.isdir(options.cache_dir) if options.sqlite_cache: - return SqliteMetadataStore(_cache_dir_prefix(options)) + mds = SqliteMetadataStore(_cache_dir_prefix(options)) # type: MetadataStore else: - return FilesystemMetadataStore(_cache_dir_prefix(options)) + mds = FilesystemMetadataStore(_cache_dir_prefix(options)) + if not cache_dir_existed and os.path.isdir(options.cache_dir): + add_catch_all_gitignore(options.cache_dir) + return mds def get_cache_names(id: str, path: str, options: Options) -> Tuple[str, str, Optional[str]]: From 2d3a1bf2da448a32bfc6be32cbfcfee31570382d Mon Sep 17 00:00:00 2001 From: "Uwe L. 
Korn" Date: Tue, 21 Jan 2020 23:33:21 +0100 Subject: [PATCH 057/117] Fix module alias as instance attribute (#8259) Fixes #4291 --- mypy/semanal.py | 15 ++++++++++++--- test-data/unit/check-basic.test | 26 ++++++++++++++++++++++++++ 2 files changed, 38 insertions(+), 3 deletions(-) diff --git a/mypy/semanal.py b/mypy/semanal.py index 42770e1a0389..b2769373f324 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -3120,21 +3120,30 @@ def process_module_assignment(self, lvals: List[Lvalue], rval: Expression, rnode = self.lookup_type_node(rval) if rnode and isinstance(rnode.node, MypyFile): for lval in lvals: - if not isinstance(lval, NameExpr): + if not isinstance(lval, RefExpr): continue # respect explicitly annotated type if (isinstance(lval.node, Var) and lval.node.type is not None): continue - lnode = self.current_symbol_table().get(lval.name) + + # We can handle these assignments to locals and to self + if isinstance(lval, NameExpr): + lnode = self.current_symbol_table().get(lval.name) + elif isinstance(lval, MemberExpr) and self.is_self_member_ref(lval): + assert self.type is not None + lnode = self.type.names.get(lval.name) + else: + continue + if lnode: if isinstance(lnode.node, MypyFile) and lnode.node is not rnode.node: + assert isinstance(lval, (NameExpr, MemberExpr)) self.fail( "Cannot assign multiple modules to name '{}' " "without explicit 'types.ModuleType' annotation".format(lval.name), ctx) # never create module alias except on initial var definition elif lval.is_inferred_def: - lnode.kind = self.current_symbol_kind() assert rnode.node is not None lnode.node = rnode.node diff --git a/test-data/unit/check-basic.test b/test-data/unit/check-basic.test index 313f0446871c..4939c2d5be93 100644 --- a/test-data/unit/check-basic.test +++ b/test-data/unit/check-basic.test @@ -474,3 +474,29 @@ from typing import Any def f() -> object: x: Any = 1 return x + +[case testImportModuleAsClassMember] +import test + +class A: + def __init__(self) -> None: + self.test 
= test + + def __call__(self) -> None: + self.test.foo("Message") + +[file test.py] +def foo(s: str) -> None: ... + +[case testLocalImportModuleAsClassMember] +class A: + def __init__(self) -> None: + import test + + self.test = test + + def __call__(self) -> None: + self.test.foo("Message") + +[file test.py] +def foo(s: str) -> None: ... From 0e8e135548a4d2fde42f93e2a2a56802af019263 Mon Sep 17 00:00:00 2001 From: dosisod <39638017+dosisod@users.noreply.github.com> Date: Wed, 22 Jan 2020 03:01:24 -0800 Subject: [PATCH 058/117] Fixed indicator position in code with tabs (#8307) --- mypy/errors.py | 7 ++++++- test-data/unit/cmdline.test | 10 ++++++++++ test-data/unit/fine-grained-blockers.test | 2 +- 3 files changed, 17 insertions(+), 2 deletions(-) diff --git a/mypy/errors.py b/mypy/errors.py index 5c37365160c1..89d9baec93f2 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -451,12 +451,17 @@ def format_messages(self, error_info: List[ErrorInfo], # Add source code fragment and a location marker. if severity == 'error' and source_lines and line > 0: source_line = source_lines[line - 1] + source_line_expanded = source_line.expandtabs() if column < 0: # Something went wrong, take first non-empty column. column = len(source_line) - len(source_line.lstrip()) + + # Shifts column after tab expansion + column = len(source_line[:column].expandtabs()) + # Note, currently coloring uses the offset to detect source snippets, # so these offsets should not be arbitrary. 
- a.append(' ' * DEFAULT_SOURCE_OFFSET + source_line) + a.append(' ' * DEFAULT_SOURCE_OFFSET + source_line_expanded) a.append(' ' * (DEFAULT_SOURCE_OFFSET + column) + '^') return a diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test index 0e7adc41a29d..fb70f80e837f 100644 --- a/test-data/unit/cmdline.test +++ b/test-data/unit/cmdline.test @@ -1036,6 +1036,16 @@ some_file.py:1: error: invalid syntax [syntax] ^ == Return code: 2 +[case testTabRenderingUponError] +# cmd: mypy --pretty tabs.py +[file tabs.py] +def test_tabs() -> str: + return None +[out] +tabs.py:2: error: Incompatible return value type (got "None", expected "str") + return None + ^ + [case testSpecialTypeshedGenericNote] # cmd: mypy --disallow-any-generics --python-version=3.6 test.py [file test.py] diff --git a/test-data/unit/fine-grained-blockers.test b/test-data/unit/fine-grained-blockers.test index 2b02dd689705..3afe4dd5c0b3 100644 --- a/test-data/unit/fine-grained-blockers.test +++ b/test-data/unit/fine-grained-blockers.test @@ -40,7 +40,7 @@ def f() -> None: pass == a.py:1: error: invalid syntax [syntax] def f(x: int) -> - ^ + ^ == main:3: error: Too few arguments for "f" [call-arg] a.f() From f3c57e507efc5792fce7a5e7ee12fec5f91c9f38 Mon Sep 17 00:00:00 2001 From: Denys Halenok Date: Thu, 23 Jan 2020 18:56:34 +0200 Subject: [PATCH 059/117] Add new error code for unreachable errors (#8312) Closes #8190. This introduces a new error code for errors shown when using the `--warn-unreachable` flag, such as the "Statement is unreachable" error. 
--- docs/source/error_code_list2.rst | 19 +++++++++++++++++++ mypy/errorcodes.py | 2 ++ mypy/messages.py | 8 +++++--- 3 files changed, 26 insertions(+), 3 deletions(-) diff --git a/docs/source/error_code_list2.rst b/docs/source/error_code_list2.rst index c84294f95ecf..c91c1ba20a2c 100644 --- a/docs/source/error_code_list2.rst +++ b/docs/source/error_code_list2.rst @@ -174,3 +174,22 @@ that ``Cat`` falls back to ``Any`` in a type annotation: # Error: Argument 1 to "feed" becomes "Any" due to an unfollowed import [no-any-unimported] def feed(cat: Cat) -> None: ... + +Check that statement or expression is unreachable [unreachable] +--------------------------------------------------------------- + +If you use :option:`--warn-unreachable `, mypy generates an error if it +thinks that a statement or expression will never be executed. In most cases, this is due to +incorrect control flow or conditional checks that are accidentally always true or false. + +.. code-block:: python + + # mypy: warn-unreachable + + def example(x: int) -> None: + # Error: Right operand of 'or' is never evaluated [unreachable] + assert isinstance(x, int) or x == 'unused' + + return + # Error: Statement is unreachable [unreachable] + print('unreachable') diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index 7749db5e8008..47206c53e9de 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -103,6 +103,8 @@ def __str__(self) -> str: NO_ANY_RETURN = ErrorCode( 'no-any-return', 'Reject returning value with "Any" type if return type is not "Any"', 'General') # type: Final +UNREACHABLE = ErrorCode( + 'unreachable', "Warn about unreachable statements or expressions", 'General') # type: Final # Syntax errors are often blocking. 
SYNTAX = ErrorCode( diff --git a/mypy/messages.py b/mypy/messages.py index 011527b00fd7..b24c57313d47 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -1235,7 +1235,7 @@ def note_call(self, context, code=code) def unreachable_statement(self, context: Context) -> None: - self.fail("Statement is unreachable", context) + self.fail("Statement is unreachable", context, code=codes.UNREACHABLE) def redundant_left_operand(self, op_name: str, context: Context) -> None: """Indicates that the left operand of a boolean expression is redundant: @@ -1249,7 +1249,8 @@ def redundant_right_operand(self, op_name: str, context: Context) -> None: it does not change the truth value of the entire condition as a whole. 'op_name' should either be the string "and" or the string "or". """ - self.fail("Right operand of '{}' is never evaluated".format(op_name), context) + self.fail("Right operand of '{}' is never evaluated".format(op_name), + context, code=codes.UNREACHABLE) def redundant_condition_in_comprehension(self, truthiness: bool, context: Context) -> None: self.redundant_expr("If condition in comprehension", truthiness, context) @@ -1261,7 +1262,8 @@ def redundant_condition_in_assert(self, truthiness: bool, context: Context) -> N self.redundant_expr("Condition in assert", truthiness, context) def redundant_expr(self, description: str, truthiness: bool, context: Context) -> None: - self.fail("{} is always {}".format(description, str(truthiness).lower()), context) + self.fail("{} is always {}".format(description, str(truthiness).lower()), + context, code=codes.UNREACHABLE) def report_protocol_problems(self, subtype: Union[Instance, TupleType, TypedDictType], From ad6c717c408c6ab6d21a488ab1f89930448ae83c Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Thu, 23 Jan 2020 11:04:17 -0800 Subject: [PATCH 060/117] Make isinstance/issubclass generate ad-hoc intersections (#8305) This diff makes `isinstance(...)` and `issubclass(...)` try generating ad-hoc intersections of Instances when 
possible. For example, we previously concluded the if-branch is unreachable in the following program. This PR makes mypy infer an ad-hoc intersection instead. class A: pass class B: pass x: A if isinstance(x, B): reveal_type(x) # N: Revealed type is 'test.' If you try doing an `isinstance(...)` that legitimately is impossible due to conflicting method signatures or MRO issues, we continue to declare the branch unreachable. Passing in the `--warn-unreachable` flag will now also report an error about this: # flags: --warn-unreachable x: str # E: Subclass of "str" and "bytes" cannot exist: would have # incompatible method signatures if isinstance(x, bytes): reveal_type(x) # E: Statement is unreachable This error message has the same limitations as the other `--warn-unreachable` ones: we suppress them if the isinstance check is inside a function using TypeVars with multiple values. However, we *do* end up always inferring an intersection type when possible -- that logic is never suppressed. I initially thought we might have to suppress the new logic as well (see https://github.com/python/mypy/issues/3603#issuecomment-506996850), but it turns out this is a non-issue in practice once you add in the check that disallows impossible intersections. For example, when I tried running this PR on the larger of our two internal codebases, I found about 25 distinct errors, all of which were legitimate and unrelated to the problem discussed in the PR. (And if we don't suppress the extra error message, we get about 100-120 errors, mostly due to tests repeatedly doing `result = blah()` followed by `assert isinstance(result, X)` where X keeps changing.) 
--- mypy/checker.py | 180 ++++++++++++-- mypy/messages.py | 24 +- mypy/nodes.py | 4 + mypy/semanal.py | 4 +- test-data/unit/check-incremental.test | 152 ++++++++++++ test-data/unit/check-isinstance.test | 299 ++++++++++++++++++++++- test-data/unit/check-protocols.test | 4 +- test-data/unit/check-typevar-values.test | 25 +- test-data/unit/deps.test | 16 ++ test-data/unit/fine-grained.test | 152 ++++++++++++ test-data/unit/pythoneval.test | 14 ++ 11 files changed, 834 insertions(+), 40 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 534c4bed24b9..d80e0ec02b69 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -38,7 +38,7 @@ get_proper_types, is_literal_type, TypeAliasType) from mypy.sametypes import is_same_type from mypy.messages import ( - MessageBuilder, make_inferred_type_note, append_invariance_notes, + MessageBuilder, make_inferred_type_note, append_invariance_notes, pretty_seq, format_type, format_type_bare, format_type_distinctly, SUGGESTED_TEST_FIXTURES ) import mypy.checkexpr @@ -63,7 +63,7 @@ from mypy.maptype import map_instance_to_supertype from mypy.typevars import fill_typevars, has_no_typevars, fill_typevars_with_any from mypy.semanal import set_callable_name, refers_to_fullname -from mypy.mro import calculate_mro +from mypy.mro import calculate_mro, MroError from mypy.erasetype import erase_typevars, remove_instance_last_known_values, erase_type from mypy.expandtype import expand_type, expand_type_by_instance from mypy.visitor import NodeVisitor @@ -1963,13 +1963,15 @@ def visit_block(self, b: Block) -> None: return for s in b.body: if self.binder.is_unreachable(): - if (self.options.warn_unreachable - and not self.binder.is_unreachable_warning_suppressed() - and not self.is_raising_or_empty(s)): + if self.should_report_unreachable_issues() and not self.is_raising_or_empty(s): self.msg.unreachable_statement(s) break self.accept(s) + def should_report_unreachable_issues(self) -> bool: + return (self.options.warn_unreachable + and 
not self.binder.is_unreachable_warning_suppressed()) + def is_raising_or_empty(self, s: Statement) -> bool: """Returns 'true' if the given statement either throws an error of some kind or is a no-op. @@ -3636,6 +3638,100 @@ def visit_continue_stmt(self, s: ContinueStmt) -> None: self.binder.handle_continue() return None + def make_fake_typeinfo(self, + curr_module_fullname: str, + class_gen_name: str, + class_short_name: str, + bases: List[Instance], + ) -> Tuple[ClassDef, TypeInfo]: + # Build the fake ClassDef and TypeInfo together. + # The ClassDef is full of lies and doesn't actually contain a body. + # Use format_bare to generate a nice name for error messages. + # We skip fully filling out a handful of TypeInfo fields because they + # should be irrelevant for a generated type like this: + # is_protocol, protocol_members, is_abstract + cdef = ClassDef(class_short_name, Block([])) + cdef.fullname = curr_module_fullname + '.' + class_gen_name + info = TypeInfo(SymbolTable(), cdef, curr_module_fullname) + cdef.info = info + info.bases = bases + calculate_mro(info) + info.calculate_metaclass_type() + return cdef, info + + def intersect_instances(self, + instances: Sequence[Instance], + ctx: Context, + ) -> Optional[Instance]: + """Try creating an ad-hoc intersection of the given instances. + + Note that this function does *not* try and create a full-fledged + intersection type. Instead, it returns an instance of a new ad-hoc + subclass of the given instances. + + This is mainly useful when you need a way of representing some + theoretical subclass of the instances the user may be trying to use + the generated intersection can serve as a placeholder. + + This function will create a fresh subclass every time you call it, + even if you pass in the exact same arguments. So this means calling + `self.intersect_intersection([inst_1, inst_2], ctx)` twice will result + in instances of two distinct subclasses of inst_1 and inst_2. 
+ + This is by design: we want each ad-hoc intersection to be unique since + they're supposed to represent some other unknown subclass. + + Returns None if creating the subclass is impossible (e.g. due to + MRO errors or incompatible signatures). If we do successfully create + a subclass, its TypeInfo will automatically be added to the global scope. + """ + curr_module = self.scope.stack[0] + assert isinstance(curr_module, MypyFile) + + base_classes = [] + formatted_names = [] + for inst in instances: + expanded = [inst] + if inst.type.is_intersection: + expanded = inst.type.bases + + for expanded_inst in expanded: + base_classes.append(expanded_inst) + formatted_names.append(format_type_bare(expanded_inst)) + + pretty_names_list = pretty_seq(format_type_distinctly(*base_classes, bare=True), "and") + short_name = ''.format(pretty_names_list) + full_name = gen_unique_name(short_name, curr_module.names) + + old_msg = self.msg + new_msg = self.msg.clean_copy() + self.msg = new_msg + try: + cdef, info = self.make_fake_typeinfo( + curr_module.fullname, + full_name, + short_name, + base_classes, + ) + self.check_multiple_inheritance(info) + info.is_intersection = True + except MroError: + if self.should_report_unreachable_issues(): + old_msg.impossible_intersection( + pretty_names_list, "inconsistent method resolution order", ctx) + return None + finally: + self.msg = old_msg + + if new_msg.is_errors(): + if self.should_report_unreachable_issues(): + self.msg.impossible_intersection( + pretty_names_list, "incompatible method signatures", ctx) + return None + + curr_module.names[full_name] = SymbolTableNode(GDEF, info) + return Instance(info, []) + def intersect_instance_callable(self, typ: Instance, callable_type: CallableType) -> Instance: """Creates a fake type that represents the intersection of an Instance and a CallableType. 
@@ -3650,20 +3746,9 @@ def intersect_instance_callable(self, typ: Instance, callable_type: CallableType gen_name = gen_unique_name("".format(typ.type.name), cur_module.names) - # Build the fake ClassDef and TypeInfo together. - # The ClassDef is full of lies and doesn't actually contain a body. - # Use format_bare to generate a nice name for error messages. - # We skip fully filling out a handful of TypeInfo fields because they - # should be irrelevant for a generated type like this: - # is_protocol, protocol_members, is_abstract + # Synthesize a fake TypeInfo short_name = format_type_bare(typ) - cdef = ClassDef(short_name, Block([])) - cdef.fullname = cur_module.fullname + '.' + gen_name - info = TypeInfo(SymbolTable(), cdef, cur_module.fullname) - cdef.info = info - info.bases = [typ] - calculate_mro(info) - info.calculate_metaclass_type() + cdef, info = self.make_fake_typeinfo(cur_module.fullname, gen_name, short_name, [typ]) # Build up a fake FuncDef so we can populate the symbol table. func_def = FuncDef('__call__', [], Block([]), callable_type) @@ -3828,9 +3913,11 @@ def find_isinstance_check_helper(self, node: Expression) -> Tuple[TypeMap, TypeM return {}, {} expr = node.args[0] if literal(expr) == LITERAL_TYPE: - vartype = type_map[expr] - type = get_isinstance_type(node.args[1], type_map) - return conditional_type_map(expr, vartype, type) + return self.conditional_type_map_with_intersection( + expr, + type_map[expr], + get_isinstance_type(node.args[1], type_map), + ) elif refers_to_fullname(node.callee, 'builtins.issubclass'): if len(node.args) != 2: # the error will be reported elsewhere return {}, {} @@ -4309,6 +4396,10 @@ def refine_identity_comparison_expression(self, if enum_name is not None: expr_type = try_expanding_enum_to_union(expr_type, enum_name) + + # We intentionally use 'conditional_type_map' directly here instead of + # 'self.conditional_type_map_with_intersection': we only compute ad-hoc + # intersections when working with pure instances. 
partial_type_maps.append(conditional_type_map(expr, expr_type, target_type)) return reduce_conditional_maps(partial_type_maps) @@ -4726,10 +4817,55 @@ def infer_issubclass_maps(self, node: CallExpr, # Any other object whose type we don't know precisely # for example, Any or a custom metaclass. return {}, {} # unknown type - yes_map, no_map = conditional_type_map(expr, vartype, type) + yes_map, no_map = self.conditional_type_map_with_intersection(expr, vartype, type) yes_map, no_map = map(convert_to_typetype, (yes_map, no_map)) return yes_map, no_map + def conditional_type_map_with_intersection(self, + expr: Expression, + expr_type: Type, + type_ranges: Optional[List[TypeRange]], + ) -> Tuple[TypeMap, TypeMap]: + # For some reason, doing "yes_map, no_map = conditional_type_maps(...)" + # doesn't work: mypyc will decide that 'yes_map' is of type None if we try. + initial_maps = conditional_type_map(expr, expr_type, type_ranges) + yes_map = initial_maps[0] # type: TypeMap + no_map = initial_maps[1] # type: TypeMap + + if yes_map is not None or type_ranges is None: + return yes_map, no_map + + # If conditions_type_map was unable to successfully narrow the expr_type + # using the type_ranges and concluded if-branch is unreachable, we try + # computing it again using a different algorithm that tries to generate + # an ad-hoc intersection between the expr_type and the type_ranges. 
+ expr_type = get_proper_type(expr_type) + if isinstance(expr_type, UnionType): + possible_expr_types = get_proper_types(expr_type.relevant_items()) + else: + possible_expr_types = [expr_type] + + possible_target_types = [] + for tr in type_ranges: + item = get_proper_type(tr.item) + if not isinstance(item, Instance) or tr.is_upper_bound: + return yes_map, no_map + possible_target_types.append(item) + + out = [] + for v in possible_expr_types: + if not isinstance(v, Instance): + return yes_map, no_map + for t in possible_target_types: + intersection = self.intersect_instances([v, t], expr) + if intersection is None: + continue + out.append(intersection) + if len(out) == 0: + return None, {} + new_yes_type = make_simplified_union(out) + return {expr: new_yes_type}, {} + def conditional_type_map(expr: Expression, current_type: Optional[Type], diff --git a/mypy/messages.py b/mypy/messages.py index b24c57313d47..14e1b146a82b 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -290,7 +290,11 @@ def has_no_attr(self, if matches: self.fail( '{} has no attribute "{}"; maybe {}?{}'.format( - format_type(original_type), member, pretty_or(matches), extra), + format_type(original_type), + member, + pretty_seq(matches, "or"), + extra, + ), context, code=codes.ATTR_DEFINED) failed = True @@ -623,7 +627,7 @@ def unexpected_keyword_argument(self, callee: CallableType, name: str, arg_type: if not matches: matches = best_matches(name, not_matching_type_args) if matches: - msg += "; did you mean {}?".format(pretty_or(matches[:3])) + msg += "; did you mean {}?".format(pretty_seq(matches[:3], "or")) self.fail(msg, context, code=codes.CALL_ARG) module = find_defining_module(self.modules, callee) if module: @@ -1265,6 +1269,15 @@ def redundant_expr(self, description: str, truthiness: bool, context: Context) - self.fail("{} is always {}".format(description, str(truthiness).lower()), context, code=codes.UNREACHABLE) + def impossible_intersection(self, + formatted_base_class_list: str, 
+ reason: str, + context: Context, + ) -> None: + template = "Subclass of {} cannot exist: would have {}" + self.fail(template.format(formatted_base_class_list, reason), context, + code=codes.UNREACHABLE) + def report_protocol_problems(self, subtype: Union[Instance, TupleType, TypedDictType], supertype: Instance, @@ -1997,13 +2010,14 @@ def best_matches(current: str, options: Iterable[str]) -> List[str]: reverse=True, key=lambda v: (ratios[v], v)) -def pretty_or(args: List[str]) -> str: +def pretty_seq(args: Sequence[str], conjunction: str) -> str: quoted = ['"' + a + '"' for a in args] if len(quoted) == 1: return quoted[0] if len(quoted) == 2: - return "{} or {}".format(quoted[0], quoted[1]) - return ", ".join(quoted[:-1]) + ", or " + quoted[-1] + return "{} {} {}".format(quoted[0], conjunction, quoted[1]) + last_sep = ", " + conjunction + " " + return ", ".join(quoted[:-1]) + last_sep + quoted[-1] def append_invariance_notes(notes: List[str], arg_type: Instance, diff --git a/mypy/nodes.py b/mypy/nodes.py index b2c7769580bd..e24a8887dd01 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -2379,6 +2379,9 @@ class is generic then it will be a type constructor of higher kind. # Is this a newtype type? is_newtype = False + # Is this a synthesized intersection type? + is_intersection = False + # This is a dictionary that will be serialized and un-serialized as is. # It is useful for plugins to add their data to save in the cache. metadata = None # type: Dict[str, JsonDict] @@ -2386,6 +2389,7 @@ class is generic then it will be a type constructor of higher kind. 
FLAGS = [ 'is_abstract', 'is_enum', 'fallback_to_any', 'is_named_tuple', 'is_newtype', 'is_protocol', 'runtime_protocol', 'is_final', + 'is_intersection', ] # type: Final[List[str]] def __init__(self, names: 'SymbolTable', defn: ClassDef, module_name: str) -> None: diff --git a/mypy/semanal.py b/mypy/semanal.py index b2769373f324..72ea96173be8 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -81,7 +81,7 @@ from mypy.typevars import fill_typevars from mypy.visitor import NodeVisitor from mypy.errors import Errors, report_internal_error -from mypy.messages import best_matches, MessageBuilder, pretty_or, SUGGESTED_TEST_FIXTURES +from mypy.messages import best_matches, MessageBuilder, pretty_seq, SUGGESTED_TEST_FIXTURES from mypy.errorcodes import ErrorCode from mypy import message_registry, errorcodes as codes from mypy.types import ( @@ -1802,7 +1802,7 @@ def report_missing_module_attribute(self, import_id: str, source_id: str, import alternatives = set(module.names.keys()).difference({source_id}) matches = best_matches(source_id, alternatives)[:3] if matches: - suggestion = "; maybe {}?".format(pretty_or(matches)) + suggestion = "; maybe {}?".format(pretty_seq(matches, "or")) message += "{}".format(suggestion) self.fail(message, context, code=codes.ATTR_DEFINED) self.add_unknown_imported_symbol(imported_id, context) diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index edf536ac9306..0178226ea97f 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -5165,3 +5165,155 @@ class Foo: # type: ignore import a [file b.py.2] import a # a change + +[case testIsInstanceAdHocIntersectionIncrementalNoChange] +import b +[file a.py] +class A: pass +class B: pass + +class Foo: + def __init__(self) -> None: + x: A + assert isinstance(x, B) + self.x = x +[file b.py] +from a import Foo +[file b.py.2] +from a import Foo +reveal_type(Foo().x) +[builtins fixtures/isinstance.pyi] +[out] +[out2] 
+tmp/b.py:2: note: Revealed type is 'a.' + +[case testIsInstanceAdHocIntersectionIncrementalIsInstanceChange] +import c +[file a.py] +class A: pass +class B: pass +class C: pass + +class Foo: + def __init__(self) -> None: + x: A + assert isinstance(x, B) + self.x = x +[file a.py.2] +class A: pass +class B: pass +class C: pass + +class Foo: + def __init__(self) -> None: + x: A + assert isinstance(x, C) + self.x = x + +[file b.py] +from a import Foo +y = Foo().x + +[file c.py] +from b import y +reveal_type(y) +[builtins fixtures/isinstance.pyi] +[out] +tmp/c.py:2: note: Revealed type is 'a.' +[out2] +tmp/c.py:2: note: Revealed type is 'a.' + +[case testIsInstanceAdHocIntersectionIncrementalUnderlyingObjChang] +import c +[file a.py] +class A: pass +class B: pass +class C: pass +Extra = B +[file a.py.2] +class A: pass +class B: pass +class C: pass +Extra = C + +[file b.py] +from a import A, Extra +x: A +if isinstance(x, Extra): + y = x + +[file c.py] +from b import y +reveal_type(y) +[builtins fixtures/isinstance.pyi] +[out] +tmp/c.py:2: note: Revealed type is 'b.' +[out2] +tmp/c.py:2: note: Revealed type is 'b.' + +[case testIsInstanceAdHocIntersectionIncrementalIntersectionToUnreachable] +import c +[file a.py] +class A: + x: int +class B: + x: int +x: A +assert isinstance(x, B) +y = x + +[file a.py.2] +class A: + x: int +class B: + x: str +x: A +assert isinstance(x, B) +y = x + +[file b.py] +from a import y +z = y + +[file c.py] +from b import z +reveal_type(z) +[builtins fixtures/isinstance.pyi] +[out] +tmp/c.py:2: note: Revealed type is 'a.' 
+[out2] +tmp/c.py:2: note: Revealed type is 'a.A' + +[case testIsInstanceAdHocIntersectionIncrementalUnreachaableToIntersection] +import c +[file a.py] +class A: + x: int +class B: + x: str +x: A +assert isinstance(x, B) +y = x + +[file a.py.2] +class A: + x: int +class B: + x: int +x: A +assert isinstance(x, B) +y = x + +[file b.py] +from a import y +z = y + +[file c.py] +from b import z +reveal_type(z) +[builtins fixtures/isinstance.pyi] +[out] +tmp/c.py:2: note: Revealed type is 'a.A' +[out2] +tmp/c.py:2: note: Revealed type is 'a.' + diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test index 09c174a5d41a..953178bc84e9 100644 --- a/test-data/unit/check-isinstance.test +++ b/test-data/unit/check-isinstance.test @@ -1236,7 +1236,10 @@ else: [builtins fixtures/isinstancelist.pyi] [case testIsinstanceMultiAndSpecialCase] -class A: pass +class A: + # Ensure A.__add__ and int.__add__ are different to + # force 'isinstance(y, int)' checks below to never succeed. + def __add__(self, other: A) -> A: pass class B(A): flag = 1 @@ -1357,7 +1360,7 @@ class B: pass x = B() if isinstance(x, A): - reveal_type(x) # unreachable + reveal_type(x) # N: Revealed type is '__main__.' else: reveal_type(x) # N: Revealed type is '__main__.B' reveal_type(x) # N: Revealed type is '__main__.B' @@ -2158,7 +2161,7 @@ def foo2(x: Optional[str]) -> None: if x is None: reveal_type(x) # N: Revealed type is 'None' elif isinstance(x, A): - reveal_type(x) + reveal_type(x) # N: Revealed type is '__main__.' else: reveal_type(x) # N: Revealed type is 'builtins.str' [builtins fixtures/isinstance.pyi] @@ -2182,8 +2185,7 @@ def foo2(x: Optional[str]) -> None: if x is None: reveal_type(x) # N: Revealed type is 'None' elif isinstance(x, A): - # Mypy should, however, be able to skip impossible cases - reveal_type(x) + reveal_type(x) # N: Revealed type is '__main__.' 
else: reveal_type(x) # N: Revealed type is 'builtins.str' [builtins fixtures/isinstance.pyi] @@ -2284,3 +2286,290 @@ var = 'some string' if isinstance(var, *(str, int)): # E: Too many arguments for "isinstance" pass [builtins fixtures/isinstancelist.pyi] + +[case testIsInstanceAdHocIntersectionBasic] +class A: + def f1(self) -> int: ... +class B: + def f2(self) -> int: ... +class C: + def f3(self) -> int: ... + +x: A +if isinstance(x, B): + reveal_type(x) # N: Revealed type is '__main__.' + if isinstance(x, C): + reveal_type(x) # N: Revealed type is '__main__.' + reveal_type(x.f1()) # N: Revealed type is 'builtins.int' + reveal_type(x.f2()) # N: Revealed type is 'builtins.int' + reveal_type(x.f3()) # N: Revealed type is 'builtins.int' + x.bad() # E: "" has no attribute "bad" + else: + reveal_type(x) # N: Revealed type is '__main__.' +else: + reveal_type(x) # N: Revealed type is '__main__.A' +[builtins fixtures/isinstance.pyi] + +[case testIsInstanceAdHocIntersectionRepeatedChecks] +# flags: --warn-unreachable + +class A: pass +class B: pass + +x: A +if isinstance(x, B): + reveal_type(x) # N: Revealed type is '__main__.' + if isinstance(x, A): + reveal_type(x) # N: Revealed type is '__main__.' + if isinstance(x, B): + reveal_type(x) # N: Revealed type is '__main__.' +[builtins fixtures/isinstance.pyi] + +[case testIsInstanceAdHocIntersectionIncompatibleClasses] +# flags: --warn-unreachable +class A: + def f(self) -> int: ... +class B: + def f(self) -> str: ... +class C: + def f(self) -> str: ... + +class Example(A, B): pass # E: Definition of "f" in base class "A" is incompatible with definition in base class "B" +x: A +if isinstance(x, B): # E: Subclass of "A" and "B" cannot exist: would have incompatible method signatures + reveal_type(x) # E: Statement is unreachable +else: + reveal_type(x) # N: Revealed type is '__main__.A' + +y: C +if isinstance(y, B): + reveal_type(y) # N: Revealed type is '__main__.' 
+ if isinstance(y, A): # E: Subclass of "C", "B", and "A" cannot exist: would have incompatible method signatures + reveal_type(y) # E: Statement is unreachable +[builtins fixtures/isinstance.pyi] + +[case testIsInstanceAdHocIntersectionGenerics] +# flags: --warn-unreachable +from typing import Generic, TypeVar + +class Parent: pass +class Child(Parent): pass + +T = TypeVar('T') +class A(Generic[T]): + def f(self) -> T: ... +class B: + def f(self) -> Parent: ... + +x: A[int] +if isinstance(x, B): # E: Subclass of "A[int]" and "B" cannot exist: would have incompatible method signatures + reveal_type(x) # E: Statement is unreachable +else: + reveal_type(x) # N: Revealed type is '__main__.A[builtins.int]' + +y: A[Parent] +if isinstance(y, B): + reveal_type(y) # N: Revealed type is '__main__.' + reveal_type(y.f()) # N: Revealed type is '__main__.Parent*' +else: + reveal_type(y) # N: Revealed type is '__main__.A[__main__.Parent]' + +z: A[Child] +if isinstance(z, B): + reveal_type(z) # N: Revealed type is '__main__.' + reveal_type(z.f()) # N: Revealed type is '__main__.Child*' +else: + reveal_type(z) # N: Revealed type is '__main__.A[__main__.Child]' +[builtins fixtures/isinstance.pyi] + +[case testIsInstanceAdHocIntersectionGenericsWithValues] +# flags: --warn-unreachable +from typing import TypeVar + +class A: + attr: int +class B: + attr: int +class C: + attr: str + +T1 = TypeVar('T1', A, B) +def f1(x: T1) -> T1: + if isinstance(x, A): + reveal_type(x) # N: Revealed type is '__main__.A*' \ + # N: Revealed type is '__main__.' + if isinstance(x, B): + reveal_type(x) # N: Revealed type is '__main__.' \ + # N: Revealed type is '__main__.' 
+ else: + reveal_type(x) # N: Revealed type is '__main__.A*' + else: + reveal_type(x) # N: Revealed type is '__main__.B*' + return x + +T2 = TypeVar('T2', B, C) +def f2(x: T2) -> T2: + if isinstance(x, B): + reveal_type(x) # N: Revealed type is '__main__.B*' + # Note: even though --warn-unreachable is set, we don't report + # errors for the below: we don't yet have a way of filtering out + # reachability errors that occur for only one variation of the + # TypeVar yet. + if isinstance(x, C): + reveal_type(x) + else: + reveal_type(x) # N: Revealed type is '__main__.B*' + else: + reveal_type(x) # N: Revealed type is '__main__.C*' + return x +[builtins fixtures/isinstance.pyi] + +[case testIsInstanceAdHocIntersectionGenericsWithValuesDirectReturn] +# flags: --warn-unreachable +from typing import TypeVar + +class A: + attr: int +class B: + attr: int +class C: + attr: str + +T1 = TypeVar('T1', A, B) +def f1(x: T1) -> T1: + if isinstance(x, A): + # The error message is confusing, but we indeed do run into problems if + # 'x' is a subclass of A and B + return A() # E: Incompatible return value type (got "A", expected "B") + else: + return B() + +T2 = TypeVar('T2', B, C) +def f2(x: T2) -> T2: + if isinstance(x, B): + # In contrast, it's impossible for a subclass of "B" and "C" to + # exist, so this is fine + return B() + else: + return C() +[builtins fixtures/isinstance.pyi] + +[case testIsInstanceAdHocIntersectionUsage] +# flags: --warn-unreachable +class A: pass +class B: pass +class Concrete(A, B): pass + +def accept_a(a: A) -> None: pass +def accept_b(a: B) -> None: pass +def accept_concrete(c: Concrete) -> None: pass + +x: A +if isinstance(x, B): + var = x + reveal_type(var) # N: Revealed type is '__main__.' 
+ accept_a(var) + accept_b(var) + accept_concrete(var) # E: Argument 1 to "accept_concrete" has incompatible type ""; expected "Concrete" +[builtins fixtures/isinstance.pyi] + +[case testIsInstanceAdHocIntersectionReinfer] +# flags: --warn-unreachable +class A: pass +class B: pass + +x: A +assert isinstance(x, B) +reveal_type(x) # N: Revealed type is '__main__.' + +y: A +assert isinstance(y, B) +reveal_type(y) # N: Revealed type is '__main__.1' + +x = y +reveal_type(x) # N: Revealed type is '__main__.1' +[builtins fixtures/isinstance.pyi] + +[case testIsInstanceAdHocIntersectionWithUnions] +# flags: --warn-unreachable +from typing import Type, Union +class A: pass +class B: pass +class C: pass +class D: pass + +v1: A +if isinstance(v1, (B, C)): + reveal_type(v1) # N: Revealed type is 'Union[__main__., __main__.]' + +v2: Union[A, B] +if isinstance(v2, C): + reveal_type(v2) # N: Revealed type is 'Union[__main__.1, __main__.]' + +v3: Union[A, B] +if isinstance(v3, (C, D)): + reveal_type(v3) # N: Revealed type is 'Union[__main__.2, __main__., __main__.1, __main__.]' +[builtins fixtures/isinstance.pyi] + +[case testIsInstanceAdHocIntersectionSameNames] +# flags: --warn-unreachable +from foo import A as A2 +class A: pass + +x: A +if isinstance(x, A2): + reveal_type(x) # N: Revealed type is '__main__.' 
+ +[file foo.py] +class A: pass +[builtins fixtures/isinstance.pyi] + +[case testIsInstanceAdHocIntersectionBadMro] +# flags: --warn-unreachable +class X: pass +class Y: pass +class A(X, Y): pass +class B(Y, X): pass + +foo: A +if isinstance(foo, B): # E: Subclass of "A" and "B" cannot exist: would have inconsistent method resolution order + reveal_type(foo) # E: Statement is unreachable +[builtins fixtures/isinstance.pyi] + +[case testIsInstanceAdHocIntersectionAmbiguousClass] +# flags: --warn-unreachable +from typing import Any + +class Concrete: + x: int +class Ambiguous: + x: Any + +# We bias towards assuming these two classes could be overlapping +foo: Concrete +if isinstance(foo, Ambiguous): + reveal_type(foo) # N: Revealed type is '__main__.' + reveal_type(foo.x) # N: Revealed type is 'builtins.int' +[builtins fixtures/isinstance.pyi] + +[case testIsSubclassAdHocIntersection] +# flags: --warn-unreachable +from typing import Type + +class A: + x: int +class B: + x: int +class C: + x: str + +x: Type[A] +if issubclass(x, B): + reveal_type(x) # N: Revealed type is 'Type[__main__.]' + if issubclass(x, C): # E: Subclass of "A", "B", and "C" cannot exist: would have incompatible method signatures + reveal_type(x) # E: Statement is unreachable + else: + reveal_type(x) # N: Revealed type is 'Type[__main__.]' +else: + reveal_type(x) # N: Revealed type is 'Type[__main__.A]' +[builtins fixtures/isinstance.pyi] diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index 0081394541b0..8773e91d0840 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -1564,7 +1564,7 @@ if isinstance(c1i, P1): else: reveal_type(c1i) # Unreachable if isinstance(c1i, P): - reveal_type(c1i) # Unreachable + reveal_type(c1i) # N: Revealed type is '__main__.' 
else: reveal_type(c1i) # N: Revealed type is '__main__.C1[builtins.int]' @@ -1576,7 +1576,7 @@ else: c2: C2 if isinstance(c2, P): - reveal_type(c2) # Unreachable + reveal_type(c2) # N: Revealed type is '__main__.' else: reveal_type(c2) # N: Revealed type is '__main__.C2' diff --git a/test-data/unit/check-typevar-values.test b/test-data/unit/check-typevar-values.test index d70f7b240333..72993261a22f 100644 --- a/test-data/unit/check-typevar-values.test +++ b/test-data/unit/check-typevar-values.test @@ -178,20 +178,37 @@ def f(x: T) -> T: [out] [case testIsinstanceWithUserDefinedTypeAndTypeVarValues] +# flags: --warn-unreachable from typing import TypeVar class A: pass class B: pass -T = TypeVar('T', A, B) -def f(x: T) -> None: +T1 = TypeVar('T1', A, B) +def f1(x: T1) -> None: y = x if isinstance(x, A): - # This is only checked when x is A, since A and B are not considered overlapping. x = y - x = A() + x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "B") else: x = B() x = y x.foo() # E: "B" has no attribute "foo" + +class C: + field: int +class D: + field: str +T2 = TypeVar('T2', C, D) +def f2(x: T2) -> None: + y = x + if isinstance(x, C): + # C and D are non-overlapping, so this branch is never checked + x = y + x = C() + else: + x = D() + x = y + x.foo() # E: "D" has no attribute "foo" + S = TypeVar('S', int, str) def g(x: S) -> None: y = x diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test index f5224d9216fc..62ddeac07bc7 100644 --- a/test-data/unit/deps.test +++ b/test-data/unit/deps.test @@ -409,6 +409,21 @@ def ff(x: object) -> None: class A: x: int +class B: + x: str + +def f(x: A) -> None: + if isinstance(x, B): + x.y +[builtins fixtures/isinstancelist.pyi] +[out] + -> , m.A, m.f + -> m.B, m.f + +[case testIsInstanceAdHocIntersectionDeps] +class A: + x: int + class B: y: int @@ -417,6 +432,7 @@ def f(x: A) -> None: x.y [builtins fixtures/isinstancelist.pyi] [out] +.y> -> m.f -> , m.A, m.f -> m.B, m.f 
diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 4e2309b3c5cf..d09aaad614e1 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -9410,3 +9410,155 @@ x: List[C] = [a.f(), a.f()] == a.py:2: error: "C" expects 2 type arguments, but 1 given [builtins fixtures/list.pyi] + +[case testIsInstanceAdHocIntersectionFineGrainedIncrementalNoChange] +import b +[file a.py] +class A: pass +class B: pass + +class Foo: + def __init__(self) -> None: + x: A + assert isinstance(x, B) + self.x = x +[file b.py] +from a import Foo +[file b.py.2] +from a import Foo +reveal_type(Foo().x) +[builtins fixtures/isinstance.pyi] +[out] +== +b.py:2: note: Revealed type is 'a.' + +[case testIsInstanceAdHocIntersectionFineGrainedIncrementalIsInstanceChange] +import c +[file a.py] +class A: pass +class B: pass +class C: pass + +class Foo: + def __init__(self) -> None: + x: A + assert isinstance(x, B) + self.x = x +[file a.py.2] +class A: pass +class B: pass +class C: pass + +class Foo: + def __init__(self) -> None: + x: A + assert isinstance(x, C) + self.x = x + +[file b.py] +from a import Foo +y = Foo().x + +[file c.py] +from b import y +reveal_type(y) +[builtins fixtures/isinstance.pyi] +[out] +c.py:2: note: Revealed type is 'a.' +== +c.py:2: note: Revealed type is 'a.' + +[case testIsInstanceAdHocIntersectionFineGrainedIncrementalUnderlyingObjChang] +import c +[file a.py] +class A: pass +class B: pass +class C: pass +Extra = B +[file a.py.2] +class A: pass +class B: pass +class C: pass +Extra = C + +[file b.py] +from a import A, Extra +x: A +if isinstance(x, Extra): + y = x + +[file c.py] +from b import y +reveal_type(y) +[builtins fixtures/isinstance.pyi] +[out] +c.py:2: note: Revealed type is 'b.' +== +c.py:2: note: Revealed type is 'b.' 
+ +[case testIsInstanceAdHocIntersectionFineGrainedIncrementalIntersectionToUnreachable] +import c +[file a.py] +class A: + x: int +class B: + x: int +x: A +assert isinstance(x, B) +y = x + +[file a.py.2] +class A: + x: int +class B: + x: str +x: A +assert isinstance(x, B) +y = x + +[file b.py] +from a import y +z = y + +[file c.py] +from b import z +reveal_type(z) +[builtins fixtures/isinstance.pyi] +[out] +c.py:2: note: Revealed type is 'a.' +== +c.py:2: note: Revealed type is 'a.A' + +[case testIsInstanceAdHocIntersectionFineGrainedIncrementalUnreachaableToIntersection] +import c +[file a.py] +class A: + x: int +class B: + x: str +x: A +assert isinstance(x, B) +y = x + +[file a.py.2] +class A: + x: int +class B: + x: int +x: A +assert isinstance(x, B) +y = x + +[file b.py] +from a import y +z = y + +[file c.py] +from b import z +reveal_type(z) +[builtins fixtures/isinstance.pyi] +[out] +c.py:2: note: Revealed type is 'a.A' +== +c.py:2: note: Revealed type is 'a.' + diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 7cda44c4e569..c07449a6f24b 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1479,6 +1479,20 @@ def f_suppresses() -> int: _testUnreachableWithStdlibContextManagersNoStrictOptional.py:9: error: Statement is unreachable _testUnreachableWithStdlibContextManagersNoStrictOptional.py:15: error: Statement is unreachable +[case testIsInstanceAdHocIntersectionWithStrAndBytes] +# mypy: warn-unreachable +x: str +if isinstance(x, bytes): + reveal_type(x) +y: str +if isinstance(x, int): + reveal_type(x) +[out] +_testIsInstanceAdHocIntersectionWithStrAndBytes.py:3: error: Subclass of "str" and "bytes" cannot exist: would have incompatible method signatures +_testIsInstanceAdHocIntersectionWithStrAndBytes.py:4: error: Statement is unreachable +_testIsInstanceAdHocIntersectionWithStrAndBytes.py:6: error: Subclass of "str" and "int" cannot exist: would have incompatible method signatures 
+_testIsInstanceAdHocIntersectionWithStrAndBytes.py:7: error: Statement is unreachable + [case testAsyncioFutureWait] # mypy: strict-optional from asyncio import Future, wait From 7570779cad7918ecd5b81d31b5acf2bf564a2e2a Mon Sep 17 00:00:00 2001 From: Maksim Kurnikov Date: Fri, 24 Jan 2020 00:19:44 +0300 Subject: [PATCH 061/117] add add_method_to_class function for plugins.common (#8245) --- mypy/plugins/common.py | 31 ++++++++++++++++++++++++++----- 1 file changed, 26 insertions(+), 5 deletions(-) diff --git a/mypy/plugins/common.py b/mypy/plugins/common.py index 66105f96a3eb..536022a1e09e 100644 --- a/mypy/plugins/common.py +++ b/mypy/plugins/common.py @@ -1,7 +1,7 @@ from typing import List, Optional, Union from mypy.nodes import ( - ARG_POS, MDEF, Argument, Block, CallExpr, Expression, SYMBOL_FUNCBASE_TYPES, + ARG_POS, MDEF, Argument, Block, CallExpr, ClassDef, Expression, SYMBOL_FUNCBASE_TYPES, FuncDef, PassStmt, RefExpr, SymbolTableNode, Var, JsonDict, ) from mypy.plugin import ClassDefContext, SemanticAnalyzerPluginInterface @@ -90,19 +90,40 @@ def add_method( self_type: Optional[Type] = None, tvar_def: Optional[TypeVarDef] = None, ) -> None: - """Adds a new method to a class. """ - info = ctx.cls.info + Adds a new method to a class. + Deprecated, use add_method_to_class() instead. + """ + add_method_to_class(ctx.api, ctx.cls, + name=name, + args=args, + return_type=return_type, + self_type=self_type, + tvar_def=tvar_def) + + +def add_method_to_class( + api: SemanticAnalyzerPluginInterface, + cls: ClassDef, + name: str, + args: List[Argument], + return_type: Type, + self_type: Optional[Type] = None, + tvar_def: Optional[TypeVarDef] = None, +) -> None: + """Adds a new method to a class definition. + """ + info = cls.info # First remove any previously generated methods with the same name # to avoid clashes and problems in the semantic analyzer. 
if name in info.names: sym = info.names[name] if sym.plugin_generated and isinstance(sym.node, FuncDef): - ctx.cls.defs.body.remove(sym.node) + cls.defs.body.remove(sym.node) self_type = self_type or fill_typevars(info) - function_type = ctx.api.named_type('__builtins__.function') + function_type = api.named_type('__builtins__.function') args = [Argument(Var('self'), self_type, None, ARG_POS)] + args arg_types, arg_names, arg_kinds = [], [], [] From 9c2b8b3651a8ec1d063f124f90008e5324e8e450 Mon Sep 17 00:00:00 2001 From: "Michael J. Sullivan" Date: Thu, 23 Jan 2020 13:45:58 -0800 Subject: [PATCH 062/117] Make the fine-grained mergechecker work again (#8313) This fixes some stuff in the mergechecker that breaks with current mypy's and also some bugs that the mergechecker caught. The entire fine-grained test suite now passes with the merge checker on, which I don't think has ever been true before. This means that we could turn it on by default, but it doubles the runtime of the fine-grained tests (from 90s CPU time to 180s CPU time on my laptop), so I've left it off for now. The motivation here is that I knew intersect_callable's creation of types during typechecking used to run afoul of the consistency checker and so I was nervous that #8305 would cause more problems by adding more logic of that kind. It no longer does, probably as a result of the semantic analyzer rewrite, so I think we are in the clear on that. 
--- mypy/semanal_main.py | 6 ++++++ mypy/server/astmerge.py | 19 ++++++++----------- mypy/server/mergecheck.py | 5 ++++- mypy/server/objgraph.py | 16 +++++++++++----- mypy/traverser.py | 4 ++++ test-data/unit/deps-classes.test | 2 +- test-data/unit/deps-types.test | 6 +++--- 7 files changed, 37 insertions(+), 21 deletions(-) diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py index a12b6cadeb69..cac2a83214b8 100644 --- a/mypy/semanal_main.py +++ b/mypy/semanal_main.py @@ -335,6 +335,12 @@ def semantic_analyze_target(target: str, priority = mypy.build.PRI_LOW if priority <= state.priorities.get(dep, priority): state.priorities[dep] = priority + + # Clear out some stale data to avoid memory leaks and astmerge + # validity check confusion + analyzer.statement = None + del analyzer.cur_mod_node + if analyzer.deferred: return [target], analyzer.incomplete, analyzer.progress else: diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py index 5d00acee8c42..587df57e8a08 100644 --- a/mypy/server/astmerge.py +++ b/mypy/server/astmerge.py @@ -51,7 +51,7 @@ MypyFile, SymbolTable, Block, AssignmentStmt, NameExpr, MemberExpr, RefExpr, TypeInfo, FuncDef, ClassDef, NamedTupleExpr, SymbolNode, Var, Statement, SuperExpr, NewTypeExpr, OverloadedFuncDef, LambdaExpr, TypedDictExpr, EnumCallExpr, FuncBase, TypeAliasExpr, CallExpr, - CastExpr, + CastExpr, TypeAlias, MDEF ) from mypy.traverser import TraverserVisitor @@ -213,7 +213,7 @@ def visit_ref_expr(self, node: RefExpr) -> None: node.node = self.fixup(node.node) if isinstance(node.node, Var): # The Var node may be an orphan and won't otherwise be processed. 
- fixup_var(node.node, self.replacements) + node.node.accept(self) def visit_namedtuple_expr(self, node: NamedTupleExpr) -> None: super().visit_namedtuple_expr(node) @@ -266,6 +266,10 @@ def visit_var(self, node: Var) -> None: self.fixup_type(node.type) super().visit_var(node) + def visit_type_alias(self, node: TypeAlias) -> None: + self.fixup_type(node.target) + super().visit_type_alias(node) + # Helpers def fixup(self, node: SN) -> SN: @@ -459,13 +463,6 @@ def replace_nodes_in_symbol_table(symbols: SymbolTable, old = node.node replace_object_state(new, old) node.node = new - if isinstance(node.node, Var): + if isinstance(node.node, (Var, TypeAlias)): # Handle them here just in case these aren't exposed through the AST. - # TODO: Is this necessary? - fixup_var(node.node, replacements) - - -def fixup_var(node: Var, replacements: Dict[SymbolNode, SymbolNode]) -> None: - if node.type: - node.type.accept(TypeReplaceVisitor(replacements)) - node.info = cast(TypeInfo, replacements.get(node.info, node.info)) + node.node.accept(NodeReplaceVisitor(replacements)) diff --git a/mypy/server/mergecheck.py b/mypy/server/mergecheck.py index dcb820bbffc1..afa450fb5a75 100644 --- a/mypy/server/mergecheck.py +++ b/mypy/server/mergecheck.py @@ -3,7 +3,7 @@ from typing import Dict, List, Tuple from typing_extensions import Final -from mypy.nodes import SymbolNode, Var, Decorator, FuncDef +from mypy.nodes import FakeInfo, SymbolNode, Var, Decorator, FuncDef from mypy.server.objgraph import get_reachable_graph, get_path # If True, print more verbose output on failure. @@ -21,6 +21,9 @@ def check_consistency(o: object) -> None: m = {} # type: Dict[str, SymbolNode] for sym in syms: + if isinstance(sym, FakeInfo): + continue + fn = sym.fullname # Skip None names, since they are ambiguous. # TODO: Everything should have a proper full name? 
diff --git a/mypy/server/objgraph.py b/mypy/server/objgraph.py index d2dc15217096..a7b45f5ec81f 100644 --- a/mypy/server/objgraph.py +++ b/mypy/server/objgraph.py @@ -54,12 +54,18 @@ def isproperty(o: object, attr: str) -> bool: def get_edge_candidates(o: object) -> Iterator[Tuple[object, object]]: + # use getattr because mypyc expects dict, not mappingproxy + if '__getattribute__' in getattr(type(o), '__dict__'): # noqa + return if type(o) not in COLLECTION_TYPE_BLACKLIST: for attr in dir(o): - if attr not in ATTR_BLACKLIST and hasattr(o, attr) and not isproperty(o, attr): - e = getattr(o, attr) - if not type(e) in ATOMIC_TYPE_BLACKLIST: - yield attr, e + try: + if attr not in ATTR_BLACKLIST and hasattr(o, attr) and not isproperty(o, attr): + e = getattr(o, attr) + if not type(e) in ATOMIC_TYPE_BLACKLIST: + yield attr, e + except AssertionError: + pass if isinstance(o, Mapping): for k, v in o.items(): yield k, v @@ -78,7 +84,7 @@ def get_edges(o: object) -> Iterator[Tuple[object, object]]: yield (s, '__closure__'), e.__closure__ # type: ignore if hasattr(e, '__self__'): se = e.__self__ # type: ignore - if se is not o and se is not type(o): + if se is not o and se is not type(o) and hasattr(s, '__self__'): yield s.__self__, se # type: ignore else: if not type(e) in TYPE_BLACKLIST: diff --git a/mypy/traverser.py b/mypy/traverser.py index de43269f31a9..4ce8332fed86 100644 --- a/mypy/traverser.py +++ b/mypy/traverser.py @@ -64,6 +64,10 @@ def visit_class_def(self, o: ClassDef) -> None: d.accept(self) for base in o.base_type_exprs: base.accept(self) + if o.metaclass: + o.metaclass.accept(self) + for v in o.keywords.values(): + v.accept(self) o.defs.accept(self) if o.analyzed: o.analyzed.accept(self) diff --git a/test-data/unit/deps-classes.test b/test-data/unit/deps-classes.test index e8b2aaa7dcca..222b428a0ed4 100644 --- a/test-data/unit/deps-classes.test +++ b/test-data/unit/deps-classes.test @@ -246,4 +246,4 @@ def f() -> None: -> m.f -> m.C, m.f -> m.f - -> , m.M + 
-> , m, m.M diff --git a/test-data/unit/deps-types.test b/test-data/unit/deps-types.test index b13f33fe7017..d0674dfadceb 100644 --- a/test-data/unit/deps-types.test +++ b/test-data/unit/deps-types.test @@ -239,7 +239,7 @@ class M(type): pass [out] -> m.C - -> + -> , m -> m [case testMetaclassDepsDeclared_python2] @@ -268,8 +268,8 @@ class M(type): pass [out] -> m.func - -> - -> m + -> , m.func + -> m, m.func [case testMetaclassAttributes_python2] # flags: --py2 From d56d90087ea98fed32b908f466f950d3fad952af Mon Sep 17 00:00:00 2001 From: "Michael J. Sullivan" Date: Thu, 23 Jan 2020 21:30:46 -0800 Subject: [PATCH 063/117] [mypyc] Delete testDictFree because it is too fragile (#8324) The stuff with gc that it was doing is fragile and it was breaking on windows with github actions (see #8292) even with all the compiled code removed. --- mypyc/test-data/run.test | 34 ---------------------------------- 1 file changed, 34 deletions(-) diff --git a/mypyc/test-data/run.test b/mypyc/test-data/run.test index ba2915d7696f..d1abc264ea94 100644 --- a/mypyc/test-data/run.test +++ b/mypyc/test-data/run.test @@ -4405,40 +4405,6 @@ def foo(x: bool, y: bool) -> Tuple[Optional[A], bool]: [file driver.py] # really I only care it builds -[case testDictFree] -# Test that we garbage collect stuff with __dict__ right! 
-from typing import Optional, Any, Dict, Generic, List -from base import Base - -class A(Base): - z: Any - -def make_garbage(x: List[str]) -> None: - a = A() - b = A() - a.x = b - b.x = a - a.y = [1,2,3,4,5] - -[file base.py] -class Base: - x = None # type: object - y = None # type: object - -[file driver.py] -from native import make_garbage -import gc - -def test(): - gc.collect(2) - x = len(gc.get_objects()) - make_garbage([1,2,3,4]) - gc.collect(2) - y = len(gc.get_objects()) - assert x == y - -test() - [case testIterTypeTrickiness] # Test inferring the type of a for loop body doesn't cause us grief # Extracted from somethings that broke in mypy From 5f29d80106290144a41987b51deb35b47ffc8aa2 Mon Sep 17 00:00:00 2001 From: Anthony Sottile Date: Fri, 24 Jan 2020 11:49:17 -0700 Subject: [PATCH 064/117] Add GitHub Actions for testing windows (#8327) Resolves #8292 --- .github/workflows/test.yml | 49 ++++++++++++++++++++++++++++++++++++++ tox.ini | 2 +- 2 files changed, 50 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/test.yml diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 000000000000..f76468695a77 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,49 @@ +name: main + +on: + push: + branches: [master] + tags: ['*'] + pull_request: + paths-ignore: + - 'docs/**' + - '**/*.rst' + - '**/*.md' + - .gitignore + - .travis.yml + - CREDITS + - LICENSE + +jobs: + build: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + name: [windows-py37-32, windows-py37-64] + include: + - name: windows-py37-32 + python: '3.7' + arch: x86 + os: windows-latest + toxenv: py37 + - name: windows-py37-64 + python: '3.7' + arch: x64 + os: windows-latest + toxenv: py37 + + steps: + - uses: actions/checkout@v1 + - name: initialize submodules + run: git submodule update --init + - uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python }} + architecture: ${{ matrix.arch }} + - name: 
install tox + run: pip install --upgrade setuptools tox==3.9.0 + - name: setup tox environment + run: tox -e ${{ matrix.toxenv }} --notest + - name: test + run: tox -e ${{ matrix.toxenv }} diff --git a/tox.ini b/tox.ini index 0a58c9511305..18cf56f9c3a8 100644 --- a/tox.ini +++ b/tox.ini @@ -13,7 +13,7 @@ isolated_build = true [testenv] description = run the test driver with {basepython} setenv = cov: COVERAGE_FILE={toxworkdir}/.coverage.{envname} -passenv = PYTEST_XDIST_WORKER_COUNT +passenv = PYTEST_XDIST_WORKER_COUNT PROGRAMDATA PROGRAMFILES(X86) deps = -rtest-requirements.txt commands = python -m pytest {posargs} cov: python -m pytest {posargs: --cov mypy --cov-config setup.cfg} From da5bad24d14b3ee20b280df1b41b85d05a171117 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ville=20Skytt=C3=A4?= Date: Sat, 25 Jan 2020 00:53:38 +0200 Subject: [PATCH 065/117] Allow Travis for other than python/mypy repos, e.g. contributor private ones (#8304) --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 9ec81e83fb53..ee6c107f49f1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,5 @@ -# we only CI the master, release branches, tags and PRs -if: tag IS present OR type = pull_request OR ((branch = master OR branch =~ release-*) AND type = push) +# in the python/mypy repo, we only CI the master, release branches, tags and PRs +if: tag IS present OR type = pull_request OR ((branch = master OR branch =~ release-*) AND type = push) OR repo != python/mypy language: python # cache package wheels (1 cache per python version) From 41b40aa4fc7cdeb32f5ee4fec97f5379ebbe795d Mon Sep 17 00:00:00 2001 From: Tan Yuanhong Date: Sat, 25 Jan 2020 21:35:10 +0800 Subject: [PATCH 066/117] Update common issues to include __init__ without arguments (#8303) This is the implication of #5677 where the return type of `__init__` is inferred given that at least one argument is typed. 
However, if no argument is present in `__init__`, `-> None` becomes compulsory if we are to enable type-checking on that method without using `--check-untyped-defs` flag. I believe it is worth mentioning in the section of "No errors reported for obviously wrong code". --- docs/source/common_issues.rst | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/docs/source/common_issues.rst b/docs/source/common_issues.rst index 14369e44cc56..ed122b097005 100644 --- a/docs/source/common_issues.rst +++ b/docs/source/common_issues.rst @@ -72,6 +72,32 @@ flagged as an error. e.g. the :py:func:`pow` builtin returns ``Any`` (see `typeshed issue 285 `_ for the reason). +- **:py:meth:`__init__ ` method has no annotated + arguments or return type annotation.** :py:meth:`__init__ ` + is considered fully-annotated **if at least one argument is annotated**, + while mypy will infer the return type as ``None``. + The implication is that, for a :py:meth:`__init__ ` method + that has no argument, you'll have to explicitly annotate the return type + as ``None`` to type-check this :py:meth:`__init__ ` method: + + .. code-block:: python + + def foo(s: str) -> str: + return s + + class A(): + def __init__(self, value: str): # Return type inferred as None, considered as typed method + self.value = value + foo(1) # error: Argument 1 to "foo" has incompatible type "int"; expected "str" + + class B(): + def __init__(self): # No argument is annotated, considered as untyped method + foo(1) # No error! + + class C(): + def __init__(self) -> None: # Must specify return type to type-check + foo(1) # error: Argument 1 to "foo" has incompatible type "int"; expected "str" + - **Some imports may be silently ignored**. 
Another source of unexpected ``Any`` values are the :option:`--ignore-missing-imports ` and :option:`--follow-imports=skip From be883f6770b24bc467ccb993fd77864248b61979 Mon Sep 17 00:00:00 2001 From: Denys Halenok Date: Sun, 26 Jan 2020 18:28:16 +0200 Subject: [PATCH 067/117] Report an error if final class has abstract attributes (#8332) Closes #8316. --- docs/source/final_attrs.rst | 14 +++++++++ mypy/semanal_classprop.py | 4 +++ test-data/unit/check-classes.test | 47 +++++++++++++++++++++++++++++++ 3 files changed, 65 insertions(+) diff --git a/docs/source/final_attrs.rst b/docs/source/final_attrs.rst index 22010544ad1a..ec1b268df1d8 100644 --- a/docs/source/final_attrs.rst +++ b/docs/source/final_attrs.rst @@ -219,3 +219,17 @@ Here are some situations where using a final class may be useful: base classes and subclasses. * You want to retain the freedom to arbitrarily change the class implementation in the future, and these changes might break subclasses. + +An abstract class that defines at least one abstract method or +property and has ``@final`` decorator will generate an error from +mypy, since those attributes could never be implemented. + +.. 
code-block:: python + + from abc import ABCMeta, abstractmethod + from typing_extensions import final + + @final + class A(metaclass=ABCMeta): # error: Final class A has abstract attributes "f" + @abstractmethod + def f(self, x: int) -> None: pass diff --git a/mypy/semanal_classprop.py b/mypy/semanal_classprop.py index 7052a1197d04..8dc518662445 100644 --- a/mypy/semanal_classprop.py +++ b/mypy/semanal_classprop.py @@ -109,6 +109,10 @@ def report(message: str, severity: str) -> None: report("Class {} has abstract attributes {}".format(typ.fullname, attrs), 'error') report("If it is meant to be abstract, add 'abc.ABCMeta' as an explicit metaclass", 'note') + if typ.is_final and abstract: + attrs = ", ".join('"{}"'.format(attr) for attr in sorted(abstract)) + errors.report(typ.line, typ.column, + "Final class {} has abstract attributes {}".format(typ.fullname, attrs)) def check_protocol_status(info: TypeInfo, errors: Errors) -> None: diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 290926c80466..56b591c041d4 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -6677,3 +6677,50 @@ class B: # N: Perhaps you need "Callable[...]" or a callback protocol? 
[builtins fixtures/classmethod.pyi] + +[case testFinalClassWithAbstractAttributes] +from abc import abstractmethod, ABCMeta +from typing import final + +@final +class A(metaclass=ABCMeta): # E: Final class __main__.A has abstract attributes "bar", "foo" + @abstractmethod + def foo(self): + pass + + @property + @abstractmethod + def bar(self): + pass + +[builtins fixtures/property.pyi] + +[case testFinalClassWithoutABCMeta] +from abc import abstractmethod +from typing import final + +@final +class A(): # E: Final class __main__.A has abstract attributes "bar", "foo" + @abstractmethod + def foo(self): + pass + + @property + @abstractmethod + def bar(self): + pass + +[builtins fixtures/property.pyi] + +[case testFinalClassInheritedAbstractAttributes] +from abc import abstractmethod, ABCMeta +from typing import final + +class A(metaclass=ABCMeta): + @abstractmethod + def foo(self): + pass + +@final +class B(A): # E: Final class __main__.B has abstract attributes "foo" + pass From bb5649ec5df2b9c848253433ed3333a5a2dc7f14 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Wed, 29 Jan 2020 16:38:25 -0800 Subject: [PATCH 068/117] Sync typeshed (#8343) --- mypy/typeshed | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/typeshed b/mypy/typeshed index 375e8c112728..47409f3e254a 160000 --- a/mypy/typeshed +++ b/mypy/typeshed @@ -1 +1 @@ -Subproject commit 375e8c11272889cbb653a9cf680ff9b889309105 +Subproject commit 47409f3e254afc69bbff902fe490f00e44400fd0 From ea3c65cc377fcb6e810dec9ed6314b25113dcda2 Mon Sep 17 00:00:00 2001 From: Xuanda Yang Date: Fri, 31 Jan 2020 01:52:28 +0800 Subject: [PATCH 069/117] Update docs about variables and aliases (#8200) Resolves #3494 (Since module types are tracked in #3500) Following #8187 (comment) and #3494, if I understand correctly and the semantics in #3494's example code has been fixed (report error on re-assign Alias = B, the remaining work of #3494 is to update the docs, which is the main focus of this PR. 
Newly added docs are in common issues and solutions section, with the content mostly adapted from Ivan's example in #3494. And a note point to the docs is also added. --- docs/source/common_issues.rst | 41 +++++++++++++++++++++++++ mypy/typeanal.py | 3 +- test-data/unit/check-columns.test | 27 ++++++++++------ test-data/unit/check-custom-plugin.test | 3 ++ test-data/unit/check-errorcodes.test | 3 +- test-data/unit/check-generics.test | 6 ++-- test-data/unit/check-literal.test | 31 +++++++++++++------ test-data/unit/check-python38.test | 6 ++-- test-data/unit/check-redefine.test | 3 +- test-data/unit/check-semanal-error.test | 1 + test-data/unit/check-type-aliases.test | 6 ++-- test-data/unit/fine-grained.test | 10 ++++++ test-data/unit/merge.test | 1 + test-data/unit/pythoneval-asyncio.test | 1 + test-data/unit/semanal-errors.test | 7 +++-- test-data/unit/semanal-typealiases.test | 6 ++-- 16 files changed, 123 insertions(+), 32 deletions(-) diff --git a/docs/source/common_issues.rst b/docs/source/common_issues.rst index ed122b097005..6891b3262547 100644 --- a/docs/source/common_issues.rst +++ b/docs/source/common_issues.rst @@ -710,3 +710,44 @@ You can install the latest development version of mypy from source. Clone the git clone --recurse-submodules https://github.com/python/mypy.git cd mypy sudo python3 -m pip install --upgrade . + +Variables vs type aliases +----------------------------------- + +Mypy has both type aliases and variables with types like ``Type[...]`` and it is important to know their difference. + +1. Variables with type ``Type[...]`` should be created by assignments with an explicit type annotations: + +.. code-block:: python + + class A: ... + tp: Type[A] = A + +2. Aliases are created by assignments without an explicit type: + +.. code-block:: python + + class A: ... + Alias = A + +3. The difference is that aliases are completely known statically and can be used in type context (annotations): + +.. code-block:: python + + class A: ... 
+ class B: ... + + if random() > 0.5: + Alias = A + else: + Alias = B # error: Cannot assign multiple types to name "Alias" without an explicit "Type[...]" annotation \ + # error: Incompatible types in assignment (expression has type "Type[B]", variable has type "Type[A]") + + tp: Type[object] # tp is a type variable + if random() > 0.5: + tp = A + else: + tp = B # This is OK + + def fun1(x: Alias) -> None: ... # This is OK + def fun2(x: tp) -> None: ... # error: Variable "__main__.tp" is not valid as a type \ No newline at end of file diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 82b4585cfafb..ed5d0e0474e4 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -423,7 +423,8 @@ def analyze_unbound_type_without_type_info(self, t: UnboundType, sym: SymbolTabl # TODO: Move this message building logic to messages.py. notes = [] # type: List[str] if isinstance(sym.node, Var): - # TODO: add a link to alias docs, see #3494. + notes.append('See https://mypy.readthedocs.io/en/' + 'latest/common_issues.html#variables-vs-type-aliases') message = 'Variable "{}" is not valid as a type' elif isinstance(sym.node, (SYMBOL_FUNCBASE_TYPES, Decorator)): message = 'Function "{}" is not valid as a type' diff --git a/test-data/unit/check-columns.test b/test-data/unit/check-columns.test index 4bc70457ac29..206ff15a9d91 100644 --- a/test-data/unit/check-columns.test +++ b/test-data/unit/check-columns.test @@ -153,16 +153,22 @@ from typing import Iterable bad = 0 -def f(x: bad): # E:10: Variable "__main__.bad" is not valid as a type - y: bad # E:8: Variable "__main__.bad" is not valid as a type +def f(x: bad): # E:10: Variable "__main__.bad" is not valid as a type \ + # N:10: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases + y: bad # E:8: Variable "__main__.bad" is not valid as a type \ + # N:8: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases if int(): - def g(x): # E:5: Variable "__main__.bad" is not 
valid as a type + def g(x): # E:5: Variable "__main__.bad" is not valid as a type \ + # N:5: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases # type: (bad) -> None - y = 0 # type: bad # E:9: Variable "__main__.bad" is not valid as a type + y = 0 # type: bad # E:9: Variable "__main__.bad" is not valid as a type \ + # N:9: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases -z: Iterable[bad] # E:13: Variable "__main__.bad" is not valid as a type -h: bad[int] # E:4: Variable "__main__.bad" is not valid as a type +z: Iterable[bad] # E:13: Variable "__main__.bad" is not valid as a type \ + # N:13: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases +h: bad[int] # E:4: Variable "__main__.bad" is not valid as a type \ + # N:4: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases [case testColumnInvalidType_python2] @@ -171,11 +177,14 @@ from typing import Iterable bad = 0 if int(): - def g(x): # E:5: Variable "__main__.bad" is not valid as a type + def g(x): # E:5: Variable "__main__.bad" is not valid as a type \ + # N:5: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases # type: (bad) -> None - y = 0 # type: bad # E:9: Variable "__main__.bad" is not valid as a type + y = 0 # type: bad # E:9: Variable "__main__.bad" is not valid as a type \ + # N:9: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases - z = () # type: Iterable[bad] # E:5: Variable "__main__.bad" is not valid as a type + z = () # type: Iterable[bad] # E:5: Variable "__main__.bad" is not valid as a type \ + # N:5: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases [case testColumnFunctionMissingTypeAnnotation] # flags: --disallow-untyped-defs diff --git a/test-data/unit/check-custom-plugin.test b/test-data/unit/check-custom-plugin.test index 
16651e16efc1..77225b7df9ba 100644 --- a/test-data/unit/check-custom-plugin.test +++ b/test-data/unit/check-custom-plugin.test @@ -498,10 +498,13 @@ Bad1 = non_declarative_base() Bad2 = Bad3 = declarative_base() class C1(Bad1): ... # E: Variable "__main__.Bad1" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases \ # E: Invalid base class "Bad1" class C2(Bad2): ... # E: Variable "__main__.Bad2" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases \ # E: Invalid base class "Bad2" class C3(Bad3): ... # E: Variable "__main__.Bad3" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases \ # E: Invalid base class "Bad3" [file mod.py] from typing import Generic, TypeVar diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index f6886261570f..c0e3a9782b5e 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -250,7 +250,8 @@ x: f # E: Function "__main__.f" is not valid as a type [valid-type] \ import sys y: sys # E: Module "sys" is not valid as a type [valid-type] -z: y # E: Variable "__main__.y" is not valid as a type [valid-type] +z: y # E: Variable "__main__.y" is not valid as a type [valid-type] \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases [builtins fixtures/tuple.pyi] [case testErrorCodeNeedTypeAnnotation] diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index 93714a97ddde..9b1af9a47628 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -985,9 +985,11 @@ class C: b = int # E: Cannot assign multiple types to name "b" without an explicit "Type[...]" annotation if int(): c = int - def f(self, x: a) -> None: pass # E: Variable "__main__.C.a" is not valid as a type + def f(self, 
x: a) -> None: pass # E: Variable "__main__.C.a" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases def g(self, x: b) -> None: pass - def h(self, x: c) -> None: pass # E: Variable "__main__.C.c" is not valid as a type + def h(self, x: c) -> None: pass # E: Variable "__main__.C.c" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases x: b reveal_type(x) # N: Revealed type is 'Union[builtins.int, builtins.str]' [out] diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test index 773a2e36f6a0..1d401986e8e6 100644 --- a/test-data/unit/check-literal.test +++ b/test-data/unit/check-literal.test @@ -707,6 +707,7 @@ y: Foo[Foo] # E: Literal[...] must have at least one parameter NotAType = 3 def f() -> NotAType['also' + 'not' + 'a' + 'type']: ... # E: Variable "__main__.NotAType" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases \ # E: Invalid type comment or annotation # Note: this makes us re-inspect the type (e.g. 
via '_patch_indirect_dependencies' @@ -907,10 +908,12 @@ d2t = 3j a2: a2t reveal_type(a2) # N: Revealed type is 'Any' -b2: b2t # E: Variable "__main__.b2t" is not valid as a type +b2: b2t # E: Variable "__main__.b2t" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases c2: c2t reveal_type(c2) # N: Revealed type is 'Any' -d2: d2t # E: Variable "__main__.d2t" is not valid as a type +d2: d2t # E: Variable "__main__.d2t" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases [builtins fixtures/complex_tuple.pyi] [out] @@ -949,8 +952,10 @@ c: [1, 2, 3] # E: Bracketed expression "[...]" is not valid a from typing_extensions import Literal at = Literal[{"a": 1, "b": 2}] # E: Invalid type alias: expression is not a valid type bt = {"a": 1, "b": 2} -a: at # E: Variable "__main__.at" is not valid as a type -b: bt # E: Variable "__main__.bt" is not valid as a type +a: at # E: Variable "__main__.at" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases +b: bt # E: Variable "__main__.bt" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases [builtins fixtures/dict.pyi] [out] @@ -959,8 +964,10 @@ b: bt # E: Variable "__main__.bt" is not valid as a ty from typing_extensions import Literal at = Literal[{1, 2, 3}] # E: Invalid type alias: expression is not a valid type bt = {1, 2, 3} -a: at # E: Variable "__main__.at" is not valid as a type -b: bt # E: Variable "__main__.bt" is not valid as a type +a: at # E: Variable "__main__.at" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases +b: bt # E: Variable "__main__.bt" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases [builtins fixtures/set.pyi] 
[out] @@ -2868,13 +2875,17 @@ d: Literal[3] # "3" wherever it's used and get the same behavior -- so maybe we do need to support # at least case "b" for consistency? a_wrap: Literal[4, a] # E: Parameter 2 of Literal[...] is invalid \ - # E: Variable "__main__.a" is not valid as a type + # E: Variable "__main__.a" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases b_wrap: Literal[4, b] # E: Parameter 2 of Literal[...] is invalid \ - # E: Variable "__main__.b" is not valid as a type + # E: Variable "__main__.b" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases c_wrap: Literal[4, c] # E: Parameter 2 of Literal[...] is invalid \ - # E: Variable "__main__.c" is not valid as a type + # E: Variable "__main__.c" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases d_wrap: Literal[4, d] # E: Parameter 2 of Literal[...] is invalid \ - # E: Variable "__main__.d" is not valid as a type + # E: Variable "__main__.d" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases [builtins fixtures/tuple.pyi] [out] diff --git a/test-data/unit/check-python38.test b/test-data/unit/check-python38.test index f0a346a40c4c..98eda306c731 100644 --- a/test-data/unit/check-python38.test +++ b/test-data/unit/check-python38.test @@ -238,10 +238,12 @@ def f(x: int = (c := 4)) -> int: # Just make sure we don't crash on this sort of thing. 
if NT := NamedTuple("NT", [("x", int)]): # E: "int" not callable - z2: NT # E: Variable "NT" is not valid as a type + z2: NT # E: Variable "NT" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases if Alias := int: - z3: Alias # E: Variable "Alias" is not valid as a type + z3: Alias # E: Variable "Alias" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases if (reveal_type(y9 := 3) and # N: Revealed type is 'Literal[3]?' reveal_type(y9)): # N: Revealed type is 'builtins.int' diff --git a/test-data/unit/check-redefine.test b/test-data/unit/check-redefine.test index 8e6368ab16be..d5f453c4e84d 100644 --- a/test-data/unit/check-redefine.test +++ b/test-data/unit/check-redefine.test @@ -276,7 +276,8 @@ def f() -> None: # NOTE: '"int" not callable' is due to test stubs y = TypeVar('y') # E: Cannot redefine 'y' as a type variable \ # E: "int" not callable - def h(a: y) -> y: return a # E: Variable "y" is not valid as a type + def h(a: y) -> y: return a # E: Variable "y" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases [case testCannotRedefineVarAsModule] # flags: --allow-redefinition diff --git a/test-data/unit/check-semanal-error.test b/test-data/unit/check-semanal-error.test index d47674a13475..ac8f72b4cd36 100644 --- a/test-data/unit/check-semanal-error.test +++ b/test-data/unit/check-semanal-error.test @@ -57,6 +57,7 @@ A().foo(1) A().x = '' # E [out] main:3: error: Variable "__main__.X" is not valid as a type +main:3: note: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases main:3: error: Invalid base class "X" main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int") diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test index 1e3c6f10a37b..cab61d7dcffb 
100644 --- a/test-data/unit/check-type-aliases.test +++ b/test-data/unit/check-type-aliases.test @@ -99,8 +99,10 @@ T = TypeVar('T') A: Type[float] = int if int(): A = float # OK -x: A # E: Variable "__main__.A" is not valid as a type -def bad(tp: A) -> None: # E: Variable "__main__.A" is not valid as a type +x: A # E: Variable "__main__.A" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases +def bad(tp: A) -> None: # E: Variable "__main__.A" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases pass Alias = int diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index d09aaad614e1..ad8357f3d4e9 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -7951,14 +7951,18 @@ x = 1 a.py:1: error: Name 'TypeVar' is not defined a.py:1: note: Did you forget to import it from "typing"? (Suggestion: "from typing import TypeVar") a.py:7: error: Variable "a.T" is not valid as a type +a.py:7: note: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases a.py:10: error: Name 'bar' already defined on line 6 a.py:11: error: Variable "a.T" is not valid as a type +a.py:11: note: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases == a.py:1: error: Name 'TypeVar' is not defined a.py:1: note: Did you forget to import it from "typing"? 
(Suggestion: "from typing import TypeVar") a.py:7: error: Variable "a.T" is not valid as a type +a.py:7: note: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases a.py:10: error: Name 'bar' already defined on line 6 a.py:11: error: Variable "a.T" is not valid as a type +a.py:11: note: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases [case testRefreshForWithTypeComment1] [file a.py] @@ -8423,6 +8427,7 @@ B = func [out] == main:5: error: Variable "b.B" is not valid as a type +main:5: note: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases [case testNamedTupleForwardFunctionIndirect] # flags: --ignore-missing-imports @@ -8440,6 +8445,7 @@ B = func [out] == main:5: error: Variable "a.A" is not valid as a type +main:5: note: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases [case testNamedTupleForwardFunctionIndirectReveal] # flags: --ignore-missing-imports @@ -8467,8 +8473,10 @@ B = func [out] == m.py:4: error: Variable "a.A" is not valid as a type +m.py:4: note: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases == m.py:4: error: Variable "a.A" is not valid as a type +m.py:4: note: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases m.py:5: note: Revealed type is 'A?' m.py:7: note: Revealed type is 'A?' 
@@ -8484,6 +8492,7 @@ B = int() [out] == main:5: error: Variable "b.B" is not valid as a type +main:5: note: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases [case testAliasForwardFunctionIndirect] # flags: --ignore-missing-imports @@ -8500,6 +8509,7 @@ B = func [out] == main:5: error: Variable "a.A" is not valid as a type +main:5: note: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases [case testLiteralFineGrainedVarConversion] import mod diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test index 407262a99262..aafcbc2427a6 100644 --- a/test-data/unit/merge.test +++ b/test-data/unit/merge.test @@ -779,6 +779,7 @@ foo: int x: foo[A] [out] tmp/target.py:4: error: Variable "target.foo" is not valid as a type +tmp/target.py:4: note: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases ## target NameExpr:3: builtins.int<0> NameExpr:4: foo?[target.A<1>] diff --git a/test-data/unit/pythoneval-asyncio.test b/test-data/unit/pythoneval-asyncio.test index cd95c6d66f94..48b9bd3a0bb7 100644 --- a/test-data/unit/pythoneval-asyncio.test +++ b/test-data/unit/pythoneval-asyncio.test @@ -502,3 +502,4 @@ def bad(arg: P) -> T: [out] _program.py:8: note: Revealed type is 'def [T] (arg: P?) -> T`-1' _program.py:12: error: Variable "_testForwardRefToBadAsyncShouldNotCrash_newsemanal.P" is not valid as a type +_program.py:12: note: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test index 144218df6f58..f92a1a5e338f 100644 --- a/test-data/unit/semanal-errors.test +++ b/test-data/unit/semanal-errors.test @@ -137,6 +137,7 @@ z = 0 # type: x main:5: error: Function "__main__.f" is not valid as a type main:5: note: Perhaps you need "Callable[...]" or a callback protocol? 
main:6: error: Variable "__main__.x" is not valid as a type +main:6: note: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases [case testGlobalVarRedefinition] import typing @@ -802,7 +803,8 @@ cast([int, str], None) # E: Bracketed expression "[...]" is not valid as a typ from typing import cast x = 0 -cast(x, None) # E: Variable "__main__.x" is not valid as a type +cast(x, None) # E: Variable "__main__.x" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases cast(t, None) # E: Name 't' is not defined cast(__builtins__.x, None) # E: Name '__builtins__.x' is not defined [out] @@ -897,7 +899,8 @@ main:4: error: Type cannot be declared in assignment to non-self attribute from typing import TypeVar, Generic t = TypeVar('t') class A(Generic[t]): pass -A[TypeVar] # E: Variable "typing.TypeVar" is not valid as a type +A[TypeVar] # E: Variable "typing.TypeVar" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases [out] [case testInvalidTypeInTypeApplication2] diff --git a/test-data/unit/semanal-typealiases.test b/test-data/unit/semanal-typealiases.test index 7230580c40a6..46af11674717 100644 --- a/test-data/unit/semanal-typealiases.test +++ b/test-data/unit/semanal-typealiases.test @@ -404,13 +404,15 @@ MypyFile:1( import typing A = [int, str] -a = 1 # type: A # E: Variable "__main__.A" is not valid as a type +a = 1 # type: A # E: Variable "__main__.A" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases [case testCantUseStringLiteralAsTypeAlias] from typing import Union A = 'Union[int, str]' -a = 1 # type: A # E: Variable "__main__.A" is not valid as a type +a = 1 # type: A # E: Variable "__main__.A" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/latest/common_issues.html#variables-vs-type-aliases [case 
testStringLiteralTypeAsAliasComponent] from typing import Union From 1f9d87e5dfef28e83ab67de8b8690041975edbc5 Mon Sep 17 00:00:00 2001 From: FooTaGe Date: Fri, 31 Jan 2020 20:51:32 +0200 Subject: [PATCH 070/117] Update builtin_types reflect bool subclass of int (#8346) Fixes #8069. --- docs/source/builtin_types.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/builtin_types.rst b/docs/source/builtin_types.rst index b7ee556f15c1..3b26006d3112 100644 --- a/docs/source/builtin_types.rst +++ b/docs/source/builtin_types.rst @@ -8,7 +8,7 @@ Type Description ====================== =============================== ``int`` integer ``float`` floating point number -``bool`` boolean value +``bool`` boolean value (subclass of ``int``) ``str`` string (unicode) ``bytes`` 8-bit string ``object`` an arbitrary object (``object`` is the common base class) From d6401555682723fd2b193ac2f3dac7f259bb7482 Mon Sep 17 00:00:00 2001 From: Marti Raudsepp Date: Fri, 31 Jan 2020 20:55:39 +0200 Subject: [PATCH 071/117] Fix joining of Sequence (e.g. variadic tuple) and fixed-length tuple (#8335) For example: * Tuple[int] + Tuple[bool, ...] becomes Tuple[int, ...] * List[int] + Tuple[bool, ...] becomes Sequence[int] Previously Mypy simply punted and returned `object`. This solves the other part of issue #4975. Fixes issue #8074. 
--- mypy/join.py | 13 ++++- mypy/test/testtypes.py | 19 +++++++- test-data/unit/check-tuples.test | 81 ++++++++++++++++++++++++++++++++ 3 files changed, 110 insertions(+), 3 deletions(-) diff --git a/mypy/join.py b/mypy/join.py index a2513bd36201..d6a0dc1c3238 100644 --- a/mypy/join.py +++ b/mypy/join.py @@ -177,6 +177,8 @@ def visit_instance(self, t: Instance) -> ProperType: return join_types(t, self.s) elif isinstance(self.s, TypedDictType): return join_types(t, self.s) + elif isinstance(self.s, TupleType): + return join_types(t, self.s) elif isinstance(self.s, LiteralType): return join_types(t, self.s) else: @@ -260,6 +262,15 @@ def visit_overloaded(self, t: Overloaded) -> ProperType: return join_types(t.fallback, s) def visit_tuple_type(self, t: TupleType) -> ProperType: + # When given two fixed-length tuples: + # * If they have the same length, join their subtypes item-wise: + # Tuple[int, bool] + Tuple[bool, bool] becomes Tuple[int, bool] + # + # Otherwise, `t` is a fixed-length tuple but `self.s` is NOT: + # * Joining with a variadic tuple returns variadic tuple: + # Tuple[int, bool] + Tuple[bool, ...] becomes Tuple[int, ...] 
+ # * Joining with any Sequence also returns a Sequence: + # Tuple[int, bool] + List[bool] becomes Sequence[int] if isinstance(self.s, TupleType) and self.s.length() == t.length(): items = [] # type: List[Type] for i in range(t.length()): @@ -269,7 +280,7 @@ def visit_tuple_type(self, t: TupleType) -> ProperType: assert isinstance(fallback, Instance) return TupleType(items, fallback) else: - return self.default(self.s) + return join_types(self.s, mypy.typeops.tuple_fallback(t)) def visit_typeddict_type(self, t: TypedDictType) -> ProperType: if isinstance(self.s, TypedDictType): diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py index 4609e0dd1a02..b9dbb0cc60e3 100644 --- a/mypy/test/testtypes.py +++ b/mypy/test/testtypes.py @@ -501,10 +501,21 @@ def test_tuples(self) -> None: self.assert_join(self.tuple(self.fx.a, self.fx.a), self.fx.std_tuple, - self.fx.o) + self.var_tuple(self.fx.anyt)) self.assert_join(self.tuple(self.fx.a), self.tuple(self.fx.a, self.fx.a), - self.fx.o) + self.var_tuple(self.fx.a)) + + def test_var_tuples(self) -> None: + self.assert_join(self.tuple(self.fx.a), + self.var_tuple(self.fx.a), + self.var_tuple(self.fx.a)) + self.assert_join(self.var_tuple(self.fx.a), + self.tuple(self.fx.a), + self.var_tuple(self.fx.a)) + self.assert_join(self.var_tuple(self.fx.a), + self.tuple(), + self.var_tuple(self.fx.a)) def test_function_types(self) -> None: self.assert_join(self.callable(self.fx.a, self.fx.b), @@ -760,6 +771,10 @@ def assert_simple_join(self, s: Type, t: Type, join: Type) -> None: def tuple(self, *a: Type) -> TupleType: return TupleType(list(a), self.fx.std_tuple) + def var_tuple(self, t: Type) -> Instance: + """Construct a variable-length tuple type""" + return Instance(self.fx.std_tuplei, [t]) + def callable(self, *a: Type) -> CallableType: """callable(a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r. 
diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index 5a792a77d856..213ed545e6d2 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -1077,6 +1077,87 @@ x, y = g(z) # E: Argument 1 to "g" has incompatible type "int"; expected "Tuple[ [builtins fixtures/tuple.pyi] [out] +[case testFixedTupleJoinVarTuple] +from typing import Tuple + +class A: pass +class B(A): pass + +fixtup = None # type: Tuple[B, B] + +vartup_b = None # type: Tuple[B, ...] +reveal_type(fixtup if int() else vartup_b) # N: Revealed type is 'builtins.tuple[__main__.B]' +reveal_type(vartup_b if int() else fixtup) # N: Revealed type is 'builtins.tuple[__main__.B]' + +vartup_a = None # type: Tuple[A, ...] +reveal_type(fixtup if int() else vartup_a) # N: Revealed type is 'builtins.tuple[__main__.A]' +reveal_type(vartup_a if int() else fixtup) # N: Revealed type is 'builtins.tuple[__main__.A]' + + +[builtins fixtures/tuple.pyi] +[out] + +[case testFixedTupleJoinList] +from typing import Tuple, List + +class A: pass +class B(A): pass + +fixtup = None # type: Tuple[B, B] + +lst_b = None # type: List[B] +reveal_type(fixtup if int() else lst_b) # N: Revealed type is 'typing.Sequence[__main__.B]' +reveal_type(lst_b if int() else fixtup) # N: Revealed type is 'typing.Sequence[__main__.B]' + +lst_a = None # type: List[A] +reveal_type(fixtup if int() else lst_a) # N: Revealed type is 'typing.Sequence[__main__.A]' +reveal_type(lst_a if int() else fixtup) # N: Revealed type is 'typing.Sequence[__main__.A]' + +[builtins fixtures/tuple.pyi] +[out] + +[case testEmptyTupleJoin] +from typing import Tuple, List + +class A: pass + +empty = () + +fixtup = None # type: Tuple[A] +reveal_type(fixtup if int() else empty) # N: Revealed type is 'builtins.tuple[__main__.A]' +reveal_type(empty if int() else fixtup) # N: Revealed type is 'builtins.tuple[__main__.A]' + +vartup = None # type: Tuple[A, ...] 
+reveal_type(empty if int() else vartup) # N: Revealed type is 'builtins.tuple[__main__.A]' +reveal_type(vartup if int() else empty) # N: Revealed type is 'builtins.tuple[__main__.A]' + +lst = None # type: List[A] +reveal_type(empty if int() else lst) # N: Revealed type is 'typing.Sequence[__main__.A*]' +reveal_type(lst if int() else empty) # N: Revealed type is 'typing.Sequence[__main__.A*]' + +[builtins fixtures/tuple.pyi] +[out] + +[case testTupleSubclassJoin] +from typing import Tuple, NamedTuple + +class NTup(NamedTuple): + a: bool + b: bool + +class SubTuple(Tuple[bool]): ... +class SubVarTuple(Tuple[int, ...]): ... + +ntup = None # type: NTup +subtup = None # type: SubTuple +vartup = None # type: SubVarTuple + +reveal_type(ntup if int() else vartup) # N: Revealed type is 'builtins.tuple[builtins.int]' +reveal_type(subtup if int() else vartup) # N: Revealed type is 'builtins.tuple[builtins.int]' + +[builtins fixtures/tuple.pyi] +[out] + [case testTupleWithUndersizedContext] a = ([1], 'x') if int(): From 150d4921e702512e8afa0fc9cc22e71e720c3069 Mon Sep 17 00:00:00 2001 From: Marti Raudsepp Date: Fri, 31 Jan 2020 20:59:39 +0200 Subject: [PATCH 072/117] Improve decorators documentation (add decorator factories) (#8336) * Previously only bare decorators were explained, but decorator factories are also fairly common in the real world. * Added decorator examples to cheat sheet page. * Explained difference in behavior for class decorators. * Shortened (IMO) excessive example for bare decorators. --- docs/source/cheat_sheet.rst | 20 +++++++++- docs/source/cheat_sheet_py3.rst | 20 +++++++++- docs/source/generics.rst | 71 ++++++++++++++++++++++++++++----- 3 files changed, 99 insertions(+), 12 deletions(-) diff --git a/docs/source/cheat_sheet.rst b/docs/source/cheat_sheet.rst index e282252d968c..26505defbd6b 100644 --- a/docs/source/cheat_sheet.rst +++ b/docs/source/cheat_sheet.rst @@ -113,7 +113,6 @@ Functions # type: (...) 
-> bool - When you're puzzled or when things are complicated ************************************************** @@ -256,3 +255,22 @@ Miscellaneous return sys.stdin else: return sys.stdout + + +Decorators +********** + +Decorator functions can be expressed via generics. See +:ref:`declaring-decorators` for more details. + +.. code-block:: python + + from typing import Any, Callable, TypeVar + + F = TypeVar('F', bound=Callable[..., Any]) + + def bare_decorator(func): # type: (F) -> F + ... + + def decorator_args(url): # type: (str) -> Callable[[F], F] + ... diff --git a/docs/source/cheat_sheet_py3.rst b/docs/source/cheat_sheet_py3.rst index 47b36e24d351..7eacba404fe0 100644 --- a/docs/source/cheat_sheet_py3.rst +++ b/docs/source/cheat_sheet_py3.rst @@ -127,7 +127,6 @@ Python 3 supports an annotation syntax for function declarations. quux(3) # Fine quux(__x=3) # Error - When you're puzzled or when things are complicated ************************************************** @@ -311,3 +310,22 @@ Miscellaneous # class of that name later on in the file def f(foo: 'A') -> int: # Ok ... + + +Decorators +********** + +Decorator functions can be expressed via generics. See +:ref:`declaring-decorators` for more details. + +.. code-block:: python + + from typing import Any, Callable, TypeVar + + F = TypeVar('F', bound=Callable[..., Any]) + + def bare_decorator(func: F) -> F: + ... + + def decorator_args(url: str) -> Callable[[F], F]: + ... diff --git a/docs/source/generics.rst b/docs/source/generics.rst index 937b6ae51ecc..817466d2469a 100644 --- a/docs/source/generics.rst +++ b/docs/source/generics.rst @@ -522,14 +522,19 @@ Declaring decorators One common application of type variable upper bounds is in declaring a decorator that preserves the signature of the function it decorates, -regardless of that signature. Here's a complete example: +regardless of that signature.
+ +Note that class decorators are handled differently than function decorators in +mypy: decorating a class does not erase its type, even if the decorator has +incomplete type annotations. + +Here's a complete example of a function decorator: .. code-block:: python from typing import Any, Callable, TypeVar, Tuple, cast - FuncType = Callable[..., Any] - F = TypeVar('F', bound=FuncType) + F = TypeVar('F', bound=Callable[..., Any]) # A decorator that preserves the signature. def my_decorator(func: F) -> F: @@ -543,15 +548,8 @@ regardless of that signature. Here's a complete example: def foo(a: int) -> str: return str(a) - # Another. - @my_decorator - def bar(x: float, y: float) -> Tuple[float, float, bool]: - return (x, y, x > y) - a = foo(12) reveal_type(a) # str - b = bar(3.14, 0) - reveal_type(b) # Tuple[float, float, bool] foo('x') # Type check error: incompatible type "str"; expected "int" From the final block we see that the signatures of the decorated @@ -566,6 +564,59 @@ functions are typically small enough that this is not a big problem. This is also the reason for the :py:func:`~typing.cast` call in the ``return`` statement in ``my_decorator()``. See :ref:`casts`. +.. _decorator-factories: + +Decorator factories +------------------- + +Functions that take arguments and return a decorator (also called second-order decorators), are +similarly supported via generics: + +.. code-block:: python + + from typing import Any, Callable, TypeVar + + F = TypeVar('F', bound=Callable[..., Any]) + + def route(url: str) -> Callable[[F], F]: + ... + + @route(url='/') + def index(request: Any) -> str: + return 'Hello world' + +Sometimes the same decorator supports both bare calls and calls with arguments. This can be +achieved by combining with :py:func:`@overload `: + +.. code-block:: python + + from typing import Any, Callable, TypeVar, overload + + F = TypeVar('F', bound=Callable[..., Any]) + + # Bare decorator usage + @overload + def atomic(__func: F) -> F: ... 
+ # Decorator with arguments + @overload + def atomic(*, savepoint: bool = True) -> Callable[[F], F]: ... + + # Implementation + def atomic(__func: Callable[..., Any] = None, *, savepoint: bool = True): + def decorator(func: Callable[..., Any]): + ... # Code goes here + if __func is not None: + return decorator(__func) + else: + return decorator + + # Usage + @atomic + def func1() -> None: ... + + @atomic(savepoint=False) + def func2() -> None: ... + Generic protocols ***************** From 39d6bde2ec7063bd2cec42cce9295479e744bcc7 Mon Sep 17 00:00:00 2001 From: Marti Raudsepp Date: Mon, 3 Feb 2020 19:57:30 +0200 Subject: [PATCH 073/117] Fix joining of fixed-length tuples with mismatching lengths (#8333) For example: Tuple[bool, int] + Tuple[bool] becomes Tuple[int, ...] Previously Mypy simply punted and returned `object`. This solves part of #4975. --- mypy/join.py | 13 ++++++--- mypy/test/testtypes.py | 6 +++++ test-data/unit/check-tuples.test | 46 ++++++++++++++++++++++++++++++++ 3 files changed, 61 insertions(+), 4 deletions(-) diff --git a/mypy/join.py b/mypy/join.py index d6a0dc1c3238..c22574884b61 100644 --- a/mypy/join.py +++ b/mypy/join.py @@ -265,6 +265,8 @@ def visit_tuple_type(self, t: TupleType) -> ProperType: # When given two fixed-length tuples: # * If they have the same length, join their subtypes item-wise: # Tuple[int, bool] + Tuple[bool, bool] becomes Tuple[int, bool] + # * If lengths do not match, return a variadic tuple: + # Tuple[bool, int] + Tuple[bool] becomes Tuple[int, ...] 
# # Otherwise, `t` is a fixed-length tuple but `self.s` is NOT: # * Joining with a variadic tuple returns variadic tuple: @@ -272,13 +274,16 @@ def visit_tuple_type(self, t: TupleType) -> ProperType: # * Joining with any Sequence also returns a Sequence: # Tuple[int, bool] + List[bool] becomes Sequence[int] if isinstance(self.s, TupleType) and self.s.length() == t.length(): - items = [] # type: List[Type] - for i in range(t.length()): - items.append(self.join(t.items[i], self.s.items[i])) fallback = join_instances(mypy.typeops.tuple_fallback(self.s), mypy.typeops.tuple_fallback(t)) assert isinstance(fallback, Instance) - return TupleType(items, fallback) + if self.s.length() == t.length(): + items = [] # type: List[Type] + for i in range(t.length()): + items.append(self.join(t.items[i], self.s.items[i])) + return TupleType(items, fallback) + else: + return fallback else: return join_types(self.s, mypy.typeops.tuple_fallback(t)) diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py index b9dbb0cc60e3..957b3ad7c4ba 100644 --- a/mypy/test/testtypes.py +++ b/mypy/test/testtypes.py @@ -505,6 +505,12 @@ def test_tuples(self) -> None: self.assert_join(self.tuple(self.fx.a), self.tuple(self.fx.a, self.fx.a), self.var_tuple(self.fx.a)) + self.assert_join(self.tuple(self.fx.b), + self.tuple(self.fx.a, self.fx.c), + self.var_tuple(self.fx.a)) + self.assert_join(self.tuple(), + self.tuple(self.fx.a), + self.var_tuple(self.fx.a)) def test_var_tuples(self) -> None: self.assert_join(self.tuple(self.fx.a), diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index 213ed545e6d2..2995e3b7fc80 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -1158,6 +1158,52 @@ reveal_type(subtup if int() else vartup) # N: Revealed type is 'builtins.tuple[ [builtins fixtures/tuple.pyi] [out] +[case testTupleJoinIrregular] +from typing import Tuple + +tup1 = None # type: Tuple[bool, int] +tup2 = None # type: Tuple[bool] + 
+reveal_type(tup1 if int() else tup2) # N: Revealed type is 'builtins.tuple[builtins.int]' +reveal_type(tup2 if int() else tup1) # N: Revealed type is 'builtins.tuple[builtins.int]' + +reveal_type(tup1 if int() else ()) # N: Revealed type is 'builtins.tuple[builtins.int]' +reveal_type(() if int() else tup1) # N: Revealed type is 'builtins.tuple[builtins.int]' + +reveal_type(tup2 if int() else ()) # N: Revealed type is 'builtins.tuple[builtins.bool]' +reveal_type(() if int() else tup2) # N: Revealed type is 'builtins.tuple[builtins.bool]' + +[builtins fixtures/tuple.pyi] +[out] + +[case testTupleSubclassJoinIrregular] +from typing import Tuple, NamedTuple + +class NTup1(NamedTuple): + a: bool + +class NTup2(NamedTuple): + a: bool + b: bool + +class SubTuple(Tuple[bool, int, int]): ... + +tup1 = None # type: NTup1 +tup2 = None # type: NTup2 +subtup = None # type: SubTuple + +reveal_type(tup1 if int() else tup2) # N: Revealed type is 'builtins.tuple[builtins.bool]' +reveal_type(tup2 if int() else tup1) # N: Revealed type is 'builtins.tuple[builtins.bool]' + +reveal_type(tup1 if int() else subtup) # N: Revealed type is 'builtins.tuple[builtins.int]' +reveal_type(subtup if int() else tup1) # N: Revealed type is 'builtins.tuple[builtins.int]' + +reveal_type(tup2 if int() else subtup) # N: Revealed type is 'builtins.tuple[builtins.int]' +reveal_type(subtup if int() else tup2) # N: Revealed type is 'builtins.tuple[builtins.int]' + +[builtins fixtures/tuple.pyi] +[out] + [case testTupleWithUndersizedContext] a = ([1], 'x') if int(): From 5887d38fc4377016a257cbfcdeb009c05d99b2f8 Mon Sep 17 00:00:00 2001 From: Nikita Sobolev Date: Thu, 6 Feb 2020 00:09:01 +0300 Subject: [PATCH 074/117] Fixes python syntax in `--local-partial-types` example (#8369) --- docs/source/command_line.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst index d0fe5430c4e0..c516a67182fd 100644 --- 
a/docs/source/command_line.rst +++ b/docs/source/command_line.rst @@ -501,7 +501,7 @@ of the above sections. bar = None # Need type annotation here if using --local-partial-types baz = None # type: Optional[int] - def __init__(self) -> None + def __init__(self) -> None: self.bar = 1 reveal_type(Foo().bar) # Union[int, None] without --local-partial-types From 0c0369069c547feb5ccf43c886fcafafd87f476a Mon Sep 17 00:00:00 2001 From: "Michael J. Sullivan" Date: Wed, 5 Feb 2020 17:34:23 -0800 Subject: [PATCH 075/117] Make mypy.api.run_dmypy actually capture the output (#8375) When the main run API was made threadsafe this broke output capturing from run_dmypy. I don't really care about the threadsafety of run_dmypy but I do care about output capture ever working, so I am restoring the old sys.stdout swapping behavior for run_dmypy. I'd take a patch to thread stdout through the client if anybody really cares. --- mypy/api.py | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/mypy/api.py b/mypy/api.py index b1c508324889..ef3016ac31da 100644 --- a/mypy/api.py +++ b/mypy/api.py @@ -18,7 +18,8 @@ Any pretty formatting is left to the caller. The 'run_dmypy' function is similar, but instead mimics invocation of -dmypy. +dmypy. Note that run_dmypy is not thread-safe and modifies sys.stdout +and sys.stderr during its invocation. Note that these APIs don't support incremental generation of error messages. 
@@ -42,6 +43,8 @@ """ +import sys + from io import StringIO from typing import List, Tuple, TextIO, Callable @@ -69,4 +72,20 @@ def run(args: List[str]) -> Tuple[str, str, int]: def run_dmypy(args: List[str]) -> Tuple[str, str, int]: from mypy.dmypy.client import main - return _run(lambda stdout, stderr: main(args)) + + # A bunch of effort has been put into threading stdout and stderr + # through the main API to avoid the threadsafety problems of + # modifying sys.stdout/sys.stderr, but that hasn't been done for + # the dmypy client, so we just do the non-threadsafe thing. + def f(stdout: TextIO, stderr: TextIO) -> None: + old_stdout = sys.stdout + old_stderr = sys.stderr + try: + sys.stdout = stdout + sys.stderr = stderr + main(args) + finally: + sys.stdout = old_stdout + sys.stderr = old_stderr + + return _run(f) From 17b0a7a6b26f324bcc9a2af6f6bda97e80d6dfda Mon Sep 17 00:00:00 2001 From: Shantanu Date: Wed, 5 Feb 2020 22:33:53 -0800 Subject: [PATCH 076/117] stubtest: rewrite (#8325) * stubtest: don't hardcode python version This will cause us to assume python version is sys.version_info, which is the behaviour we want, since we're comparing things to the runtime * stubtest: fix build * stubtest: recognise typealias * stubtest: use argparse, support custom typeshed dir * stubtest: [minor] blacken * stubtest: [minor] import nits * stubtest: [minor] renames, reorder parameters * stubtest: [wip] start to use runtime objects directly * stubtest: [minor] make parameter names for verify_* consistent * stubtest: gut error handling * stubtest: add support for missing things * stubtest: implement verify module * stubtest: add trace for easier debugging * stubtest: implement verify class * stubtest: implement verify missing * stubtest: implement verify function * stubtest: implement verify var * stubtest: logging improvements * stubtest: improve verify function * stubtest: implement verify overload * stubtest: more improvements to logging * stubtest: add 
--ignore-missing-stub option * stubtest: [minor] make order more deterministic * stubtest: [minor] descend through stubs less hackily * stubtest: [minor] clean up imports * stubtest: [minor] remove debugging decorator * stubtest: small improvements for functions * stubtest: add --concise option * stubtest: add exit code * stubtest: redo verify function Rework things to avoid false positives / order nitpicks Make checks involving *args, **kwargs a little more sophisticated * stubtest: add ability to whitelist errors * stubtest: [minor] clean up error handling * stubtest: add --check-typeshed option * stubtest: [minor] handle distutils.command a little better * stubtest: adjust module level things we check in stubs * stubtest: check for mistaken positional only args * stubtest: be more permissive about positional-only arg names * stubtest: [minor] make error order more deterministic * stubtest: only mypy build once This makes the script complete 100x faster when using --check-typeshed * stubtest: [minor] remove antigravity from --check-typeshed * stubtest: make verify_var work * stubtest: verify types of argument default values * stubtest: pretend Literal[0, 1] is subtype of bool * stubtest: output unused whitelist entries * stubtest: [minor] deduplicate, sort --output-whitelist, fix exit code * stubtest: add more documentation Also flatten out get_mypy_type_of_runtime_value * stubtest: [minor] rename --output-whitelist to --generate-whitelist * stubtest: [minor] suppress warnings * stubtest: look into the mro for attributes Prevents false positives when not using --ignore-missing-stub * stubtest: better support @property and other decorators * stubtest: check classmethod and staticmethod * stubtest: [minor] support comments in whitelist * stubtest: [refactor] split up verify_funcitem * stubtest: [minor] suggest positional-only name The name doesn't matter, but if someone's fixing something, we might as well make it easy to match runtime name * stubtest: add __str__ 
for Signature * stubtest: implement smarter overload checking This eliminates ~400 false positives * stubtest: improve typeinfo output, simplify descriptions * stubtest: [minor] blacken Switched laptops, think I ran into a black version difference * stubtest: improve decorator handling, fix classmethod signature * stubtest: fix classmethod and staticmethod introspection This was just broken previously * stubtest: [minor] factor out is_dunder, check suffix * stubtest: fix proper_plugin, other selfcheck errors * stubtest: find submodules when explicitly testing a module * stubtest: remove f-strings for py35 * stubtest: remove variable annotations for py35 * stubtest: remove trailing commas for py35 * stubtest: other changes for py35 * stubtest: [minor] use line length 99 to match project * stubtest: add a flag to ignore positional-only errors * stubtest: check typevar upper bounds for default values is_subtype would always return False, leading to false positives most times TypeVars were used for parameters with default values. We still have some false positives from Unions of TypeVars, but that's less bad, and could almost all be fixed by adjusting the overload of Mapping.get * stubtest: don't crash because of bpo-39504 * stubtest: avoid false positive when defining enums * stubtest: allow multiple whitelists * stubtest: [minor] improve help message --- scripts/stubtest.py | 1185 ++++++++++++++++++++++++++++++++++++------- 1 file changed, 1015 insertions(+), 170 deletions(-) diff --git a/scripts/stubtest.py b/scripts/stubtest.py index 048075f1445e..00475b78168d 100644 --- a/scripts/stubtest.py +++ b/scripts/stubtest.py @@ -1,234 +1,1079 @@ """Tests for stubs. -Verify that various things in stubs are consistent with how things behave -at runtime. +Verify that various things in stubs are consistent with how things behave at runtime. 
+ """ +import argparse +import copy +import enum import importlib +import inspect +import subprocess import sys -from typing import Dict, Any, List, Iterator, NamedTuple, Optional, Mapping, Tuple -from typing_extensions import Type, Final -from collections import defaultdict +import types +import warnings from functools import singledispatch +from pathlib import Path +from typing import Any, Dict, Generic, Iterator, List, Optional, Tuple, TypeVar, Union + +from typing_extensions import Type -from mypy import build -from mypy.build import default_data_dir -from mypy.modulefinder import compute_search_paths, FindModuleCache -from mypy.errors import CompileError +import mypy.build +import mypy.modulefinder +import mypy.types from mypy import nodes from mypy.options import Options +from mypy.util import FancyFormatter -from dumpmodule import module_to_json, DumpNode - - -# TODO: email.contentmanager has a symbol table with a None node. -# This seems like it should not be. - -skip = { - '_importlib_modulespec', - '_subprocess', - 'distutils.command.bdist_msi', - 'distutils.command.bdist_packager', - 'msvcrt', - 'wsgiref.types', - 'mypy_extensions', - 'unittest.mock', # mock.call infinite loops on inspect.getsourcelines - # https://bugs.python.org/issue25532 - # TODO: can we filter only call? 
-} # type: Final - - -messages = { - 'not_in_runtime': ('{error.stub_type} "{error.name}" defined at line ' - ' {error.line} in stub but is not defined at runtime'), - 'not_in_stub': ('{error.module_type} "{error.name}" defined at line' - ' {error.line} at runtime but is not defined in stub'), - 'no_stubs': 'could not find typeshed {error.name}', - 'inconsistent': ('"{error.name}" is {error.stub_type} in stub but' - ' {error.module_type} at runtime'), -} # type: Final - -Error = NamedTuple('Error', ( - ('module', str), - ('name', str), - ('error_type', str), - ('line', Optional[int]), - ('stub_type', Optional[Type[nodes.Node]]), - ('module_type', Optional[str]), -)) - -ErrorParts = Tuple[ - List[str], - str, - Optional[int], - Optional[Type[nodes.Node]], - Optional[str], -] - - -def test_stub(options: Options, - find_module_cache: FindModuleCache, - name: str) -> Iterator[Error]: - stubs = { - mod: stub for mod, stub in build_stubs(options, find_module_cache, name).items() - if (mod == name or mod.startswith(name + '.')) and mod not in skip - } - for mod, stub in stubs.items(): - instance = dump_module(mod) +class Missing: + """Marker object for things that are missing (from a stub or the runtime).""" + + def __repr__(self) -> str: + return "MISSING" + + +MISSING = Missing() + +T = TypeVar("T") +MaybeMissing = Union[T, Missing] + +_formatter = FancyFormatter(sys.stdout, sys.stderr, False) + + +def _style(message: str, **kwargs: Any) -> str: + """Wrapper around mypy.util for fancy formatting.""" + kwargs.setdefault("color", "none") + return _formatter.style(message, **kwargs) + + +class Error: + def __init__( + self, + object_path: List[str], + message: str, + stub_object: MaybeMissing[nodes.Node], + runtime_object: MaybeMissing[Any], + *, + stub_desc: Optional[str] = None, + runtime_desc: Optional[str] = None + ) -> None: + """Represents an error found by stubtest. + + :param object_path: Location of the object with the error, + e.g. 
``["module", "Class", "method"]`` + :param message: Error message + :param stub_object: The mypy node representing the stub + :param runtime_object: Actual object obtained from the runtime + :param stub_desc: Specialised description for the stub object, should you wish + :param runtime_desc: Specialised description for the runtime object, should you wish + + """ + self.object_desc = ".".join(object_path) + self.message = message + self.stub_object = stub_object + self.runtime_object = runtime_object + self.stub_desc = stub_desc or str(getattr(stub_object, "type", stub_object)) + self.runtime_desc = runtime_desc or str(runtime_object) + + def is_missing_stub(self) -> bool: + """Whether or not the error is for something missing from the stub.""" + return isinstance(self.stub_object, Missing) + + def is_positional_only_related(self) -> bool: + """Whether or not the error is for something being (or not being) positional-only.""" + # TODO: This is hacky, use error codes or something more resilient + return "leading double underscore" in self.message + + def get_description(self, concise: bool = False) -> str: + """Returns a description of the error. 
+ + :param concise: Whether to return a concise, one-line description + + """ + if concise: + return _style(self.object_desc, bold=True) + " " + self.message - for identifiers, error_type, line, stub_type, module_type in verify(stub, instance): - yield Error(mod, '.'.join(identifiers), error_type, line, stub_type, module_type) + stub_line = None + stub_file = None + if not isinstance(self.stub_object, Missing): + stub_line = self.stub_object.line + # TODO: Find a way of getting the stub file + + stub_loc_str = "" + if stub_line: + stub_loc_str += " at line {}".format(stub_line) + if stub_file: + stub_loc_str += " in file {}".format(stub_file) + + runtime_line = None + runtime_file = None + if not isinstance(self.runtime_object, Missing): + try: + runtime_line = inspect.getsourcelines(self.runtime_object)[1] + except (OSError, TypeError): + pass + try: + runtime_file = inspect.getsourcefile(self.runtime_object) + except TypeError: + pass + + runtime_loc_str = "" + if runtime_line: + runtime_loc_str += " at line {}".format(runtime_line) + if runtime_file: + runtime_loc_str += " in file {}".format(runtime_file) + + output = [ + _style("error: ", color="red", bold=True), + _style(self.object_desc, bold=True), + " ", + self.message, + "\n", + "Stub:", + _style(stub_loc_str, dim=True), + "\n", + _style(self.stub_desc + "\n", color="blue", dim=True), + "Runtime:", + _style(runtime_loc_str, dim=True), + "\n", + _style(self.runtime_desc + "\n", color="blue", dim=True), + ] + return "".join(output) + + +def test_module(module_name: str) -> Iterator[Error]: + """Tests a given module's stub against introspecting it at runtime. + + Requires the stub to have been built already, accomplished by a call to ``build_stubs``. 
+ + :param module_name: The module to test + + """ + stub = get_stub(module_name) + if stub is None: + yield Error([module_name], "failed to find stubs", MISSING, None) + return + + try: + runtime = importlib.import_module(module_name) + except Exception as e: + yield Error([module_name], "failed to import: {}".format(e), stub, MISSING) + return + + # collections likes to warn us about the things we're doing + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + yield from verify(stub, runtime, [module_name]) @singledispatch -def verify(node: nodes.Node, - module_node: Optional[DumpNode]) -> Iterator[ErrorParts]: - raise TypeError('unknown mypy node ' + str(node)) +def verify( + stub: nodes.Node, runtime: MaybeMissing[Any], object_path: List[str] +) -> Iterator[Error]: + """Entry point for comparing a stub to a runtime object. + + We use single dispatch based on the type of ``stub``. + + :param stub: The mypy node representing a part of the stub + :param runtime: The runtime object corresponding to ``stub`` + """ + yield Error(object_path, "is an unknown mypy node", stub, runtime) @verify.register(nodes.MypyFile) -def verify_mypyfile(stub: nodes.MypyFile, - instance: Optional[DumpNode]) -> Iterator[ErrorParts]: - if instance is None: - yield [], 'not_in_runtime', stub.line, type(stub), None - elif instance['type'] != 'file': - yield [], 'inconsistent', stub.line, type(stub), instance['type'] - else: - stub_children = defaultdict(lambda: None, stub.names) # type: Mapping[str, Optional[nodes.SymbolTableNode]] - instance_children = defaultdict(lambda: None, instance['names']) +def verify_mypyfile( + stub: nodes.MypyFile, runtime: MaybeMissing[types.ModuleType], object_path: List[str] +) -> Iterator[Error]: + if isinstance(runtime, Missing): + yield Error(object_path, "is not present at runtime", stub, runtime) + return + if not isinstance(runtime, types.ModuleType): + yield Error(object_path, "is not a module", stub, runtime) + return - # TODO: I would 
rather not filter public children here. - # For example, what if the checkersurfaces an inconsistency - # in the typing of a private child - public_nodes = { - name: (stub_children[name], instance_children[name]) - for name in set(stub_children) | set(instance_children) - if not name.startswith('_') - and (stub_children[name] is None or stub_children[name].module_public) # type: ignore - } + # Check things in the stub that are public + to_check = set( + m + for m, o in stub.names.items() + if o.module_public and (not m.startswith("_") or hasattr(runtime, m)) + ) + # Check all things declared in module's __all__ + to_check.update(getattr(runtime, "__all__", [])) + to_check.difference_update({"__file__", "__doc__", "__name__", "__builtins__", "__package__"}) + # We currently don't check things in the module that aren't in the stub, other than things that + # are in __all__, to avoid false positives. - for node, (stub_child, instance_child) in public_nodes.items(): - stub_child = getattr(stub_child, 'node', None) - for identifiers, error_type, line, stub_type, module_type in verify(stub_child, instance_child): - yield ([node] + identifiers, error_type, line, stub_type, module_type) + for entry in sorted(to_check): + yield from verify( + stub.names[entry].node if entry in stub.names else MISSING, + getattr(runtime, entry, MISSING), + object_path + [entry], + ) @verify.register(nodes.TypeInfo) -def verify_typeinfo(stub: nodes.TypeInfo, - instance: Optional[DumpNode]) -> Iterator[ErrorParts]: - if not instance: - yield [], 'not_in_runtime', stub.line, type(stub), None - elif instance['type'] != 'class': - yield [], 'inconsistent', stub.line, type(stub), instance['type'] +def verify_typeinfo( + stub: nodes.TypeInfo, runtime: MaybeMissing[Type[Any]], object_path: List[str] +) -> Iterator[Error]: + if isinstance(runtime, Missing): + yield Error(object_path, "is not present at runtime", stub, runtime, stub_desc=repr(stub)) + return + if not isinstance(runtime, type): + yield 
Error(object_path, "is not a type", stub, runtime, stub_desc=repr(stub)) + return + + to_check = set(stub.names) + to_check.update(m for m in vars(runtime) if not m.startswith("_")) + + for entry in sorted(to_check): + yield from verify( + next((t.names[entry].node for t in stub.mro if entry in t.names), MISSING), + getattr(runtime, entry, MISSING), + object_path + [entry], + ) + + +def _verify_static_class_methods( + stub: nodes.FuncItem, runtime: types.FunctionType, object_path: List[str] +) -> Iterator[str]: + if runtime.__name__ == "__new__": + # Special cased by Python, so never declared as staticmethod + return + if inspect.isbuiltin(runtime): + # The isinstance checks don't work reliably for builtins, e.g. datetime.datetime.now, so do + # something a little hacky that seems to work well + probably_class_method = isinstance(getattr(runtime, "__self__", None), type) + if probably_class_method and not stub.is_class: + yield "runtime is a classmethod but stub is not" + if not probably_class_method and stub.is_class: + yield "stub is a classmethod but runtime is not" + return + + # Look the object up statically, to avoid binding by the descriptor protocol + static_runtime = importlib.import_module(object_path[0]) + for entry in object_path[1:]: + static_runtime = inspect.getattr_static(static_runtime, entry) + + if isinstance(static_runtime, classmethod) and not stub.is_class: + yield "runtime is a classmethod but stub is not" + if not isinstance(static_runtime, classmethod) and stub.is_class: + yield "stub is a classmethod but runtime is not" + if isinstance(static_runtime, staticmethod) and not stub.is_static: + yield "runtime is a staticmethod but stub is not" + if not isinstance(static_runtime, staticmethod) and stub.is_static: + yield "stub is a staticmethod but runtime is not" + + +def _verify_arg_name( + stub_arg: nodes.Argument, runtime_arg: inspect.Parameter, function_name: str +) -> Iterator[str]: + """Checks whether argument names match.""" + # Ignore 
exact names for all dunder methods other than __init__ + if is_dunder(function_name, exclude_init=True): + return + + def strip_prefix(s: str, prefix: str) -> str: + return s[len(prefix) :] if s.startswith(prefix) else s + + if strip_prefix(stub_arg.variable.name, "__") == runtime_arg.name: + return + + def names_approx_match(a: str, b: str) -> bool: + a = a.strip("_") + b = b.strip("_") + return a.startswith(b) or b.startswith(a) or len(a) == 1 or len(b) == 1 + + # Be more permissive about names matching for positional-only arguments + if runtime_arg.kind == inspect.Parameter.POSITIONAL_ONLY and names_approx_match( + stub_arg.variable.name, runtime_arg.name + ): + return + # This comes up with namedtuples, so ignore + if stub_arg.variable.name == "_self": + return + yield ( + 'stub argument "{}" differs from runtime argument "{}"'.format( + stub_arg.variable.name, runtime_arg.name + ) + ) + + +def _verify_arg_default_value( + stub_arg: nodes.Argument, runtime_arg: inspect.Parameter +) -> Iterator[str]: + """Checks whether argument default values are compatible.""" + if runtime_arg.default != inspect.Parameter.empty: + if stub_arg.kind not in (nodes.ARG_OPT, nodes.ARG_NAMED_OPT): + yield ( + 'runtime argument "{}" has a default value but stub argument does not'.format( + runtime_arg.name + ) + ) + else: + runtime_type = get_mypy_type_of_runtime_value(runtime_arg.default) + # Fallback to the type annotation type if var type is missing. The type annotation + # is an UnboundType, but I don't know enough to know what the pros and cons here are. + # UnboundTypes have ugly question marks following them, so default to var type. 
+ # Note we do this same fallback when constructing signatures in from_overloadedfuncdef + stub_type = stub_arg.variable.type or stub_arg.type_annotation + if isinstance(stub_type, mypy.types.TypeVarType): + stub_type = stub_type.upper_bound + if ( + runtime_type is not None + and stub_type is not None + # Avoid false positives for marker objects + and type(runtime_arg.default) != object + and not is_subtype_helper(runtime_type, stub_type) + ): + yield ( + 'runtime argument "{}" has a default value of type {}, ' + "which is incompatible with stub argument type {}".format( + runtime_arg.name, runtime_type, stub_type + ) + ) else: - for attr, attr_node in stub.names.items(): - subdump = instance['attributes'].get(attr, None) - for identifiers, error_type, line, stub_type, module_type in verify(attr_node.node, subdump): - yield ([attr] + identifiers, error_type, line, stub_type, module_type) + if stub_arg.kind in (nodes.ARG_OPT, nodes.ARG_NAMED_OPT): + yield ( + 'stub argument "{}" has a default value but runtime argument does not'.format( + stub_arg.variable.name + ) + ) + + +class Signature(Generic[T]): + def __init__(self) -> None: + self.pos = [] # type: List[T] + self.kwonly = {} # type: Dict[str, T] + self.varpos = None # type: Optional[T] + self.varkw = None # type: Optional[T] + + def __str__(self) -> str: + def get_name(arg: Any) -> str: + if isinstance(arg, inspect.Parameter): + return arg.name + if isinstance(arg, nodes.Argument): + return arg.variable.name + raise ValueError + + def get_type(arg: Any) -> Optional[str]: + if isinstance(arg, inspect.Parameter): + return None + if isinstance(arg, nodes.Argument): + return str(arg.variable.type or arg.type_annotation) + raise ValueError + + def has_default(arg: Any) -> bool: + if isinstance(arg, inspect.Parameter): + return arg.default != inspect.Parameter.empty + if isinstance(arg, nodes.Argument): + return arg.kind in (nodes.ARG_OPT, nodes.ARG_NAMED_OPT) + raise ValueError + + def get_desc(arg: Any) -> str: 
+ arg_type = get_type(arg) + return ( + get_name(arg) + + (": {}".format(arg_type) if arg_type else "") + + (" = ..." if has_default(arg) else "") + ) + + ret = "def (" + ret += ", ".join( + [get_desc(arg) for arg in self.pos] + + (["*" + get_name(self.varpos)] if self.varpos else (["*"] if self.kwonly else [])) + + [get_desc(arg) for arg in self.kwonly.values()] + + (["**" + get_name(self.varkw)] if self.varkw else []) + ) + ret += ")" + return ret + + @staticmethod + def from_funcitem(stub: nodes.FuncItem) -> "Signature[nodes.Argument]": + stub_sig = Signature() # type: Signature[nodes.Argument] + for stub_arg in stub.arguments: + if stub_arg.kind in (nodes.ARG_POS, nodes.ARG_OPT): + stub_sig.pos.append(stub_arg) + elif stub_arg.kind in (nodes.ARG_NAMED, nodes.ARG_NAMED_OPT): + stub_sig.kwonly[stub_arg.variable.name] = stub_arg + elif stub_arg.kind == nodes.ARG_STAR: + stub_sig.varpos = stub_arg + elif stub_arg.kind == nodes.ARG_STAR2: + stub_sig.varkw = stub_arg + else: + raise ValueError + return stub_sig + + @staticmethod + def from_inspect_signature(signature: inspect.Signature,) -> "Signature[inspect.Parameter]": + runtime_sig = Signature() # type: Signature[inspect.Parameter] + for runtime_arg in signature.parameters.values(): + if runtime_arg.kind in ( + inspect.Parameter.POSITIONAL_ONLY, + inspect.Parameter.POSITIONAL_OR_KEYWORD, + ): + runtime_sig.pos.append(runtime_arg) + elif runtime_arg.kind == inspect.Parameter.KEYWORD_ONLY: + runtime_sig.kwonly[runtime_arg.name] = runtime_arg + elif runtime_arg.kind == inspect.Parameter.VAR_POSITIONAL: + runtime_sig.varpos = runtime_arg + elif runtime_arg.kind == inspect.Parameter.VAR_KEYWORD: + runtime_sig.varkw = runtime_arg + else: + raise ValueError + return runtime_sig + + @staticmethod + def from_overloadedfuncdef(stub: nodes.OverloadedFuncDef,) -> "Signature[nodes.Argument]": + """Returns a Signature from an OverloadedFuncDef. 
+ + If life were simple, to verify_overloadedfuncdef, we'd just verify_funcitem for each of its + items. Unfortunately, life isn't simple and overloads are pretty deceitful. So instead, we + try and combine the overload's items into a single signature that is compatible with any + lies it might try to tell. + + """ + # For all dunder methods other than __init__, just assume all args are positional-only + assume_positional_only = is_dunder(stub.name, exclude_init=True) + + all_args = {} # type: Dict[str, List[Tuple[nodes.Argument, int]]] + for func in map(_resolve_funcitem_from_decorator, stub.items): + assert func is not None + for index, arg in enumerate(func.arguments): + # For positional-only args, we allow overloads to have different names for the same + # argument. To accomplish this, we just make up a fake index-based name. + name = ( + "__{}".format(index) + if arg.variable.name.startswith("__") or assume_positional_only + else arg.variable.name + ) + all_args.setdefault(name, []).append((arg, index)) + + def get_position(arg_name: str) -> int: + # We just need this to return the positional args in the correct order. 
+ return max(index for _, index in all_args[arg_name]) + + def get_type(arg_name: str) -> mypy.types.ProperType: + with mypy.state.strict_optional_set(True): + all_types = [ + arg.variable.type or arg.type_annotation for arg, _ in all_args[arg_name] + ] + return mypy.typeops.make_simplified_union([t for t in all_types if t]) + + def get_kind(arg_name: str) -> int: + kinds = {arg.kind for arg, _ in all_args[arg_name]} + if nodes.ARG_STAR in kinds: + return nodes.ARG_STAR + if nodes.ARG_STAR2 in kinds: + return nodes.ARG_STAR2 + # The logic here is based on two tenets: + # 1) If an arg is ever optional (or unspecified), it is optional + # 2) If an arg is ever positional, it is positional + is_opt = ( + len(all_args[arg_name]) < len(stub.items) + or nodes.ARG_OPT in kinds + or nodes.ARG_NAMED_OPT in kinds + ) + is_pos = nodes.ARG_OPT in kinds or nodes.ARG_POS in kinds + if is_opt: + return nodes.ARG_OPT if is_pos else nodes.ARG_NAMED_OPT + return nodes.ARG_POS if is_pos else nodes.ARG_NAMED + + sig = Signature() # type: Signature[nodes.Argument] + for arg_name in sorted(all_args, key=get_position): + # example_arg_name gives us a real name (in case we had a fake index-based name) + example_arg_name = all_args[arg_name][0][0].variable.name + arg = nodes.Argument( + nodes.Var(example_arg_name, get_type(arg_name)), + type_annotation=None, + initializer=None, + kind=get_kind(arg_name), + ) + if arg.kind in (nodes.ARG_POS, nodes.ARG_OPT): + sig.pos.append(arg) + elif arg.kind in (nodes.ARG_NAMED, nodes.ARG_NAMED_OPT): + sig.kwonly[arg.variable.name] = arg + elif arg.kind == nodes.ARG_STAR: + sig.varpos = arg + elif arg.kind == nodes.ARG_STAR2: + sig.varkw = arg + else: + raise ValueError + return sig + + +def _verify_signature( + stub: Signature[nodes.Argument], runtime: Signature[inspect.Parameter], function_name: str +) -> Iterator[str]: + # Check positional arguments match up + for stub_arg, runtime_arg in zip(stub.pos, runtime.pos): + yield from 
_verify_arg_name(stub_arg, runtime_arg, function_name) + yield from _verify_arg_default_value(stub_arg, runtime_arg) + if ( + runtime_arg.kind == inspect.Parameter.POSITIONAL_ONLY + and not stub_arg.variable.name.startswith("__") + and not stub_arg.variable.name.strip("_") == "self" + and not is_dunder(function_name) # noisy for dunder methods + ): + yield ( + 'stub argument "{}" should be positional-only ' + '(rename with a leading double underscore, i.e. "__{}")'.format( + stub_arg.variable.name, runtime_arg.name + ) + ) + if ( + runtime_arg.kind != inspect.Parameter.POSITIONAL_ONLY + and stub_arg.variable.name.startswith("__") + ): + yield ( + 'stub argument "{}" should be positional or keyword ' + "(remove leading double underscore)".format(stub_arg.variable.name) + ) + + # Checks involving *args + if len(stub.pos) == len(runtime.pos): + if stub.varpos is None and runtime.varpos is not None: + yield 'stub does not have *args argument "{}"'.format(runtime.varpos.name) + if stub.varpos is not None and runtime.varpos is None: + yield 'runtime does not have *args argument "{}"'.format(stub.varpos.variable.name) + elif len(stub.pos) > len(runtime.pos): + if runtime.varpos is None: + for stub_arg in stub.pos[len(runtime.pos) :]: + # If the variable is in runtime.kwonly, it's just mislabelled as not a + # keyword-only argument; we report the error while checking keyword-only arguments + if stub_arg.variable.name not in runtime.kwonly: + yield 'runtime does not have argument "{}"'.format(stub_arg.variable.name) + # We do not check whether stub takes *args when the runtime does, for cases where the stub + # just listed out the extra parameters the function takes + elif len(stub.pos) < len(runtime.pos): + if stub.varpos is None: + for runtime_arg in runtime.pos[len(stub.pos) :]: + yield 'stub does not have argument "{}"'.format(runtime_arg.name) + elif runtime.pos is None: + yield 'runtime does not have *args argument "{}"'.format(stub.varpos.variable.name) + + # Check 
keyword-only args + for arg in sorted(set(stub.kwonly) & set(runtime.kwonly)): + stub_arg, runtime_arg = stub.kwonly[arg], runtime.kwonly[arg] + yield from _verify_arg_name(stub_arg, runtime_arg, function_name) + yield from _verify_arg_default_value(stub_arg, runtime_arg) + + # Checks involving **kwargs + if stub.varkw is None and runtime.varkw is not None: + # We do not check whether stub takes **kwargs when the runtime does, for cases where the + # stub just listed out the extra keyword parameters the function takes + # Also check against positional parameters, to avoid a nitpicky message when an argument + # isn't marked as keyword-only + stub_pos_names = set(stub_arg.variable.name for stub_arg in stub.pos) + if not set(runtime.kwonly).issubset(set(stub.kwonly) | stub_pos_names): + yield 'stub does not have **kwargs argument "{}"'.format(runtime.varkw.name) + if stub.varkw is not None and runtime.varkw is None: + yield 'runtime does not have **kwargs argument "{}"'.format(stub.varkw.variable.name) + if runtime.varkw is None or not set(runtime.kwonly).issubset(set(stub.kwonly)): + for arg in sorted(set(stub.kwonly) - set(runtime.kwonly)): + yield 'runtime does not have argument "{}"'.format(arg) + if stub.varkw is None or not set(stub.kwonly).issubset(set(runtime.kwonly)): + for arg in sorted(set(runtime.kwonly) - set(stub.kwonly)): + if arg in set(stub_arg.variable.name for stub_arg in stub.pos): + yield 'stub argument "{}" is not keyword-only'.format(arg) + else: + yield 'stub does not have argument "{}"'.format(arg) @verify.register(nodes.FuncItem) -def verify_funcitem(stub: nodes.FuncItem, - instance: Optional[DumpNode]) -> Iterator[ErrorParts]: - if not instance: - yield [], 'not_in_runtime', stub.line, type(stub), None - elif 'type' not in instance or instance['type'] not in ('function', 'callable'): - yield [], 'inconsistent', stub.line, type(stub), instance['type'] - # TODO check arguments and return value - - -@verify.register(type(None)) -def 
verify_none(stub: None, - instance: Optional[DumpNode]) -> Iterator[ErrorParts]: - if instance is None: - yield [], 'not_in_stub', None, None, None - else: - yield [], 'not_in_stub', instance['line'], None, instance['type'] +def verify_funcitem( + stub: nodes.FuncItem, runtime: MaybeMissing[types.FunctionType], object_path: List[str] +) -> Iterator[Error]: + if isinstance(runtime, Missing): + yield Error(object_path, "is not present at runtime", stub, runtime) + return + if ( + not isinstance(runtime, (types.FunctionType, types.BuiltinFunctionType)) + and not isinstance(runtime, (types.MethodType, types.BuiltinMethodType)) + and not inspect.ismethoddescriptor(runtime) + ): + yield Error(object_path, "is not a function", stub, runtime) + return + + for message in _verify_static_class_methods(stub, runtime, object_path): + yield Error(object_path, "is inconsistent, " + message, stub, runtime) + + try: + signature = inspect.signature(runtime) + except (ValueError, RuntimeError): + # inspect.signature throws sometimes + # catch RuntimeError because of https://bugs.python.org/issue39504 + return + + stub_sig = Signature.from_funcitem(stub) + runtime_sig = Signature.from_inspect_signature(signature) + + for message in _verify_signature(stub_sig, runtime_sig, function_name=stub.name): + yield Error( + object_path, + "is inconsistent, " + message, + stub, + runtime, + runtime_desc="def " + str(signature), + ) + + +@verify.register(Missing) +def verify_none( + stub: Missing, runtime: MaybeMissing[Any], object_path: List[str] +) -> Iterator[Error]: + if isinstance(runtime, Missing): + try: + # We shouldn't really get here since that would involve something not existing both in + # the stub and the runtime, however, some modules like distutils.command have some + # weird things going on. Try to see if we can find a runtime object by importing it, + # otherwise crash. 
+ runtime = importlib.import_module(".".join(object_path)) + except ImportError: + raise RuntimeError + yield Error(object_path, "is not present in stub", stub, runtime) @verify.register(nodes.Var) -def verify_var(node: nodes.Var, - module_node: Optional[DumpNode]) -> Iterator[ErrorParts]: - if False: - yield None - # Need to check if types are inconsistent. - #if 'type' not in dump or dump['type'] != node.node.type: - # import ipdb; ipdb.set_trace() - # yield name, 'inconsistent', node.node.line, shed_type, module_type +def verify_var( + stub: nodes.Var, runtime: MaybeMissing[Any], object_path: List[str] +) -> Iterator[Error]: + if isinstance(runtime, Missing): + # Don't always yield an error here, because we often can't find instance variables + if len(object_path) <= 1: + yield Error(object_path, "is not present at runtime", stub, runtime) + return + + runtime_type = get_mypy_type_of_runtime_value(runtime) + if ( + runtime_type is not None + and stub.type is not None + and not is_subtype_helper(runtime_type, stub.type) + ): + should_error = True + # Avoid errors when defining enums, since runtime_type is the enum itself, but we'd + # annotate it with the type of runtime.value + if isinstance(runtime, enum.Enum): + runtime_type = get_mypy_type_of_runtime_value(runtime.value) + if runtime_type is not None and is_subtype_helper(runtime_type, stub.type): + should_error = False + + if should_error: + yield Error( + object_path, + "variable differs from runtime type {}".format(runtime_type), + stub, + runtime, + ) @verify.register(nodes.OverloadedFuncDef) -def verify_overloadedfuncdef(node: nodes.OverloadedFuncDef, - module_node: Optional[DumpNode]) -> Iterator[ErrorParts]: - # Should check types of the union of the overloaded types. 
- if False: - yield None +def verify_overloadedfuncdef( + stub: nodes.OverloadedFuncDef, runtime: MaybeMissing[Any], object_path: List[str] +) -> Iterator[Error]: + if isinstance(runtime, Missing): + yield Error(object_path, "is not present at runtime", stub, runtime) + return + + if stub.is_property: + # We get here in cases of overloads from property.setter + return + + try: + signature = inspect.signature(runtime) + except ValueError: + return + + stub_sig = Signature.from_overloadedfuncdef(stub) + runtime_sig = Signature.from_inspect_signature(signature) + + for message in _verify_signature(stub_sig, runtime_sig, function_name=stub.name): + # TODO: This is a little hacky, but the addition here is super useful + if "has a default value of type" in message: + message += ( + ". This is often caused by overloads failing to account for explicitly passing " + "in the default value." + ) + yield Error( + object_path, + "is inconsistent, " + message, + stub, + runtime, + stub_desc=str(stub.type) + "\nInferred signature: {}".format(stub_sig), + runtime_desc="def " + str(signature), + ) @verify.register(nodes.TypeVarExpr) -def verify_typevarexpr(node: nodes.TypeVarExpr, - module_node: Optional[DumpNode]) -> Iterator[ErrorParts]: +def verify_typevarexpr( + stub: nodes.TypeVarExpr, runtime: MaybeMissing[Any], object_path: List[str] +) -> Iterator[Error]: if False: yield None +def _verify_property(stub: nodes.Decorator, runtime: Any) -> Iterator[str]: + assert stub.func.is_property + if isinstance(runtime, property): + return + if inspect.isdatadescriptor(runtime): + # It's enough like a property... + return + # Sometimes attributes pretend to be properties, for instance, to express that they + # are read only. So whitelist if runtime_type matches the return type of stub. 
+ runtime_type = get_mypy_type_of_runtime_value(runtime) + func_type = ( + stub.func.type.ret_type if isinstance(stub.func.type, mypy.types.CallableType) else None + ) + if ( + runtime_type is not None + and func_type is not None + and is_subtype_helper(runtime_type, func_type) + ): + return + yield "is inconsistent, cannot reconcile @property on stub with runtime object" + + +def _resolve_funcitem_from_decorator(dec: nodes.OverloadPart) -> Optional[nodes.FuncItem]: + """Returns a FuncItem that corresponds to the output of the decorator. + + Returns None if we can't figure out what that would be. For convenience, this function also + accepts FuncItems. + + """ + if isinstance(dec, nodes.FuncItem): + return dec + if dec.func.is_property: + return None + + def apply_decorator_to_funcitem( + decorator: nodes.Expression, func: nodes.FuncItem + ) -> Optional[nodes.FuncItem]: + if not isinstance(decorator, nodes.NameExpr): + return None + if decorator.fullname is None: + # Happens with namedtuple + return None + if decorator.fullname in ( + "builtins.staticmethod", + "typing.overload", + "abc.abstractmethod", + ): + return func + if decorator.fullname == "builtins.classmethod": + assert func.arguments[0].variable.name in ("cls", "metacls") + ret = copy.copy(func) + # Remove the cls argument, since it's not present in inspect.signature of classmethods + ret.arguments = ret.arguments[1:] + return ret + # Just give up on any other decorators. After excluding properties, we don't run into + # anything else when running on typeshed's stdlib. 
+ return None + + func = dec.func # type: nodes.FuncItem + for decorator in dec.original_decorators: + resulting_func = apply_decorator_to_funcitem(decorator, func) + if resulting_func is None: + return None + func = resulting_func + return func + + @verify.register(nodes.Decorator) -def verify_decorator(node: nodes.Decorator, - module_node: Optional[DumpNode]) -> Iterator[ErrorParts]: - if False: - yield None +def verify_decorator( + stub: nodes.Decorator, runtime: MaybeMissing[Any], object_path: List[str] +) -> Iterator[Error]: + if isinstance(runtime, Missing): + yield Error(object_path, "is not present at runtime", stub, runtime) + return + if stub.func.is_property: + for message in _verify_property(stub, runtime): + yield Error(object_path, message, stub, runtime) + return + + func = _resolve_funcitem_from_decorator(stub) + if func is not None: + yield from verify(func, runtime, object_path) @verify.register(nodes.TypeAlias) -def verify_typealias(node: nodes.TypeAlias, - module_node: Optional[DumpNode]) -> Iterator[ErrorParts]: +def verify_typealias( + stub: nodes.TypeAlias, runtime: MaybeMissing[Any], object_path: List[str] +) -> Iterator[Error]: if False: yield None -def dump_module(name: str) -> DumpNode: - mod = importlib.import_module(name) - return {'type': 'file', 'names': module_to_json(mod)} +def is_dunder(name: str, exclude_init: bool = False) -> bool: + """Returns whether name is a dunder name. 
+ :param exclude_init: Whether to return False for __init__ -def build_stubs(options: Options, - find_module_cache: FindModuleCache, - mod: str) -> Dict[str, nodes.MypyFile]: - sources = find_module_cache.find_modules_recursive(mod) - try: - res = build.build(sources=sources, options=options) - messages = res.errors - except CompileError as error: - messages = error.messages - - if messages: - for msg in messages: - print(msg) - sys.exit(1) - return res.files + """ + if exclude_init and name == "__init__": + return False + return name.startswith("__") and name.endswith("__") + + +def is_subtype_helper(left: mypy.types.Type, right: mypy.types.Type) -> bool: + """Checks whether ``left`` is a subtype of ``right``.""" + left = mypy.types.get_proper_type(left) + right = mypy.types.get_proper_type(right) + if ( + isinstance(left, mypy.types.LiteralType) + and isinstance(left.value, int) + and left.value in (0, 1) + and isinstance(right, mypy.types.Instance) + and right.type.fullname == "builtins.bool" + ): + # Pretend Literal[0, 1] is a subtype of bool to avoid unhelpful errors. + return True + with mypy.state.strict_optional_set(True): + return mypy.subtypes.is_subtype(left, right) + + +def get_mypy_type_of_runtime_value(runtime: Any) -> Optional[mypy.types.Type]: + """Returns a mypy type object representing the type of ``runtime``. + + Returns None if we can't find something that works. + + """ + if runtime is None: + return mypy.types.NoneType() + if isinstance(runtime, property): + # Give up on properties to avoid issues with things that are typed as attributes. 
+ return None + if isinstance(runtime, (types.FunctionType, types.BuiltinFunctionType)): + # TODO: Construct a mypy.types.CallableType + return None + + # Try and look up a stub for the runtime object + stub = get_stub(type(runtime).__module__) + if stub is None: + return None + type_name = type(runtime).__name__ + if type_name not in stub.names: + return None + type_info = stub.names[type_name].node + if not isinstance(type_info, nodes.TypeInfo): + return None + def anytype() -> mypy.types.AnyType: + return mypy.types.AnyType(mypy.types.TypeOfAny.unannotated) -def main(args: List[str]) -> Iterator[Error]: - if len(args) == 1: - print('must provide at least one module to test') + if isinstance(runtime, tuple): + # Special case tuples so we construct a valid mypy.types.TupleType + opt_items = [get_mypy_type_of_runtime_value(v) for v in runtime] + items = [(i if i is not None else anytype()) for i in opt_items] + fallback = mypy.types.Instance(type_info, [anytype()]) + return mypy.types.TupleType(items, fallback) + + # Technically, Literals are supposed to be only bool, int, str or bytes, but this + # seems to work fine + return mypy.types.LiteralType( + value=runtime, + fallback=mypy.types.Instance(type_info, [anytype() for _ in type_info.type_vars]), + ) + + +_all_stubs = {} # type: Dict[str, nodes.MypyFile] + + +def build_stubs(modules: List[str], options: Options, find_submodules: bool = False) -> List[str]: + """Uses mypy to construct stub objects for the given modules. + + This sets global state that ``get_stub`` can access. + + Returns all modules we might want to check. If ``find_submodules`` is False, this is equal + to ``modules``. + + :param modules: List of modules to build stubs for. + :param options: Mypy options for finding and building stubs. + :param find_submodules: Whether to attempt to find submodules of the given modules as well. 
+ + """ + data_dir = mypy.build.default_data_dir() + search_path = mypy.modulefinder.compute_search_paths([], options, data_dir) + find_module_cache = mypy.modulefinder.FindModuleCache(search_path) + + all_modules = [] + sources = [] + for module in modules: + all_modules.append(module) + if not find_submodules: + module_path = find_module_cache.find_module(module) + if module_path is None: + # test_module will yield an error later when it can't find stubs + continue + sources.append(mypy.modulefinder.BuildSource(module_path, module, None)) + else: + found_sources = find_module_cache.find_modules_recursive(module) + sources.extend(found_sources) + all_modules.extend(s.module for s in found_sources if s.module not in all_modules) + + res = mypy.build.build(sources=sources, options=options) + if res.errors: + output = [_style("error: ", color="red", bold=True), " failed mypy build.\n"] + print("".join(output) + "\n".join(res.errors)) sys.exit(1) + + global _all_stubs + _all_stubs = res.files + + return all_modules + + +def get_stub(module: str) -> Optional[nodes.MypyFile]: + """Returns a stub object for the given module, if we've built one.""" + return _all_stubs.get(module) + + +def get_typeshed_stdlib_modules(custom_typeshed_dir: Optional[str]) -> List[str]: + """Returns a list of stdlib modules in typeshed (for current Python version).""" + # This snippet is based on code in mypy.modulefinder.default_lib_path + if custom_typeshed_dir: + typeshed_dir = Path(custom_typeshed_dir) else: - modules = args[1:] + typeshed_dir = Path(mypy.build.default_data_dir()) + if (typeshed_dir / "stubs-auto").exists(): + typeshed_dir /= "stubs-auto" + typeshed_dir /= "typeshed" + + versions = ["2and3", "3"] + for minor in range(sys.version_info.minor + 1): + versions.append("3.{}".format(minor)) + + modules = [] + for version in versions: + base = typeshed_dir / "stdlib" / version + if base.exists(): + output = subprocess.check_output(["find", str(base), "-type", 
"f"]).decode("utf-8") + paths = [Path(p) for p in output.splitlines()] + for path in paths: + if path.stem == "__init__": + path = path.parent + modules.append(".".join(path.relative_to(base).parts[:-1] + (path.stem,))) + return sorted(modules) + + +def get_whitelist_entries(whitelist_file: Optional[str]) -> Iterator[str]: + if not whitelist_file: + return + + def strip_comments(s: str) -> str: + try: + return s[: s.index("#")].strip() + except ValueError: + return s.strip() + + with open(whitelist_file) as f: + for line in f.readlines(): + entry = strip_comments(line) + if entry: + yield entry + + +def main() -> int: + assert sys.version_info >= (3, 5), "This script requires at least Python 3.5" + + parser = argparse.ArgumentParser( + description="Compares stubs to objects introspected from the runtime." + ) + parser.add_argument("modules", nargs="*", help="Modules to test") + parser.add_argument("--concise", action="store_true", help="Make output concise") + parser.add_argument( + "--ignore-missing-stub", + action="store_true", + help="Ignore errors for stub missing things that are present at runtime", + ) + parser.add_argument( + "--ignore-positional-only", + action="store_true", + help="Ignore errors for whether an argument should or shouldn't be positional-only", + ) + parser.add_argument( + "--custom-typeshed-dir", metavar="DIR", help="Use the custom typeshed in DIR" + ) + parser.add_argument( + "--check-typeshed", action="store_true", help="Check all stdlib modules in typeshed" + ) + parser.add_argument( + "--whitelist", + action="append", + metavar="FILE", + default=[], + help=( + "Use file as a whitelist. Can be passed multiple times to combine multiple " + "whitelists. Whitelist can be created with --generate-whitelist" + ), + ) + parser.add_argument( + "--generate-whitelist", + action="store_true", + help="Print a whitelist (to stdout) to be used with --whitelist", + ) + args = parser.parse_args() + + # Load the whitelist. 
This is a series of strings corresponding to Error.object_desc + # Values in the dict will store whether we used the whitelist entry or not. + whitelist = { + entry: False + for whitelist_file in args.whitelist + for entry in get_whitelist_entries(whitelist_file) + } + + # If we need to generate a whitelist, we store Error.object_desc for each error here. + generated_whitelist = set() + + modules = args.modules + if args.check_typeshed: + assert not args.modules, "Cannot pass both --check-typeshed and a list of modules" + modules = get_typeshed_stdlib_modules(args.custom_typeshed_dir) + modules.remove("antigravity") # it's super annoying + + assert modules, "No modules to check" options = Options() options.incremental = False - data_dir = default_data_dir() - search_path = compute_search_paths([], options, data_dir) - find_module_cache = FindModuleCache(search_path) + options.custom_typeshed_dir = args.custom_typeshed_dir + modules = build_stubs(modules, options, find_submodules=not args.check_typeshed) + + exit_code = 0 for module in modules: - for error in test_stub(options, find_module_cache, module): - yield error + for error in test_module(module): + # Filter errors + if args.ignore_missing_stub and error.is_missing_stub(): + continue + if args.ignore_positional_only and error.is_positional_only_related(): + continue + if error.object_desc in whitelist: + whitelist[error.object_desc] = True + continue + + # We have errors, so change exit code, and output whatever necessary + exit_code = 1 + if args.generate_whitelist: + generated_whitelist.add(error.object_desc) + continue + print(error.get_description(concise=args.concise)) + + # Print unused whitelist entries + for w in whitelist: + if not whitelist[w]: + exit_code = 1 + print("note: unused whitelist entry {}".format(w)) + + # Print the generated whitelist + if args.generate_whitelist: + for e in sorted(generated_whitelist): + print(e) + exit_code = 0 + return exit_code -if __name__ == '__main__': - for err 
in main(sys.argv): - print(messages[err.error_type].format(error=err)) +if __name__ == "__main__": + sys.exit(main()) From a6c45093d0475cb3bb13b28df448a27abf6a96fb Mon Sep 17 00:00:00 2001 From: "Michael J. Sullivan" Date: Thu, 6 Feb 2020 11:45:41 -0800 Subject: [PATCH 077/117] Use repr to format out/err in dmypy verbose stats output (#8374) This prevents control characters being interpreted, though it does uglify the output in its own way. --- mypy/dmypy/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/dmypy/client.py b/mypy/dmypy/client.py index e00bb437a40e..cf16cb270b01 100644 --- a/mypy/dmypy/client.py +++ b/mypy/dmypy/client.py @@ -413,7 +413,7 @@ def show_stats(response: Mapping[str, object]) -> None: if key not in ('out', 'err'): print("%-24s: %10s" % (key, "%.3f" % value if isinstance(value, float) else value)) else: - value = str(value).replace('\n', '\\n') + value = repr(value)[1:-1] if len(value) > 50: value = value[:40] + ' ...' print("%-24s: %s" % (key, value)) From 8888b1aedf6d4ba543ae12f538f190ce5e65c20d Mon Sep 17 00:00:00 2001 From: Jakub Stasiak Date: Thu, 6 Feb 2020 20:51:09 +0100 Subject: [PATCH 078/117] Support typing.Annotated on top of typing_extensions.Annotated (#8371) This is to handle PEP 593 support recently merged into CPython[1]. 
[1] https://github.com/python/cpython/pull/18260 --- mypy/typeanal.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index ed5d0e0474e4..183a9a792c91 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -43,6 +43,7 @@ 'typing.Union', 'typing.Literal', 'typing_extensions.Literal', + 'typing.Annotated', 'typing_extensions.Annotated', } # type: Final @@ -311,7 +312,7 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Opt return UninhabitedType(is_noreturn=True) elif fullname in ('typing_extensions.Literal', 'typing.Literal'): return self.analyze_literal_type(t) - elif fullname == 'typing_extensions.Annotated': + elif fullname in ('typing_extensions.Annotated', 'typing.Annotated'): if len(t.args) < 2: self.fail("Annotated[...] must have exactly one type argument" " and at least one annotation", t) From e06180223ddb0da34ea224761c873705f2ab4f2e Mon Sep 17 00:00:00 2001 From: "Michael J. Sullivan" Date: Fri, 7 Feb 2020 13:03:29 -0800 Subject: [PATCH 079/117] Fix an incremental crash bug caused by ad-hoc intersection (#8381) Ad-hoc intersection was generating type names with dots in them, which are interpreted as module separators. I fixed this by making the names worse and just using the bare name of the class. There are other options that could be pursued: * Replacing "." with some other character and then either switching it back when displayed or just printing it. One option that I was very tempted by was to use U+2024 ONE DOT LEADER. * Compute the intersection names on the fly when formatting. I decided to do the most obvious and the least tricky thing because this isn't important. 
--- mypy/checker.py | 8 +++-- test-data/unit/check-incremental.test | 46 ++++++++++++++++++++++++++- test-data/unit/check-isinstance.test | 6 ++-- test-data/unit/check-protocols.test | 2 +- 4 files changed, 54 insertions(+), 8 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index d80e0ec02b69..fc32bc4a03a5 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -3689,7 +3689,6 @@ def intersect_instances(self, assert isinstance(curr_module, MypyFile) base_classes = [] - formatted_names = [] for inst in instances: expanded = [inst] if inst.type.is_intersection: @@ -3697,10 +3696,13 @@ def intersect_instances(self, for expanded_inst in expanded: base_classes.append(expanded_inst) - formatted_names.append(format_type_bare(expanded_inst)) + # We use the pretty_names_list for error messages but can't + # use it for the real name that goes into the symbol table + # because it can have dots in it. pretty_names_list = pretty_seq(format_type_distinctly(*base_classes, bare=True), "and") - short_name = ''.format(pretty_names_list) + names_list = pretty_seq([x.type.name for x in base_classes], "and") + short_name = ''.format(names_list) full_name = gen_unique_name(short_name, curr_module.names) old_msg = self.msg diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index 0178226ea97f..aac3d37a0716 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -5187,6 +5187,51 @@ reveal_type(Foo().x) [out2] tmp/b.py:2: note: Revealed type is 'a.' +[case testIsInstanceAdHocIntersectionIncrementalNoChangeSameName] +import b +[file c.py] +class B: pass +[file a.py] +import c +class B: pass + +class Foo: + def __init__(self) -> None: + x: c.B + assert isinstance(x, B) + self.x = x +[file b.py] +from a import Foo +[file b.py.2] +from a import Foo +reveal_type(Foo().x) +[builtins fixtures/isinstance.pyi] +[out] +[out2] +tmp/b.py:2: note: Revealed type is 'a.' 
+ + +[case testIsInstanceAdHocIntersectionIncrementalNoChangeTuple] +import b +[file a.py] +from typing import Tuple +class B: pass + +class Foo: + def __init__(self) -> None: + x: Tuple[int, ...] + assert isinstance(x, B) + self.x = x +[file b.py] +from a import Foo +[file b.py.2] +from a import Foo +reveal_type(Foo().x) +[builtins fixtures/isinstance.pyi] +[out] +[out2] +tmp/b.py:2: note: Revealed type is 'a.' + [case testIsInstanceAdHocIntersectionIncrementalIsInstanceChange] import c [file a.py] @@ -5316,4 +5361,3 @@ reveal_type(z) tmp/c.py:2: note: Revealed type is 'a.A' [out2] tmp/c.py:2: note: Revealed type is 'a.' - diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test index 953178bc84e9..e41b88fff8a7 100644 --- a/test-data/unit/check-isinstance.test +++ b/test-data/unit/check-isinstance.test @@ -2369,14 +2369,14 @@ else: y: A[Parent] if isinstance(y, B): - reveal_type(y) # N: Revealed type is '__main__.' + reveal_type(y) # N: Revealed type is '__main__.' reveal_type(y.f()) # N: Revealed type is '__main__.Parent*' else: reveal_type(y) # N: Revealed type is '__main__.A[__main__.Parent]' z: A[Child] if isinstance(z, B): - reveal_type(z) # N: Revealed type is '__main__.' + reveal_type(z) # N: Revealed type is '__main__.1' reveal_type(z.f()) # N: Revealed type is '__main__.Child*' else: reveal_type(z) # N: Revealed type is '__main__.A[__main__.Child]' @@ -2518,7 +2518,7 @@ class A: pass x: A if isinstance(x, A2): - reveal_type(x) # N: Revealed type is '__main__.' + reveal_type(x) # N: Revealed type is '__main__.' [file foo.py] class A: pass diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index 8773e91d0840..b78becc88be4 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -1564,7 +1564,7 @@ if isinstance(c1i, P1): else: reveal_type(c1i) # Unreachable if isinstance(c1i, P): - reveal_type(c1i) # N: Revealed type is '__main__.' 
+ reveal_type(c1i) # N: Revealed type is '__main__.' else: reveal_type(c1i) # N: Revealed type is '__main__.C1[builtins.int]' From c059437858a917d25bf07724d75489160385daed Mon Sep 17 00:00:00 2001 From: Shantanu Date: Fri, 7 Feb 2020 14:54:39 -0800 Subject: [PATCH 080/117] mypy self check: add python_version = 3.5 (#8382) --- mypy_self_check.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/mypy_self_check.ini b/mypy_self_check.ini index cb9eb4b2aa35..2b7ed2b157c5 100644 --- a/mypy_self_check.ini +++ b/mypy_self_check.ini @@ -18,3 +18,4 @@ show_error_codes = True pretty = True always_false = MYPYC plugins = misc/proper_plugin.py +python_version = 3.5 From 3bd6e47f53e5ab09e30d7065e91d744fbc0821e3 Mon Sep 17 00:00:00 2001 From: Xuanda Yang Date: Sat, 8 Feb 2020 07:06:08 +0800 Subject: [PATCH 081/117] Fix covariant overriding of decorated methods (#8350) Fixes #5836. --- mypy/checker.py | 39 ++++++++++++++++++++----------- test-data/unit/check-classes.test | 22 +++++++++++++++++ 2 files changed, 48 insertions(+), 13 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index fc32bc4a03a5..71cf906a7d27 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -1439,7 +1439,7 @@ def check_method_or_accessor_override_for_base(self, defn: Union[FuncDef, self.msg.cant_override_final(name, base.name, defn) # Second, final can't override anything writeable independently of types. if defn.is_final: - self.check_no_writable(name, base_attr.node, defn) + self.check_if_final_var_override_writable(name, base_attr.node, defn) # Check the type of override. if name not in ('__init__', '__new__', '__init_subclass__'): @@ -1534,7 +1534,10 @@ def check_method_override_for_base_with_name( # that this doesn't affect read-only properties which can have # covariant overrides. # - # TODO: Allow covariance for read-only attributes? 
+ pass + elif (base_attr.node and not self.is_writable_attribute(base_attr.node) + and is_subtype(typ, original_type)): + # If the attribute is read-only, allow covariance pass else: self.msg.signature_incompatible_with_supertype( @@ -1920,7 +1923,7 @@ class C(B, A[int]): ... # this is unsafe because... if is_final_node(second.node): self.msg.cant_override_final(name, base2.name, ctx) if is_final_node(first.node): - self.check_no_writable(name, second.node, ctx) + self.check_if_final_var_override_writable(name, second.node, ctx) # __slots__ is special and the type can vary across class hierarchy. if name == '__slots__': ok = True @@ -2385,10 +2388,14 @@ def check_compatibility_final_super(self, node: Var, self.msg.cant_override_final(node.name, base.name, node) return False if node.is_final: - self.check_no_writable(node.name, base_node, node) + self.check_if_final_var_override_writable(node.name, base_node, node) return True - def check_no_writable(self, name: str, base_node: Optional[Node], ctx: Context) -> None: + def check_if_final_var_override_writable(self, + name: str, + base_node: + Optional[Node], + ctx: Context) -> None: """Check that a final variable doesn't override writeable attribute. This is done to prevent situations like this: @@ -2400,14 +2407,10 @@ class D(C): x: C = D() x.attr = 3 # Oops! 
""" - if isinstance(base_node, Var): - ok = False - elif isinstance(base_node, OverloadedFuncDef) and base_node.is_property: - first_item = cast(Decorator, base_node.items[0]) - ok = not first_item.var.is_settable_property - else: - ok = True - if not ok: + writable = True + if base_node: + writable = self.is_writable_attribute(base_node) + if writable: self.msg.final_cant_override_writable(name, ctx) def get_final_context(self) -> bool: @@ -4868,6 +4871,16 @@ def conditional_type_map_with_intersection(self, new_yes_type = make_simplified_union(out) return {expr: new_yes_type}, {} + def is_writable_attribute(self, node: Node) -> bool: + """Check if an attribute is writable""" + if isinstance(node, Var): + return True + elif isinstance(node, OverloadedFuncDef) and node.is_property: + first_item = cast(Decorator, node.items[0]) + return first_item.var.is_settable_property + else: + return False + def conditional_type_map(expr: Expression, current_type: Optional[Type], diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 56b591c041d4..ed547510b46c 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -512,6 +512,28 @@ class B(A): def h(cls) -> int: pass [builtins fixtures/classmethod.pyi] +[case testAllowCovarianceInReadOnlyAttributes] +from typing import Callable, TypeVar + +T = TypeVar('T') + +class X: + pass + + +class Y(X): + pass + +def dec(f: Callable[..., T]) -> T: pass + +class A: + @dec + def f(self) -> X: pass + +class B(A): + @dec + def f(self) -> Y: pass + -- Constructors -- ------------ From d6c2c01fd04043bff55222f1f43a9fe2e15c0ccf Mon Sep 17 00:00:00 2001 From: Jan Verbeek <55185397+janverb@users.noreply.github.com> Date: Sat, 8 Feb 2020 00:06:52 +0100 Subject: [PATCH 082/117] Support determining whether a literal is truthy (#8368) This means types like Union[Literal[False], str] can be narrowed down more easily. 
--- mypy/test/testtypes.py | 6 +++--- mypy/types.py | 8 +++++++- test-data/unit/check-narrowing.test | 19 +++++++++++++++++++ 3 files changed, 29 insertions(+), 4 deletions(-) diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py index 957b3ad7c4ba..c65bfc7b9418 100644 --- a/mypy/test/testtypes.py +++ b/mypy/test/testtypes.py @@ -736,10 +736,10 @@ def test_literal_type(self) -> None: self.assert_join(UnionType([lit1, lit2]), lit2, UnionType([lit1, lit2])) self.assert_join(UnionType([lit1, lit2]), a, a) self.assert_join(UnionType([lit1, lit3]), a, UnionType([a, lit3])) - self.assert_join(UnionType([d, lit3]), lit3, UnionType([d, lit3])) + self.assert_join(UnionType([d, lit3]), lit3, d) self.assert_join(UnionType([d, lit3]), d, UnionType([d, lit3])) - self.assert_join(UnionType([a, lit1]), lit1, UnionType([a, lit1])) - self.assert_join(UnionType([a, lit1]), lit2, UnionType([a, lit1])) + self.assert_join(UnionType([a, lit1]), lit1, a) + self.assert_join(UnionType([a, lit1]), lit2, a) self.assert_join(UnionType([lit1, lit2]), UnionType([lit1, lit2]), UnionType([lit1, lit2])) diff --git a/mypy/types.py b/mypy/types.py index f377753425f0..c214f82c6776 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -1591,10 +1591,16 @@ class LiteralType(ProperType): def __init__(self, value: LiteralValue, fallback: Instance, line: int = -1, column: int = -1) -> None: - super().__init__(line, column) self.value = value + super().__init__(line, column) self.fallback = fallback + def can_be_false_default(self) -> bool: + return not self.value + + def can_be_true_default(self) -> bool: + return bool(self.value) + def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_literal_type(self) diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test index d1bfda860a21..45d4a625f8c7 100644 --- a/test-data/unit/check-narrowing.test +++ b/test-data/unit/check-narrowing.test @@ -965,3 +965,22 @@ else: reveal_type(a) # E: Statement is unreachable 
reveal_type(b) [builtins fixtures/primitives.pyi] + +[case testNarrowingLiteralTruthiness] +from typing import Union +from typing_extensions import Literal + +str_or_false: Union[Literal[False], str] + +if str_or_false: + reveal_type(str_or_false) # N: Revealed type is 'builtins.str' +else: + reveal_type(str_or_false) # N: Revealed type is 'Union[Literal[False], builtins.str]' + +true_or_false: Literal[True, False] + +if true_or_false: + reveal_type(true_or_false) # N: Revealed type is 'Literal[True]' +else: + reveal_type(true_or_false) # N: Revealed type is 'Literal[False]' +[builtins fixtures/primitives.pyi] From dca8489ba70f838d7f9f1b6cb322fcf7c9dad1b3 Mon Sep 17 00:00:00 2001 From: Marcio Mazza Date: Mon, 10 Feb 2020 23:21:35 +0100 Subject: [PATCH 083/117] Add Tuples of variable size to the cheat sheet (#8364) --- docs/source/cheat_sheet_py3.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/source/cheat_sheet_py3.rst b/docs/source/cheat_sheet_py3.rst index 7eacba404fe0..002ed6241180 100644 --- a/docs/source/cheat_sheet_py3.rst +++ b/docs/source/cheat_sheet_py3.rst @@ -65,8 +65,11 @@ Built-in types # For mappings, we need the types of both keys and values x: Dict[str, float] = {'field': 2.0} - # For tuples, we specify the types of all the elements + # For tuples of fixed size, we specify the types of all the elements x: Tuple[int, str, float] = (3, "yes", 7.5) + + # For tuples of variable size, we use one type and ellipsis + x: Tuple[int, ...] 
= (1, 2, 3) # Use Optional[] for values that could be None x: Optional[str] = some_function() From 1104a9f1cf44a0f8469b4ce82b9fc5e75539195f Mon Sep 17 00:00:00 2001 From: Shantanu Date: Tue, 11 Feb 2020 00:00:59 -0800 Subject: [PATCH 084/117] mypy: attempt to fix build by pinning virtualenv (#8387) --- .github/workflows/test.yml | 2 +- .travis.yml | 1 + appveyor.yml | 2 +- test-requirements.txt | 2 +- 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f76468695a77..993608826518 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -42,7 +42,7 @@ jobs: python-version: ${{ matrix.python }} architecture: ${{ matrix.arch }} - name: install tox - run: pip install --upgrade setuptools tox==3.9.0 + run: pip install --upgrade setuptools 'virtualenv<20' tox==3.9.0 - name: setup tox environment run: tox -e ${{ matrix.toxenv }} --notest - name: test diff --git a/.travis.yml b/.travis.yml index ee6c107f49f1..c0f275bfde8b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -81,6 +81,7 @@ jobs: install: - pip install -U pip setuptools +- pip install -U 'virtualenv<20' - pip install -U tox==3.9.0 - tox --notest diff --git a/appveyor.yml b/appveyor.yml index 06d4a480b280..4abe31728f56 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -14,7 +14,7 @@ environment: install: - "git submodule update --init mypy/typeshed" - - "%PYTHON%\\python.exe -m pip install -U setuptools tox==3.9.0" + - '%PYTHON%\\python.exe -m pip install -U setuptools "virtualenv<20" tox==3.9.0' - "%PYTHON%\\python.exe -m tox -e py37 --notest" build: off diff --git a/test-requirements.txt b/test-requirements.txt index 649438743053..f339c593e47b 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -12,6 +12,6 @@ pytest-forked>=1.0.0,<1.1.0 pytest-cov>=2.4.0 typing>=3.5.2; python_version < '3.5' py>=1.5.2 -virtualenv +virtualenv<20 setuptools importlib-metadata==0.20 From 9ff569cfc43a2931cc4e7d3b84a0f0f85f59d565 Mon 
Sep 17 00:00:00 2001 From: "Michael J. Sullivan" Date: Tue, 11 Feb 2020 12:07:09 -0800 Subject: [PATCH 085/117] Fix initialize_unix_colors when sys.stdout is overridden with a TextIO (#8388) --- mypy/util.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/mypy/util.py b/mypy/util.py index f7c96e520f5c..c75aec7a89d4 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -542,7 +542,14 @@ def initialize_unix_colors(self) -> bool: if not CURSES_ENABLED: return False try: - curses.setupterm() + # setupterm wants a fd to potentially write an "initialization sequence". + # We override sys.stdout for the daemon API so if stdout doesn't have an fd, + # just give it /dev/null. + if hasattr(sys.stdout, 'fileno'): + curses.setupterm() + else: + with open("/dev/null", "rb") as f: + curses.setupterm(fd=f.fileno()) except curses.error: # Most likely terminfo not found. return False From 770276d70c3892f4154be85b96ec028705941184 Mon Sep 17 00:00:00 2001 From: "Michael J. Sullivan" Date: Wed, 12 Feb 2020 13:09:29 -0800 Subject: [PATCH 086/117] Actually fix the api curses crash. (#8396) I refactored after I tested, which was a mistake. --- mypy/util.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/mypy/util.py b/mypy/util.py index c75aec7a89d4..fe36297d297a 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -5,6 +5,7 @@ import subprocess import sys import hashlib +import io from typing import ( TypeVar, List, Tuple, Optional, Dict, Sequence, Iterable, Container, IO, Callable @@ -545,11 +546,13 @@ def initialize_unix_colors(self) -> bool: # setupterm wants a fd to potentially write an "initialization sequence". # We override sys.stdout for the daemon API so if stdout doesn't have an fd, # just give it /dev/null. 
- if hasattr(sys.stdout, 'fileno'): - curses.setupterm() - else: + try: + fd = sys.stdout.fileno() + except io.UnsupportedOperation: with open("/dev/null", "rb") as f: curses.setupterm(fd=f.fileno()) + else: + curses.setupterm(fd=fd) except curses.error: # Most likely terminfo not found. return False From a07dbd00638c35729e30dfc4ae441a8985183563 Mon Sep 17 00:00:00 2001 From: Shantanu Date: Wed, 12 Feb 2020 22:22:12 -0800 Subject: [PATCH 087/117] stubtest: add tests (#8380) * stubtest: move into mypy * stubtest: add entry point to setup.py * stubtest: use mypy.utils.check_python_version * stubtest: split up main to make it easier to test * stubtest: improvements to signature checking Fixes some false negatives and a minor false positive. Makes the logic more readable and improve comments. * stubtest: [minor] follow project style / track coverage better * stubtest: [minor] output filename more consistently * stubtest: [minor] remove no longer necessary optional * stubtest: fix module level variables missing at runtime Dumb mistake causing false negatives, mainly seems to surface a lot of platform differences * stubtest: handle compile errors * stubtest: [minor] remove black's commas * stubtest: [minor] handle a case in get_mypy_type_of_runtime_value Doesn't make a difference to typeshed * stubtest: add tests * stubtest: [minor] remove black's colon spaces To comply with project style * stubtest: [minor] catch more warnings * stubtest: replace use of find for Windows compatibility This is nicer too * teststubtest: NamedTemporaryFile doesn't work on Windows * stubtest: add annotation to help mypyc out * stubtest: [minor] make str(signature) deterministic * mypyc: exclude stubtest.py * stubtest: fix LiteralType misuse for mypyc EAFP, since bytes and enums should work, and default value error messages can be more informative with literal types * stubtest: work around a bug in early versions of py35 --- {scripts => mypy}/stubtest.py | 246 +++++++------ 
mypy/test/teststubtest.py | 667 ++++++++++++++++++++++++++++++++++ setup.py | 4 + tox.ini | 2 +- 4 files changed, 815 insertions(+), 104 deletions(-) rename {scripts => mypy}/stubtest.py (89%) create mode 100644 mypy/test/teststubtest.py diff --git a/scripts/stubtest.py b/mypy/stubtest.py similarity index 89% rename from scripts/stubtest.py rename to mypy/stubtest.py index 00475b78168d..8d87f6e7f5a0 100644 --- a/scripts/stubtest.py +++ b/mypy/stubtest.py @@ -9,7 +9,6 @@ import enum import importlib import inspect -import subprocess import sys import types import warnings @@ -37,7 +36,17 @@ def __repr__(self) -> str: MISSING = Missing() T = TypeVar("T") -MaybeMissing = Union[T, Missing] +if sys.version_info >= (3, 5, 3): + MaybeMissing = Union[T, Missing] +else: + # work around a bug in 3.5.2 and earlier's typing.py + class MaybeMissingMeta(type): + def __getitem__(self, arg: Any) -> Any: + return Union[arg, Missing] + + class MaybeMissing(metaclass=MaybeMissingMeta): # type: ignore + pass + _formatter = FancyFormatter(sys.stdout, sys.stderr, False) @@ -96,7 +105,7 @@ def get_description(self, concise: bool = False) -> str: return _style(self.object_desc, bold=True) + " " + self.message stub_line = None - stub_file = None + stub_file = None # type: None if not isinstance(self.stub_object, Missing): stub_line = self.stub_object.line # TODO: Find a way of getting the stub file @@ -105,7 +114,7 @@ def get_description(self, concise: bool = False) -> str: if stub_line: stub_loc_str += " at line {}".format(stub_line) if stub_file: - stub_loc_str += " in file {}".format(stub_file) + stub_loc_str += " in file {}".format(Path(stub_file)) runtime_line = None runtime_file = None @@ -123,7 +132,7 @@ def get_description(self, concise: bool = False) -> str: if runtime_line: runtime_loc_str += " at line {}".format(runtime_line) if runtime_file: - runtime_loc_str += " in file {}".format(runtime_file) + runtime_loc_str += " in file {}".format(Path(runtime_file)) output = [ 
_style("error: ", color="red", bold=True), @@ -157,12 +166,13 @@ def test_module(module_name: str) -> Iterator[Error]: return try: - runtime = importlib.import_module(module_name) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + runtime = importlib.import_module(module_name) except Exception as e: yield Error([module_name], "failed to import: {}".format(e), stub, MISSING) return - # collections likes to warn us about the things we're doing with warnings.catch_warnings(): warnings.simplefilter("ignore") yield from verify(stub, runtime, [module_name]) @@ -276,7 +286,7 @@ def _verify_arg_name( return def strip_prefix(s: str, prefix: str) -> str: - return s[len(prefix) :] if s.startswith(prefix) else s + return s[len(prefix):] if s.startswith(prefix) else s if strip_prefix(stub_arg.variable.name, "__") == runtime_arg.name: return @@ -356,21 +366,21 @@ def get_name(arg: Any) -> str: return arg.name if isinstance(arg, nodes.Argument): return arg.variable.name - raise ValueError + raise AssertionError def get_type(arg: Any) -> Optional[str]: if isinstance(arg, inspect.Parameter): return None if isinstance(arg, nodes.Argument): return str(arg.variable.type or arg.type_annotation) - raise ValueError + raise AssertionError def has_default(arg: Any) -> bool: if isinstance(arg, inspect.Parameter): return arg.default != inspect.Parameter.empty if isinstance(arg, nodes.Argument): return arg.kind in (nodes.ARG_OPT, nodes.ARG_NAMED_OPT) - raise ValueError + raise AssertionError def get_desc(arg: Any) -> str: arg_type = get_type(arg) @@ -380,11 +390,12 @@ def get_desc(arg: Any) -> str: + (" = ..." 
if has_default(arg) else "") ) + kw_only = sorted(self.kwonly.values(), key=lambda a: (has_default(a), get_name(a))) ret = "def (" ret += ", ".join( [get_desc(arg) for arg in self.pos] + (["*" + get_name(self.varpos)] if self.varpos else (["*"] if self.kwonly else [])) - + [get_desc(arg) for arg in self.kwonly.values()] + + [get_desc(arg) for arg in kw_only] + (["**" + get_name(self.varkw)] if self.varkw else []) ) ret += ")" @@ -403,11 +414,11 @@ def from_funcitem(stub: nodes.FuncItem) -> "Signature[nodes.Argument]": elif stub_arg.kind == nodes.ARG_STAR2: stub_sig.varkw = stub_arg else: - raise ValueError + raise AssertionError return stub_sig @staticmethod - def from_inspect_signature(signature: inspect.Signature,) -> "Signature[inspect.Parameter]": + def from_inspect_signature(signature: inspect.Signature) -> "Signature[inspect.Parameter]": runtime_sig = Signature() # type: Signature[inspect.Parameter] for runtime_arg in signature.parameters.values(): if runtime_arg.kind in ( @@ -422,11 +433,11 @@ def from_inspect_signature(signature: inspect.Signature,) -> "Signature[inspect. elif runtime_arg.kind == inspect.Parameter.VAR_KEYWORD: runtime_sig.varkw = runtime_arg else: - raise ValueError + raise AssertionError return runtime_sig @staticmethod - def from_overloadedfuncdef(stub: nodes.OverloadedFuncDef,) -> "Signature[nodes.Argument]": + def from_overloadedfuncdef(stub: nodes.OverloadedFuncDef) -> "Signature[nodes.Argument]": """Returns a Signature from an OverloadedFuncDef. 
If life were simple, to verify_overloadedfuncdef, we'd just verify_funcitem for each of its @@ -500,7 +511,7 @@ def get_kind(arg_name: str) -> int: elif arg.kind == nodes.ARG_STAR2: sig.varkw = arg else: - raise ValueError + raise AssertionError return sig @@ -532,26 +543,34 @@ def _verify_signature( "(remove leading double underscore)".format(stub_arg.variable.name) ) - # Checks involving *args - if len(stub.pos) == len(runtime.pos): - if stub.varpos is None and runtime.varpos is not None: - yield 'stub does not have *args argument "{}"'.format(runtime.varpos.name) - if stub.varpos is not None and runtime.varpos is None: - yield 'runtime does not have *args argument "{}"'.format(stub.varpos.variable.name) - elif len(stub.pos) > len(runtime.pos): + # Check unmatched positional args + if len(stub.pos) > len(runtime.pos): + # There are cases where the stub exhaustively lists out the extra parameters the function + # would take through *args. Hence, a) we can't check that the runtime actually takes those + # parameters and b) below, we don't enforce that the stub takes *args, since runtime logic + # may prevent those arguments from actually being accepted. 
if runtime.varpos is None: - for stub_arg in stub.pos[len(runtime.pos) :]: + for stub_arg in stub.pos[len(runtime.pos):]: # If the variable is in runtime.kwonly, it's just mislabelled as not a - # keyword-only argument; we report the error while checking keyword-only arguments + # keyword-only argument if stub_arg.variable.name not in runtime.kwonly: yield 'runtime does not have argument "{}"'.format(stub_arg.variable.name) - # We do not check whether stub takes *args when the runtime does, for cases where the stub - # just listed out the extra parameters the function takes + else: + yield 'stub argument "{}" is not keyword-only'.format(stub_arg.variable.name) + if stub.varpos is not None: + yield 'runtime does not have *args argument "{}"'.format(stub.varpos.variable.name) elif len(stub.pos) < len(runtime.pos): - if stub.varpos is None: - for runtime_arg in runtime.pos[len(stub.pos) :]: + for runtime_arg in runtime.pos[len(stub.pos):]: + if runtime_arg.name not in stub.kwonly: yield 'stub does not have argument "{}"'.format(runtime_arg.name) - elif runtime.pos is None: + else: + yield 'runtime argument "{}" is not keyword-only'.format(runtime_arg.name) + + # Checks involving *args + if len(stub.pos) <= len(runtime.pos) or runtime.varpos is None: + if stub.varpos is None and runtime.varpos is not None: + yield 'stub does not have *args argument "{}"'.format(runtime.varpos.name) + if stub.varpos is not None and runtime.varpos is None: yield 'runtime does not have *args argument "{}"'.format(stub.varpos.variable.name) # Check keyword-only args @@ -560,26 +579,31 @@ def _verify_signature( yield from _verify_arg_name(stub_arg, runtime_arg, function_name) yield from _verify_arg_default_value(stub_arg, runtime_arg) + # Check unmatched keyword-only args + if runtime.varkw is None or not set(runtime.kwonly).issubset(set(stub.kwonly)): + for arg in sorted(set(stub.kwonly) - set(runtime.kwonly)): + yield 'runtime does not have argument "{}"'.format(arg) + if stub.varkw is 
None or not set(stub.kwonly).issubset(set(runtime.kwonly)): + for arg in sorted(set(runtime.kwonly) - set(stub.kwonly)): + if arg in set(stub_arg.variable.name for stub_arg in stub.pos): + # Don't report this if we've reported it before + if len(stub.pos) > len(runtime.pos) and runtime.varpos is not None: + yield 'stub argument "{}" is not keyword-only'.format(arg) + else: + yield 'stub does not have argument "{}"'.format(arg) + # Checks involving **kwargs if stub.varkw is None and runtime.varkw is not None: - # We do not check whether stub takes **kwargs when the runtime does, for cases where the - # stub just listed out the extra keyword parameters the function takes + # There are cases where the stub exhaustively lists out the extra parameters the function + # would take through **kwargs, so we don't enforce that the stub takes **kwargs. # Also check against positional parameters, to avoid a nitpicky message when an argument # isn't marked as keyword-only stub_pos_names = set(stub_arg.variable.name for stub_arg in stub.pos) + # Ideally we'd do a strict subset check, but in practice the errors from that aren't useful if not set(runtime.kwonly).issubset(set(stub.kwonly) | stub_pos_names): yield 'stub does not have **kwargs argument "{}"'.format(runtime.varkw.name) if stub.varkw is not None and runtime.varkw is None: yield 'runtime does not have **kwargs argument "{}"'.format(stub.varkw.variable.name) - if runtime.varkw is None or not set(runtime.kwonly).issubset(set(stub.kwonly)): - for arg in sorted(set(stub.kwonly) - set(runtime.kwonly)): - yield 'runtime does not have argument "{}"'.format(arg) - if stub.varkw is None or not set(stub.kwonly).issubset(set(runtime.kwonly)): - for arg in sorted(set(runtime.kwonly) - set(stub.kwonly)): - if arg in set(stub_arg.variable.name for stub_arg in stub.pos): - yield 'stub argument "{}" is not keyword-only'.format(arg) - else: - yield 'stub does not have argument "{}"'.format(arg) @verify.register(nodes.FuncItem) @@ -642,7 
+666,7 @@ def verify_var( ) -> Iterator[Error]: if isinstance(runtime, Missing): # Don't always yield an error here, because we often can't find instance variables - if len(object_path) <= 1: + if len(object_path) <= 2: yield Error(object_path, "is not present at runtime", stub, runtime) return @@ -857,6 +881,8 @@ def get_mypy_type_of_runtime_value(runtime: Any) -> Optional[mypy.types.Type]: if type_name not in stub.names: return None type_info = stub.names[type_name].node + if isinstance(type_info, nodes.Var): + return type_info.type if not isinstance(type_info, nodes.TypeInfo): return None @@ -865,17 +891,22 @@ def anytype() -> mypy.types.AnyType: if isinstance(runtime, tuple): # Special case tuples so we construct a valid mypy.types.TupleType - opt_items = [get_mypy_type_of_runtime_value(v) for v in runtime] - items = [(i if i is not None else anytype()) for i in opt_items] + optional_items = [get_mypy_type_of_runtime_value(v) for v in runtime] + items = [(i if i is not None else anytype()) for i in optional_items] fallback = mypy.types.Instance(type_info, [anytype()]) return mypy.types.TupleType(items, fallback) - # Technically, Literals are supposed to be only bool, int, str or bytes, but this - # seems to work fine - return mypy.types.LiteralType( - value=runtime, - fallback=mypy.types.Instance(type_info, [anytype() for _ in type_info.type_vars]), - ) + fallback = mypy.types.Instance(type_info, [anytype() for _ in type_info.type_vars]) + try: + # Literals are supposed to be only bool, int, str, bytes or enums, but this seems to work + # well (when not using mypyc, for which bytes and enums are also problematic). + return mypy.types.LiteralType( + value=runtime, + fallback=fallback, + ) + except TypeError: + # Ask for forgiveness if we're using mypyc. 
+ return fallback _all_stubs = {} # type: Dict[str, nodes.MypyFile] @@ -913,11 +944,16 @@ def build_stubs(modules: List[str], options: Options, find_submodules: bool = Fa sources.extend(found_sources) all_modules.extend(s.module for s in found_sources if s.module not in all_modules) - res = mypy.build.build(sources=sources, options=options) + try: + res = mypy.build.build(sources=sources, options=options) + except mypy.errors.CompileError as e: + output = [_style("error: ", color="red", bold=True), "failed mypy compile.\n", str(e)] + print("".join(output)) + raise RuntimeError if res.errors: - output = [_style("error: ", color="red", bold=True), " failed mypy build.\n"] + output = [_style("error: ", color="red", bold=True), "failed mypy build.\n"] print("".join(output) + "\n".join(res.errors)) - sys.exit(1) + raise RuntimeError global _all_stubs _all_stubs = res.files @@ -949,19 +985,14 @@ def get_typeshed_stdlib_modules(custom_typeshed_dir: Optional[str]) -> List[str] for version in versions: base = typeshed_dir / "stdlib" / version if base.exists(): - output = subprocess.check_output(["find", str(base), "-type", "f"]).decode("utf-8") - paths = [Path(p) for p in output.splitlines()] - for path in paths: + for path in base.rglob("*.pyi"): if path.stem == "__init__": path = path.parent modules.append(".".join(path.relative_to(base).parts[:-1] + (path.stem,))) return sorted(modules) -def get_whitelist_entries(whitelist_file: Optional[str]) -> Iterator[str]: - if not whitelist_file: - return - +def get_whitelist_entries(whitelist_file: str) -> Iterator[str]: def strip_comments(s: str) -> str: try: return s[: s.index("#")].strip() @@ -975,47 +1006,8 @@ def strip_comments(s: str) -> str: yield entry -def main() -> int: - assert sys.version_info >= (3, 5), "This script requires at least Python 3.5" - - parser = argparse.ArgumentParser( - description="Compares stubs to objects introspected from the runtime." 
- ) - parser.add_argument("modules", nargs="*", help="Modules to test") - parser.add_argument("--concise", action="store_true", help="Make output concise") - parser.add_argument( - "--ignore-missing-stub", - action="store_true", - help="Ignore errors for stub missing things that are present at runtime", - ) - parser.add_argument( - "--ignore-positional-only", - action="store_true", - help="Ignore errors for whether an argument should or shouldn't be positional-only", - ) - parser.add_argument( - "--custom-typeshed-dir", metavar="DIR", help="Use the custom typeshed in DIR" - ) - parser.add_argument( - "--check-typeshed", action="store_true", help="Check all stdlib modules in typeshed" - ) - parser.add_argument( - "--whitelist", - action="append", - metavar="FILE", - default=[], - help=( - "Use file as a whitelist. Can be passed multiple times to combine multiple " - "whitelists. Whitelist can be created with --generate-whitelist" - ), - ) - parser.add_argument( - "--generate-whitelist", - action="store_true", - help="Print a whitelist (to stdout) to be used with --whitelist", - ) - args = parser.parse_args() - +def test_stubs(args: argparse.Namespace) -> int: + """This is stubtest! It's time to test the stubs!""" # Load the whitelist. This is a series of strings corresponding to Error.object_desc # Values in the dict will store whether we used the whitelist entry or not. 
whitelist = { @@ -1039,7 +1031,10 @@ def main() -> int: options.incremental = False options.custom_typeshed_dir = args.custom_typeshed_dir - modules = build_stubs(modules, options, find_submodules=not args.check_typeshed) + try: + modules = build_stubs(modules, options, find_submodules=not args.check_typeshed) + except RuntimeError: + return 1 exit_code = 0 for module in modules: @@ -1075,5 +1070,50 @@ def main() -> int: return exit_code +def parse_options(args: List[str]) -> argparse.Namespace: + parser = argparse.ArgumentParser( + description="Compares stubs to objects introspected from the runtime." + ) + parser.add_argument("modules", nargs="*", help="Modules to test") + parser.add_argument("--concise", action="store_true", help="Make output concise") + parser.add_argument( + "--ignore-missing-stub", + action="store_true", + help="Ignore errors for stub missing things that are present at runtime", + ) + parser.add_argument( + "--ignore-positional-only", + action="store_true", + help="Ignore errors for whether an argument should or shouldn't be positional-only", + ) + parser.add_argument( + "--custom-typeshed-dir", metavar="DIR", help="Use the custom typeshed in DIR" + ) + parser.add_argument( + "--check-typeshed", action="store_true", help="Check all stdlib modules in typeshed" + ) + parser.add_argument( + "--whitelist", + action="append", + metavar="FILE", + default=[], + help=( + "Use file as a whitelist. Can be passed multiple times to combine multiple " + "whitelists. 
Whitelist can be created with --generate-whitelist" + ), + ) + parser.add_argument( + "--generate-whitelist", + action="store_true", + help="Print a whitelist (to stdout) to be used with --whitelist", + ) + return parser.parse_args(args) + + +def main() -> int: + mypy.util.check_python_version("stubtest") + return test_stubs(parse_options(sys.argv[1:])) + + if __name__ == "__main__": sys.exit(main()) diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py new file mode 100644 index 000000000000..d48fb12ccd10 --- /dev/null +++ b/mypy/test/teststubtest.py @@ -0,0 +1,667 @@ +import contextlib +import inspect +import io +import os +import re +import sys +import tempfile +import textwrap +import unittest +from typing import Any, Callable, Iterator, List, Optional + +import mypy.stubtest +from mypy.stubtest import parse_options, test_stubs + + +@contextlib.contextmanager +def use_tmp_dir() -> Iterator[None]: + current = os.getcwd() + with tempfile.TemporaryDirectory() as tmp: + try: + os.chdir(tmp) + yield + finally: + os.chdir(current) + + +TEST_MODULE_NAME = "test_module" + + +def run_stubtest(stub: str, runtime: str, options: List[str]) -> str: + with use_tmp_dir(): + with open("{}.pyi".format(TEST_MODULE_NAME), "w") as f: + f.write(stub) + with open("{}.py".format(TEST_MODULE_NAME), "w") as f: + f.write(runtime) + + if sys.path[0] != ".": + sys.path.insert(0, ".") + if TEST_MODULE_NAME in sys.modules: + del sys.modules[TEST_MODULE_NAME] + + output = io.StringIO() + with contextlib.redirect_stdout(output): + test_stubs(parse_options([TEST_MODULE_NAME] + options)) + + return output.getvalue() + + +class Case: + def __init__(self, stub: str, runtime: str, error: Optional[str]): + self.stub = stub + self.runtime = runtime + self.error = error + + +def collect_cases(fn: Callable[..., Iterator[Case]]) -> Callable[..., None]: + """Repeatedly invoking run_stubtest is slow, so use this decorator to combine cases. 
+ + We could also manually combine cases, but this allows us to keep the contrasting stub and + runtime definitions next to each other. + + """ + + def test(*args: Any, **kwargs: Any) -> None: + cases = list(fn(*args, **kwargs)) + expected_errors = set( + "{}.{}".format(TEST_MODULE_NAME, c.error) for c in cases if c.error is not None + ) + output = run_stubtest( + stub="\n\n".join(textwrap.dedent(c.stub.lstrip("\n")) for c in cases), + runtime="\n\n".join(textwrap.dedent(c.runtime.lstrip("\n")) for c in cases), + options=["--generate-whitelist"], + ) + + actual_errors = set(output.splitlines()) + assert actual_errors == expected_errors, output + + return test + + +class StubtestUnit(unittest.TestCase): + @collect_cases + def test_basic_good(self) -> Iterator[Case]: + yield Case( + stub="def f(number: int, text: str) -> None: ...", + runtime="def f(number, text): pass", + error=None, + ) + yield Case( + stub=""" + class X: + def f(self, number: int, text: str) -> None: ... + """, + runtime=""" + class X: + def f(self, number, text): pass + """, + error=None, + ) + + @collect_cases + def test_types(self) -> Iterator[Case]: + yield Case( + stub="def mistyped_class() -> None: ...", + runtime="class mistyped_class: pass", + error="mistyped_class", + ) + yield Case( + stub="class mistyped_fn: ...", runtime="def mistyped_fn(): pass", error="mistyped_fn" + ) + yield Case( + stub=""" + class X: + def mistyped_var(self) -> int: ... 
+ """, + runtime=""" + class X: + mistyped_var = 1 + """, + error="X.mistyped_var", + ) + + @collect_cases + def test_arg_name(self) -> Iterator[Case]: + yield Case( + stub="def bad(number: int, text: str) -> None: ...", + runtime="def bad(num, text) -> None: pass", + error="bad", + ) + if sys.version_info >= (3, 8): + yield Case( + stub="def good_posonly(__number: int, text: str) -> None: ...", + runtime="def good_posonly(num, /, text): pass", + error=None, + ) + yield Case( + stub="def bad_posonly(__number: int, text: str) -> None: ...", + runtime="def bad_posonly(flag, /, text): pass", + error="bad_posonly", + ) + yield Case( + stub=""" + class BadMethod: + def f(self, number: int, text: str) -> None: ... + """, + runtime=""" + class BadMethod: + def f(self, n, text): pass + """, + error="BadMethod.f", + ) + yield Case( + stub=""" + class GoodDunder: + def __exit__(self, t, v, tb) -> None: ... + """, + runtime=""" + class GoodDunder: + def __exit__(self, exc_type, exc_val, exc_tb): pass + """, + error=None, + ) + + @collect_cases + def test_arg_kind(self) -> Iterator[Case]: + yield Case( + stub="def runtime_kwonly(number: int, text: str) -> None: ...", + runtime="def runtime_kwonly(number, *, text): pass", + error="runtime_kwonly", + ) + yield Case( + stub="def stub_kwonly(number: int, *, text: str) -> None: ...", + runtime="def stub_kwonly(number, text): pass", + error="stub_kwonly", + ) + yield Case( + stub="def stub_posonly(__number: int, text: str) -> None: ...", + runtime="def stub_posonly(number, text): pass", + error="stub_posonly", + ) + if sys.version_info >= (3, 8): + yield Case( + stub="def good_posonly(__number: int, text: str) -> None: ...", + runtime="def good_posonly(number, /, text): pass", + error=None, + ) + yield Case( + stub="def runtime_posonly(number: int, text: str) -> None: ...", + runtime="def runtime_posonly(number, /, text): pass", + error="runtime_posonly", + ) + + @collect_cases + def test_default_value(self) -> Iterator[Case]: + 
yield Case( + stub="def f1(text: str = ...) -> None: ...", + runtime="def f1(text = 'asdf'): pass", + error=None, + ) + yield Case( + stub="def f2(text: str = ...) -> None: ...", runtime="def f2(text): pass", error="f2" + ) + yield Case( + stub="def f3(text: str) -> None: ...", + runtime="def f3(text = 'asdf'): pass", + error="f3", + ) + yield Case( + stub="def f4(text: str = ...) -> None: ...", + runtime="def f4(text = None): pass", + error="f4", + ) + yield Case( + stub="def f5(data: bytes = ...) -> None: ...", + runtime="def f5(data = 'asdf'): pass", + error="f5", + ) + yield Case( + stub=""" + from typing import TypeVar + T = TypeVar("T", bound=str) + def f6(text: T = ...) -> None: ... + """, + runtime="def f6(text = None): pass", + error="f6", + ) + + @collect_cases + def test_static_class_method(self) -> Iterator[Case]: + yield Case( + stub=""" + class Good: + @classmethod + def f(cls, number: int, text: str) -> None: ... + """, + runtime=""" + class Good: + @classmethod + def f(cls, number, text): pass + """, + error=None, + ) + yield Case( + stub=""" + class Bad1: + def f(cls, number: int, text: str) -> None: ... + """, + runtime=""" + class Bad1: + @classmethod + def f(cls, number, text): pass + """, + error="Bad1.f", + ) + yield Case( + stub=""" + class Bad2: + @classmethod + def f(cls, number: int, text: str) -> None: ... + """, + runtime=""" + class Bad2: + @staticmethod + def f(self, number, text): pass + """, + error="Bad2.f", + ) + yield Case( + stub=""" + class Bad3: + @staticmethod + def f(cls, number: int, text: str) -> None: ... + """, + runtime=""" + class Bad3: + @classmethod + def f(self, number, text): pass + """, + error="Bad3.f", + ) + yield Case( + stub=""" + class GoodNew: + def __new__(cls, *args, **kwargs): ... 
+ """, + runtime=""" + class GoodNew: + def __new__(cls, *args, **kwargs): pass + """, + error=None, + ) + + @collect_cases + def test_arg_mismatch(self) -> Iterator[Case]: + yield Case( + stub="def f1(a, *, b, c) -> None: ...", runtime="def f1(a, *, b, c): pass", error=None + ) + yield Case( + stub="def f2(a, *, b) -> None: ...", runtime="def f2(a, *, b, c): pass", error="f2" + ) + yield Case( + stub="def f3(a, *, b, c) -> None: ...", runtime="def f3(a, *, b): pass", error="f3" + ) + yield Case( + stub="def f4(a, *, b, c) -> None: ...", runtime="def f4(a, b, *, c): pass", error="f4" + ) + yield Case( + stub="def f5(a, b, *, c) -> None: ...", runtime="def f5(a, *, b, c): pass", error="f5" + ) + + @collect_cases + def test_varargs_varkwargs(self) -> Iterator[Case]: + yield Case( + stub="def f1(*args, **kwargs) -> None: ...", + runtime="def f1(*args, **kwargs): pass", + error=None, + ) + yield Case( + stub="def f2(*args, **kwargs) -> None: ...", + runtime="def f2(**kwargs): pass", + error="f2", + ) + yield Case( + stub="def g1(a, b, c, d) -> None: ...", runtime="def g1(a, *args): pass", error=None + ) + yield Case( + stub="def g2(a, b, c, d, *args) -> None: ...", runtime="def g2(a): pass", error="g2" + ) + yield Case( + stub="def g3(a, b, c, d, *args) -> None: ...", + runtime="def g3(a, *args): pass", + error=None, + ) + yield Case( + stub="def h1(a) -> None: ...", runtime="def h1(a, b, c, d, *args): pass", error="h1" + ) + yield Case( + stub="def h2(a, *args) -> None: ...", runtime="def h2(a, b, c, d): pass", error="h2" + ) + yield Case( + stub="def h3(a, *args) -> None: ...", + runtime="def h3(a, b, c, d, *args): pass", + error="h3", + ) + yield Case( + stub="def j1(a: int, *args) -> None: ...", runtime="def j1(a): pass", error="j1" + ) + yield Case( + stub="def j2(a: int) -> None: ...", runtime="def j2(a, *args): pass", error="j2" + ) + yield Case( + stub="def j3(a, b, c) -> None: ...", runtime="def j3(a, *args, c): pass", error="j3" + ) + yield Case(stub="def 
k1(a, **kwargs) -> None: ...", runtime="def k1(a): pass", error="k1") + yield Case( + # In theory an error, but led to worse results in practice + stub="def k2(a) -> None: ...", + runtime="def k2(a, **kwargs): pass", + error=None, + ) + yield Case( + stub="def k3(a, b) -> None: ...", runtime="def k3(a, **kwargs): pass", error="k3" + ) + yield Case( + stub="def k4(a, *, b) -> None: ...", runtime="def k4(a, **kwargs): pass", error=None + ) + yield Case( + stub="def k5(a, *, b) -> None: ...", + runtime="def k5(a, *, b, c, **kwargs): pass", + error="k5", + ) + + @collect_cases + def test_overload(self) -> Iterator[Case]: + yield Case( + stub=""" + from typing import overload + + @overload + def f1(a: int, *, c: int = ...) -> int: ... + @overload + def f1(a: int, b: int, c: int = ...) -> str: ... + """, + runtime="def f1(a, b = 0, c = 0): pass", + error=None, + ) + yield Case( + stub=""" + @overload + def f2(a: int, *, c: int = ...) -> int: ... + @overload + def f2(a: int, b: int, c: int = ...) -> str: ... + """, + runtime="def f2(a, b, c = 0): pass", + error="f2", + ) + yield Case( + stub=""" + @overload + def f3(a: int) -> int: ... + @overload + def f3(a: int, b: str) -> str: ... + """, + runtime="def f3(a, b = None): pass", + error="f3", + ) + yield Case( + stub=""" + @overload + def f4(a: int, *args, b: int, **kwargs) -> int: ... + @overload + def f4(a: str, *args, b: int, **kwargs) -> str: ... + """, + runtime="def f4(a, *args, b, **kwargs): pass", + error=None, + ) + if sys.version_info >= (3, 8): + yield Case( + stub=""" + @overload + def f5(__a: int) -> int: ... + @overload + def f5(__b: str) -> str: ... + """, + runtime="def f5(x, /): pass", + error=None, + ) + + @collect_cases + def test_property(self) -> Iterator[Case]: + yield Case( + stub=""" + class Good: + @property + def f(self) -> int: ... 
+ """, + runtime=""" + class Good: + @property + def f(self) -> int: return 1 + """, + error=None, + ) + yield Case( + stub=""" + class Bad: + @property + def f(self) -> int: ... + """, + runtime=""" + class Bad: + def f(self) -> int: return 1 + """, + error="Bad.f", + ) + yield Case( + stub=""" + class GoodReadOnly: + @property + def f(self) -> int: ... + """, + runtime=""" + class GoodReadOnly: + f = 1 + """, + error=None, + ) + yield Case( + stub=""" + class BadReadOnly: + @property + def f(self) -> str: ... + """, + runtime=""" + class BadReadOnly: + f = 1 + """, + error="BadReadOnly.f", + ) + + @collect_cases + def test_var(self) -> Iterator[Case]: + yield Case(stub="x1: int", runtime="x1 = 5", error=None) + yield Case(stub="x2: str", runtime="x2 = 5", error="x2") + yield Case("from typing import Tuple", "", None) # dummy case + yield Case( + stub=""" + x3: Tuple[int, int] + """, + runtime="x3 = (1, 3)", + error=None, + ) + yield Case( + stub=""" + x4: Tuple[int, int] + """, + runtime="x4 = (1, 3, 5)", + error="x4", + ) + yield Case( + stub=""" + class X: + f: int + """, + runtime=""" + class X: + def __init__(self): + self.f = "asdf" + """, + error=None, + ) + + @collect_cases + def test_enum(self) -> Iterator[Case]: + yield Case( + stub=""" + import enum + class X(enum.Enum): + a: int + b: str + c: str + """, + runtime=""" + import enum + class X(enum.Enum): + a = 1 + b = "asdf" + c = 2 + """, + error="X.c", + ) + + @collect_cases + def test_decorator(self) -> Iterator[Case]: + yield Case( + stub=""" + from typing import Any, Callable + def decorator(f: Callable[[], int]) -> Callable[..., Any]: ... + @decorator + def f() -> Any: ... 
+ """, + runtime=""" + def decorator(f): return f + @decorator + def f(): return 3 + """, + error=None, + ) + + @collect_cases + def test_missing(self) -> Iterator[Case]: + yield Case(stub="x = 5", runtime="", error="x") + yield Case(stub="def f(): ...", runtime="", error="f") + yield Case(stub="class X: ...", runtime="", error="X") + yield Case( + stub=""" + from typing import overload + @overload + def h(x: int): ... + @overload + def h(x: str): ... + """, + runtime="", + error="h", + ) + yield Case("", "__all__ = []", None) # dummy case + yield Case(stub="", runtime="__all__ += ['y']\ny = 5", error="y") + yield Case(stub="", runtime="__all__ += ['g']\ndef g(): pass", error="g") + + +def remove_color_code(s: str) -> str: + return re.sub("\\x1b.*?m", "", s) # this works! + + +class StubtestMiscUnit(unittest.TestCase): + def test_output(self) -> None: + output = run_stubtest( + stub="def bad(number: int, text: str) -> None: ...", + runtime="def bad(num, text): pass", + options=[], + ) + expected = ( + 'error: {0}.bad is inconsistent, stub argument "number" differs from runtime ' + 'argument "num"\nStub: at line 1\ndef (number: builtins.int, text: builtins.str)\n' + "Runtime: at line 1 in file {0}.py\ndef (num, text)\n\n".format(TEST_MODULE_NAME) + ) + assert remove_color_code(output) == expected + + output = run_stubtest( + stub="def bad(number: int, text: str) -> None: ...", + runtime="def bad(num, text): pass", + options=["--concise"], + ) + expected = ( + "{}.bad is inconsistent, " + 'stub argument "number" differs from runtime argument "num"\n'.format(TEST_MODULE_NAME) + ) + assert remove_color_code(output) == expected + + def test_ignore_flags(self) -> None: + output = run_stubtest( + stub="", runtime="__all__ = ['f']\ndef f(): pass", options=["--ignore-missing-stub"] + ) + assert not output + + output = run_stubtest( + stub="def f(__a): ...", runtime="def f(a): pass", options=["--ignore-positional-only"] + ) + assert not output + + def test_whitelist(self) -> 
None: + # Can't use this as a context because Windows + whitelist = tempfile.NamedTemporaryFile(mode="w", delete=False) + try: + with whitelist: + whitelist.write("{}.bad\n# a comment".format(TEST_MODULE_NAME)) + + output = run_stubtest( + stub="def bad(number: int, text: str) -> None: ...", + runtime="def bad(num, text) -> None: pass", + options=["--whitelist", whitelist.name], + ) + assert not output + + output = run_stubtest(stub="", runtime="", options=["--whitelist", whitelist.name]) + assert output == "note: unused whitelist entry {}.bad\n".format(TEST_MODULE_NAME) + finally: + os.unlink(whitelist.name) + + def test_mypy_build(self) -> None: + output = run_stubtest(stub="+", runtime="", options=[]) + assert remove_color_code(output) == ( + "error: failed mypy compile.\n{}.pyi:1: " + "error: invalid syntax\n".format(TEST_MODULE_NAME) + ) + + output = run_stubtest(stub="def f(): ...\ndef f(): ...", runtime="", options=[]) + assert remove_color_code(output) == ( + "error: failed mypy build.\n{}.pyi:2: " + "error: Name 'f' already defined on line 1\n".format(TEST_MODULE_NAME) + ) + + def test_missing_stubs(self) -> None: + output = io.StringIO() + with contextlib.redirect_stdout(output): + test_stubs(parse_options(["not_a_module"])) + assert "error: not_a_module failed to find stubs" in remove_color_code(output.getvalue()) + + def test_get_typeshed_stdlib_modules(self) -> None: + stdlib = mypy.stubtest.get_typeshed_stdlib_modules(None) + assert "builtins" in stdlib + assert "os" in stdlib + + def test_signature(self) -> None: + def f(a: int, b: int, *, c: int, d: int = 0, **kwargs: Any) -> None: + pass + + assert ( + str(mypy.stubtest.Signature.from_inspect_signature(inspect.signature(f))) + == "def (a, b, *, c, d = ..., **kwargs)" + ) + + +class StubtestIntegration(unittest.TestCase): + def test_typeshed(self) -> None: + # check we don't crash while checking typeshed + test_stubs(parse_options(["--check-typeshed"])) diff --git a/setup.py b/setup.py index 
1a66f51c5bed..a393c4035205 100644 --- a/setup.py +++ b/setup.py @@ -100,6 +100,9 @@ def run(self): # We don't populate __file__ properly at the top level or something? # Also I think there would be problems with how we generate version.py. 'version.py', + + # Written by someone who doesn't know how to deal with mypyc + 'stubtest.py', )) + ( # Don't want to grab this accidentally os.path.join('mypyc', 'lib-rt', 'setup.py'), @@ -182,6 +185,7 @@ def run(self): scripts=['scripts/mypyc'], entry_points={'console_scripts': ['mypy=mypy.__main__:console_entry', 'stubgen=mypy.stubgen:main', + 'stubtest=mypy.stubtest:main', 'dmypy=mypy.dmypy.client:console_entry', ]}, classifiers=classifiers, diff --git a/tox.ini b/tox.ini index 18cf56f9c3a8..ac7cdc72fdb7 100644 --- a/tox.ini +++ b/tox.ini @@ -51,7 +51,7 @@ description = type check ourselves basepython = python3.7 commands = python -m mypy --config-file mypy_self_check.ini -p mypy -p mypyc - python -m mypy --config-file mypy_self_check.ini misc/proper_plugin.py scripts/stubtest.py scripts/mypyc + python -m mypy --config-file mypy_self_check.ini misc/proper_plugin.py scripts/mypyc [testenv:docs] description = invoke sphinx-build to build the HTML docs From 75dcfc3cc6ad244eb0f6c58e0983b5636ad6ed54 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 14 Feb 2020 14:52:47 +0000 Subject: [PATCH 088/117] Fix join between subclass of 'unicode' and 'str' (#8402) There was one case where type promotion wasn't considered. Fixes #8394. --- mypy/join.py | 8 ++++++-- test-data/unit/check-inference.test | 7 +++++++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/mypy/join.py b/mypy/join.py index c22574884b61..1da70fcf0c3c 100644 --- a/mypy/join.py +++ b/mypy/join.py @@ -353,8 +353,7 @@ def default(self, typ: Type) -> ProperType: def join_instances(t: Instance, s: Instance) -> ProperType: - """Calculate the join of two instance types. 
- """ + """Calculate the join of two instance types.""" if t.type == s.type: # Simplest case: join two types with the same base type (but # potentially different arguments). @@ -395,6 +394,11 @@ def join_instances_via_supertype(t: Instance, s: Instance) -> ProperType: if best is None or is_better(res, best): best = res assert best is not None + promote = get_proper_type(t.type._promote) + if isinstance(promote, Instance): + res = join_instances(promote, s) + if is_better(res, best): + best = res return best diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index e762de9be3e6..a825743f4484 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -3137,3 +3137,10 @@ y = defaultdict(list) # E: Need type annotation for 'y' y['a'] = [] reveal_type(y) # N: Revealed type is 'collections.defaultdict[Any, Any]' [builtins fixtures/dict.pyi] + +[case testJoinOfStrAndUnicodeSubclass_python2] +class S(unicode): pass +reveal_type(S() if bool() else '') # N: Revealed type is 'builtins.unicode' +reveal_type('' if bool() else S()) # N: Revealed type is 'builtins.unicode' +reveal_type(S() if bool() else str()) # N: Revealed type is 'builtins.unicode' +reveal_type(str() if bool() else S()) # N: Revealed type is 'builtins.unicode' From 35bdb07d656ffbf66223c3b7d5f44c2ff84c8c39 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 14 Feb 2020 14:53:03 +0000 Subject: [PATCH 089/117] Don't run stubgen tests by default in runtests.py (#8403) The tests are slow (around 56s sequentially on my laptop) and not very likely to break changes not touching stubtest, so it seems reasonable to make them opt-in outside of CI. 
--- runtests.py | 42 +++++++++++++++++++++++++++--------------- 1 file changed, 27 insertions(+), 15 deletions(-) diff --git a/runtests.py b/runtests.py index c4fe1fce8981..80280282a8f3 100755 --- a/runtests.py +++ b/runtests.py @@ -28,27 +28,38 @@ MYPYC_EXTERNAL = 'TestExternal' MYPYC_COMMAND_LINE = 'TestCommandLine' ERROR_STREAM = 'ErrorStreamSuite' - - -ALL_NON_FAST = [CMDLINE, - SAMPLES, - TYPESHED, - PEP561, - EVALUATION, - DAEMON, - STUBGEN_CMD, - STUBGEN_PY, - MYPYC_RUN, - MYPYC_RUN_MULTI, - MYPYC_EXTERNAL, - MYPYC_COMMAND_LINE, - ERROR_STREAM] +STUBTEST = 'StubtestUnit' +STUBTEST_MISC = 'StubtestMiscUnit' +STUBTEST_INTEGRATION = 'StubtestIntegration' + + +ALL_NON_FAST = [ + CMDLINE, + SAMPLES, + TYPESHED, + PEP561, + EVALUATION, + DAEMON, + STUBGEN_CMD, + STUBGEN_PY, + MYPYC_RUN, + MYPYC_RUN_MULTI, + MYPYC_EXTERNAL, + MYPYC_COMMAND_LINE, + ERROR_STREAM, + STUBTEST, + STUBTEST_MISC, + STUBTEST_INTEGRATION, +] # These must be enabled by explicitly including 'mypyc-extra' on the command line. MYPYC_OPT_IN = [MYPYC_RUN, MYPYC_RUN_MULTI] +# These must be enabled by explicitly including 'stubtest' on the command line. +STUBTEST_OPT_IN = [STUBTEST, STUBTEST_MISC, STUBTEST_INTEGRATION] + # We split the pytest run into three parts to improve test # parallelization. Each run should have tests that each take a roughly similar # time to run. @@ -76,6 +87,7 @@ # Mypyc tests that aren't run by default, since they are slow and rarely # fail for commits that don't touch mypyc 'mypyc-extra': 'pytest -k "%s"' % ' or '.join(MYPYC_OPT_IN), + 'stubtest': 'pytest -k "%s"' % ' or '.join(STUBTEST_OPT_IN), } # Stop run immediately if these commands fail From b921a83e0a460899c82ba84d84a6a262841addb1 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 14 Feb 2020 15:39:03 +0000 Subject: [PATCH 090/117] Refactor: move is_typeshed_file() to mypy.util (#8404) This way we can call it in contexts that don't have access to an `Errors` instance. 
--- mypy/checker.py | 3 ++- mypy/errors.py | 8 ++------ mypy/semanal.py | 4 ++-- mypy/semanal_main.py | 5 +++-- mypy/util.py | 6 ++++++ 5 files changed, 15 insertions(+), 11 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index 71cf906a7d27..db5f0fb126dc 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -78,6 +78,7 @@ from mypy import state, errorcodes as codes from mypy.traverser import has_return_statement, all_return_statements from mypy.errorcodes import ErrorCode +from mypy.util import is_typeshed_file T = TypeVar('T') @@ -233,7 +234,7 @@ def __init__(self, errors: Errors, modules: Dict[str, MypyFile], options: Option self.pass_num = 0 self.current_node_deferred = False self.is_stub = tree.is_stub - self.is_typeshed_stub = errors.is_typeshed_file(path) + self.is_typeshed_stub = is_typeshed_file(path) self.inferred_attribute_types = None if options.strict_optional_whitelist is None: self.suppress_none_errors = not options.show_none_errors diff --git a/mypy/errors.py b/mypy/errors.py index 89d9baec93f2..06651b764d62 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -11,7 +11,7 @@ from mypy.version import __version__ as mypy_version from mypy.errorcodes import ErrorCode from mypy import errorcodes as codes -from mypy.util import DEFAULT_SOURCE_OFFSET +from mypy.util import DEFAULT_SOURCE_OFFSET, is_typeshed_file T = TypeVar('T') allowed_duplicates = ['@overload', 'Got:', 'Expected:'] # type: Final @@ -372,7 +372,7 @@ def clear_errors_in_targets(self, path: str, targets: Set[str]) -> None: def generate_unused_ignore_errors(self, file: str) -> None: ignored_lines = self.ignored_lines[file] - if not self.is_typeshed_file(file) and file not in self.ignored_files: + if not is_typeshed_file(file) and file not in self.ignored_files: for line in set(ignored_lines) - self.used_ignored_lines[file]: # Don't use report since add_error_info will ignore the error! 
info = ErrorInfo(self.import_context(), file, self.current_module(), None, @@ -380,10 +380,6 @@ def generate_unused_ignore_errors(self, file: str) -> None: None, False, False) self._add_error_info(file, info) - def is_typeshed_file(self, file: str) -> bool: - # gross, but no other clear way to tell - return 'typeshed' in os.path.normpath(file).split(os.sep) - def num_messages(self) -> int: """Return the number of generated messages.""" return sum(len(x) for x in self.error_info_map.values()) diff --git a/mypy/semanal.py b/mypy/semanal.py index 72ea96173be8..be455a737202 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -103,7 +103,7 @@ Plugin, ClassDefContext, SemanticAnalyzerPluginInterface, DynamicClassDefContext ) -from mypy.util import correct_relative_import, unmangle, module_prefix +from mypy.util import correct_relative_import, unmangle, module_prefix, is_typeshed_file from mypy.scope import Scope from mypy.semanal_shared import ( SemanticAnalyzerInterface, set_callable_name, calculate_tuple_fallback, PRIORITY_FALLBACKS @@ -481,7 +481,7 @@ def file_context(self, self.cur_mod_id = file_node.fullname scope.enter_file(self.cur_mod_id) self.is_stub_file = file_node.path.lower().endswith('.pyi') - self._is_typeshed_stub_file = self.errors.is_typeshed_file(file_node.path) + self._is_typeshed_stub_file = is_typeshed_file(file_node.path) self.globals = file_node.names self.tvar_scope = TypeVarScope() diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py index cac2a83214b8..c3f4dd809127 100644 --- a/mypy/semanal_main.py +++ b/mypy/semanal_main.py @@ -44,6 +44,7 @@ from mypy.semanal_infer import infer_decorator_signature_if_simple from mypy.checker import FineGrainedDeferredNode from mypy.server.aststrip import SavedAttributes +from mypy.util import is_typeshed_file import mypy.build if TYPE_CHECKING: @@ -353,7 +354,7 @@ def check_type_arguments(graph: 'Graph', scc: List[str], errors: Errors) -> None assert state.tree analyzer = TypeArgumentAnalyzer(errors, 
state.options, - errors.is_typeshed_file(state.path or '')) + is_typeshed_file(state.path or '')) with state.wrap_context(): with strict_optional_set(state.options.strict_optional): state.tree.accept(analyzer) @@ -368,7 +369,7 @@ def check_type_arguments_in_targets(targets: List[FineGrainedDeferredNode], stat """ analyzer = TypeArgumentAnalyzer(errors, state.options, - errors.is_typeshed_file(state.path or '')) + is_typeshed_file(state.path or '')) with state.wrap_context(): with strict_optional_set(state.options.strict_optional): for target in targets: diff --git a/mypy/util.py b/mypy/util.py index fe36297d297a..f8d9368804ba 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -1,4 +1,5 @@ """Utility functions with no non-trivial dependencies.""" + import os import pathlib import re @@ -700,3 +701,8 @@ def format_error(self, n_errors: int, n_files: int, n_sources: int, if not use_color: return msg return self.style(msg, 'red', bold=True) + + +def is_typeshed_file(file: str) -> bool: + # gross, but no other clear way to tell + return 'typeshed' in os.path.normpath(file).split(os.sep) From 83012da090b1b1b8f082663778acb44b2cd7ecb0 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sat, 15 Feb 2020 19:34:10 +0000 Subject: [PATCH 091/117] [mypyc] Refactor: extract non-local control classes from mypyc.genops (#8406) This is the first PR in my quest to tidy up mypyc.genops. My goal is to eventually break it up into files no longer than 1500 lines each, and with a clean dependency structure. In the initial stages there will be many cyclic dependencies, but I plan to fix those later on. 
--- mypyc/genops.py | 164 ++----------------------------------- mypyc/nonlocalcontrol.py | 170 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 178 insertions(+), 156 deletions(-) create mode 100644 mypyc/nonlocalcontrol.py diff --git a/mypyc/genops.py b/mypyc/genops.py index da1423705131..78f67a4b5bb3 100644 --- a/mypyc/genops.py +++ b/mypyc/genops.py @@ -12,13 +12,15 @@ def f(x: int) -> int: r2 = x * r0 :: int r3 = r2 + r1 :: int return r3 + +The IR is implemented in mypyc.ops. """ + from typing import ( TypeVar, Callable, Dict, List, Tuple, Optional, Union, Sequence, Set, Any, Iterable, cast ) from typing_extensions import overload, NoReturn from collections import OrderedDict -from abc import abstractmethod import importlib.util import itertools @@ -93,7 +95,7 @@ def f(x: int) -> int: from mypyc.ops_exc import ( raise_exception_op, raise_exception_with_tb_op, reraise_exception_op, error_catch_op, restore_exc_info_op, exc_matches_op, get_exc_value_op, - get_exc_info_op, keep_propagating_op, set_stop_iteration_value, + get_exc_info_op, keep_propagating_op ) from mypyc.genops_for import ForGenerator, ForRange, ForList, ForIterable, ForEnumerate, ForZip from mypyc.rt_subtype import is_runtime_subtype @@ -102,6 +104,10 @@ def f(x: int) -> int: from mypyc.crash import catch_errors from mypyc.options import CompilerOptions from mypyc.errors import Errors +from mypyc.nonlocalcontrol import ( + NonlocalControl, BaseNonlocalControl, LoopNonlocalControl, ExceptNonlocalControl, + FinallyNonlocalControl, TryFinallyNonlocalControl, GeneratorNonlocalControl +) GenFunc = Callable[[], None] DictEntry = Tuple[Optional[Value], Value] @@ -935,160 +941,6 @@ def wrapper(f: Specializer) -> Specializer: return wrapper -class NonlocalControl: - """Represents a stack frame of constructs that modify nonlocal control flow. - - The nonlocal control flow constructs are break, continue, and - return, and their behavior is modified by a number of other - constructs. 
The most obvious is loop, which override where break - and continue jump to, but also `except` (which needs to clear - exc_info when left) and (eventually) finally blocks (which need to - ensure that the finally block is always executed when leaving the - try/except blocks). - """ - @abstractmethod - def gen_break(self, builder: 'IRBuilder', line: int) -> None: pass - - @abstractmethod - def gen_continue(self, builder: 'IRBuilder', line: int) -> None: pass - - @abstractmethod - def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None: pass - - -class BaseNonlocalControl(NonlocalControl): - def gen_break(self, builder: 'IRBuilder', line: int) -> None: - assert False, "break outside of loop" - - def gen_continue(self, builder: 'IRBuilder', line: int) -> None: - assert False, "continue outside of loop" - - def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None: - builder.add(Return(value)) - - -class LoopNonlocalControl(NonlocalControl): - def __init__(self, outer: NonlocalControl, - continue_block: BasicBlock, break_block: BasicBlock) -> None: - self.outer = outer - self.continue_block = continue_block - self.break_block = break_block - - def gen_break(self, builder: 'IRBuilder', line: int) -> None: - builder.add(Goto(self.break_block)) - - def gen_continue(self, builder: 'IRBuilder', line: int) -> None: - builder.add(Goto(self.continue_block)) - - def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None: - self.outer.gen_return(builder, value, line) - - -class GeneratorNonlocalControl(BaseNonlocalControl): - def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None: - # Assign an invalid next label number so that the next time __next__ is called, we jump to - # the case in which StopIteration is raised. - builder.assign(builder.fn_info.generator_class.next_label_target, - builder.add(LoadInt(-1)), - line) - # Raise a StopIteration containing a field for the value that should be returned. 
Before - # doing so, create a new block without an error handler set so that the implicitly thrown - # StopIteration isn't caught by except blocks inside of the generator function. - builder.error_handlers.append(None) - builder.goto_new_block() - # Skip creating a traceback frame when we raise here, because - # we don't care about the traceback frame and it is kind of - # expensive since raising StopIteration is an extremely common case. - # Also we call a special internal function to set StopIteration instead of - # using RaiseStandardError because the obvious thing doesn't work if the - # value is a tuple (???). - builder.primitive_op(set_stop_iteration_value, [value], NO_TRACEBACK_LINE_NO) - builder.add(Unreachable()) - builder.error_handlers.pop() - - -class CleanupNonlocalControl(NonlocalControl): - """Abstract nonlocal control that runs some cleanup code. """ - def __init__(self, outer: NonlocalControl) -> None: - self.outer = outer - - @abstractmethod - def gen_cleanup(self, builder: 'IRBuilder', line: int) -> None: ... 
- - def gen_break(self, builder: 'IRBuilder', line: int) -> None: - self.gen_cleanup(builder, line) - self.outer.gen_break(builder, line) - - def gen_continue(self, builder: 'IRBuilder', line: int) -> None: - self.gen_cleanup(builder, line) - self.outer.gen_continue(builder, line) - - def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None: - self.gen_cleanup(builder, line) - self.outer.gen_return(builder, value, line) - - -class TryFinallyNonlocalControl(NonlocalControl): - def __init__(self, target: BasicBlock) -> None: - self.target = target - self.ret_reg = None # type: Optional[Register] - - def gen_break(self, builder: 'IRBuilder', line: int) -> None: - builder.error("break inside try/finally block is unimplemented", line) - - def gen_continue(self, builder: 'IRBuilder', line: int) -> None: - builder.error("continue inside try/finally block is unimplemented", line) - - def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None: - if self.ret_reg is None: - self.ret_reg = builder.alloc_temp(builder.ret_types[-1]) - - builder.add(Assign(self.ret_reg, value)) - builder.add(Goto(self.target)) - - -class ExceptNonlocalControl(CleanupNonlocalControl): - """Nonlocal control for except blocks. - - Just makes sure that sys.exc_info always gets restored when we leave. - This is super annoying. - """ - def __init__(self, outer: NonlocalControl, saved: Union[Value, AssignmentTarget]) -> None: - super().__init__(outer) - self.saved = saved - - def gen_cleanup(self, builder: 'IRBuilder', line: int) -> None: - builder.primitive_op(restore_exc_info_op, [builder.read(self.saved)], line) - - -class FinallyNonlocalControl(CleanupNonlocalControl): - """Nonlocal control for finally blocks. - - Just makes sure that sys.exc_info always gets restored when we - leave and the return register is decrefed if it isn't null. 
- """ - def __init__(self, outer: NonlocalControl, ret_reg: Optional[Value], saved: Value) -> None: - super().__init__(outer) - self.ret_reg = ret_reg - self.saved = saved - - def gen_cleanup(self, builder: 'IRBuilder', line: int) -> None: - # Do an error branch on the return value register, which - # may be undefined. This will allow it to be properly - # decrefed if it is not null. This is kind of a hack. - if self.ret_reg: - target = BasicBlock() - builder.add(Branch(self.ret_reg, target, target, Branch.IS_ERROR)) - builder.activate_block(target) - - # Restore the old exc_info - target, cleanup = BasicBlock(), BasicBlock() - builder.add(Branch(self.saved, target, cleanup, Branch.IS_ERROR)) - builder.activate_block(cleanup) - builder.primitive_op(restore_exc_info_op, [self.saved], line) - builder.goto_and_activate(target) - - class IRBuilder(ExpressionVisitor[Value], StatementVisitor[None]): def __init__(self, current_module: str, diff --git a/mypyc/nonlocalcontrol.py b/mypyc/nonlocalcontrol.py new file mode 100644 index 000000000000..275f84bc0fd9 --- /dev/null +++ b/mypyc/nonlocalcontrol.py @@ -0,0 +1,170 @@ +from abc import abstractmethod +from typing import Optional, Union + +from mypyc.ops import ( + Branch, BasicBlock, Unreachable, Value, Goto, LoadInt, Assign, Register, Return, + AssignmentTarget, NO_TRACEBACK_LINE_NO +) +from mypyc.ops_exc import set_stop_iteration_value, restore_exc_info_op + +MYPY = False +if MYPY: + from mypyc.genops import IRBuilder + + +class NonlocalControl: + """Represents a stack frame of constructs that modify nonlocal control flow. + + The nonlocal control flow constructs are break, continue, and + return, and their behavior is modified by a number of other + constructs. 
The most obvious is loop, which override where break + and continue jump to, but also `except` (which needs to clear + exc_info when left) and (eventually) finally blocks (which need to + ensure that the finally block is always executed when leaving the + try/except blocks). + """ + + @abstractmethod + def gen_break(self, builder: 'IRBuilder', line: int) -> None: pass + + @abstractmethod + def gen_continue(self, builder: 'IRBuilder', line: int) -> None: pass + + @abstractmethod + def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None: pass + + +class BaseNonlocalControl(NonlocalControl): + def gen_break(self, builder: 'IRBuilder', line: int) -> None: + assert False, "break outside of loop" + + def gen_continue(self, builder: 'IRBuilder', line: int) -> None: + assert False, "continue outside of loop" + + def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None: + builder.add(Return(value)) + + +class LoopNonlocalControl(NonlocalControl): + def __init__(self, outer: NonlocalControl, + continue_block: BasicBlock, break_block: BasicBlock) -> None: + self.outer = outer + self.continue_block = continue_block + self.break_block = break_block + + def gen_break(self, builder: 'IRBuilder', line: int) -> None: + builder.add(Goto(self.break_block)) + + def gen_continue(self, builder: 'IRBuilder', line: int) -> None: + builder.add(Goto(self.continue_block)) + + def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None: + self.outer.gen_return(builder, value, line) + + +class GeneratorNonlocalControl(BaseNonlocalControl): + def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None: + # Assign an invalid next label number so that the next time __next__ is called, we jump to + # the case in which StopIteration is raised. + builder.assign(builder.fn_info.generator_class.next_label_target, + builder.add(LoadInt(-1)), + line) + # Raise a StopIteration containing a field for the value that should be returned. 
Before + # doing so, create a new block without an error handler set so that the implicitly thrown + # StopIteration isn't caught by except blocks inside of the generator function. + builder.error_handlers.append(None) + builder.goto_new_block() + # Skip creating a traceback frame when we raise here, because + # we don't care about the traceback frame and it is kind of + # expensive since raising StopIteration is an extremely common case. + # Also we call a special internal function to set StopIteration instead of + # using RaiseStandardError because the obvious thing doesn't work if the + # value is a tuple (???). + builder.primitive_op(set_stop_iteration_value, [value], NO_TRACEBACK_LINE_NO) + builder.add(Unreachable()) + builder.error_handlers.pop() + + +class CleanupNonlocalControl(NonlocalControl): + """Abstract nonlocal control that runs some cleanup code. """ + + def __init__(self, outer: NonlocalControl) -> None: + self.outer = outer + + @abstractmethod + def gen_cleanup(self, builder: 'IRBuilder', line: int) -> None: ... 
+ + def gen_break(self, builder: 'IRBuilder', line: int) -> None: + self.gen_cleanup(builder, line) + self.outer.gen_break(builder, line) + + def gen_continue(self, builder: 'IRBuilder', line: int) -> None: + self.gen_cleanup(builder, line) + self.outer.gen_continue(builder, line) + + def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None: + self.gen_cleanup(builder, line) + self.outer.gen_return(builder, value, line) + + +class TryFinallyNonlocalControl(NonlocalControl): + def __init__(self, target: BasicBlock) -> None: + self.target = target + self.ret_reg = None # type: Optional[Register] + + def gen_break(self, builder: 'IRBuilder', line: int) -> None: + builder.error("break inside try/finally block is unimplemented", line) + + def gen_continue(self, builder: 'IRBuilder', line: int) -> None: + builder.error("continue inside try/finally block is unimplemented", line) + + def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None: + if self.ret_reg is None: + self.ret_reg = builder.alloc_temp(builder.ret_types[-1]) + + builder.add(Assign(self.ret_reg, value)) + builder.add(Goto(self.target)) + + +class ExceptNonlocalControl(CleanupNonlocalControl): + """Nonlocal control for except blocks. + + Just makes sure that sys.exc_info always gets restored when we leave. + This is super annoying. + """ + + def __init__(self, outer: NonlocalControl, saved: Union[Value, AssignmentTarget]) -> None: + super().__init__(outer) + self.saved = saved + + def gen_cleanup(self, builder: 'IRBuilder', line: int) -> None: + builder.primitive_op(restore_exc_info_op, [builder.read(self.saved)], line) + + +class FinallyNonlocalControl(CleanupNonlocalControl): + """Nonlocal control for finally blocks. + + Just makes sure that sys.exc_info always gets restored when we + leave and the return register is decrefed if it isn't null. 
+ """ + + def __init__(self, outer: NonlocalControl, ret_reg: Optional[Value], saved: Value) -> None: + super().__init__(outer) + self.ret_reg = ret_reg + self.saved = saved + + def gen_cleanup(self, builder: 'IRBuilder', line: int) -> None: + # Do an error branch on the return value register, which + # may be undefined. This will allow it to be properly + # decrefed if it is not null. This is kind of a hack. + if self.ret_reg: + target = BasicBlock() + builder.add(Branch(self.ret_reg, target, target, Branch.IS_ERROR)) + builder.activate_block(target) + + # Restore the old exc_info + target, cleanup = BasicBlock(), BasicBlock() + builder.add(Branch(self.saved, target, cleanup, Branch.IS_ERROR)) + builder.activate_block(cleanup) + builder.primitive_op(restore_exc_info_op, [self.saved], line) + builder.goto_and_activate(target) From 39e96f50e4f1de8ce1ef379a83b1d9efb40e9b01 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sat, 15 Feb 2020 21:09:37 +0000 Subject: [PATCH 092/117] [mypyc] Refactor: extract class-related genops to a new module (#8407) Also extract some utilities. This mostly preserves the original structure of the code, even though it's not optimal after extraction. This is something I plan to clean up later. This introduces an import cycle that I will fix in a later PR. 
--- mypyc/genclass.py | 517 +++++++++++++++++++++++++++++++++++++++ mypyc/genops.py | 571 ++------------------------------------------ mypyc/genopsutil.py | 104 ++++++++ 3 files changed, 639 insertions(+), 553 deletions(-) create mode 100644 mypyc/genclass.py create mode 100644 mypyc/genopsutil.py diff --git a/mypyc/genclass.py b/mypyc/genclass.py new file mode 100644 index 000000000000..44bdd1ea0ea0 --- /dev/null +++ b/mypyc/genclass.py @@ -0,0 +1,517 @@ +from typing import List, Optional, Union +from typing_extensions import overload + +from mypy.nodes import ( + ClassDef, FuncDef, OverloadedFuncDef, PassStmt, AssignmentStmt, NameExpr, StrExpr, + ExpressionStmt, TempNode, Decorator, Statement, Expression, Lvalue, RefExpr, Var, + is_class_var +) +from mypyc.ops import ( + Op, Value, OpDescription, NonExtClassInfo, Call, FuncDecl, LoadErrorValue, LoadStatic, + InitStatic, FuncSignature, TupleSet, SetAttr, Return, FuncIR, ClassIR, RInstance, + BasicBlock, Branch, MethodCall, RuntimeArg, + NAMESPACE_TYPE, + object_rprimitive, bool_rprimitive, dict_rprimitive, is_optional_type, is_object_rprimitive, + is_none_rprimitive, +) +from mypyc.ops_misc import ( + dataclass_sleight_of_hand, py_setattr_op, pytype_from_template_op, py_calc_meta_op, + type_object_op, py_hasattr_op, not_implemented_op, true_op +) +from mypyc.ops_dict import dict_set_item_op, new_dict_op +from mypyc.ops_tuple import new_tuple_op +from mypyc.genopsutil import is_dataclass_decorator, get_func_def, is_dataclass +from mypyc.common import SELF_NAME + +MYPY = False +if MYPY: + from mypyc.genops import IRBuilder + + +class BuildClassIR: + def __init__(self, builder: 'IRBuilder') -> None: + self.builder = builder + self.mapper = builder.mapper + self.module_name = builder.module_name + + def visit_class_def(self, cdef: ClassDef) -> None: + ir = self.mapper.type_to_ir[cdef.info] + + # We do this check here because the base field of parent + # classes aren't necessarily populated yet at + # prepare_class_def 
time. + if any(ir.base_mro[i].base != ir. base_mro[i + 1] for i in range(len(ir.base_mro) - 1)): + self.error("Non-trait MRO must be linear", cdef.line) + + if ir.allow_interpreted_subclasses: + for parent in ir.mro: + if not parent.allow_interpreted_subclasses: + self.error( + 'Base class "{}" does not allow interpreted subclasses'.format( + parent.fullname), cdef.line) + + # Currently, we only create non-extension classes for classes that are + # decorated or inherit from Enum. Classes decorated with @trait do not + # apply here, and are handled in a different way. + if ir.is_ext_class: + # If the class is not decorated, generate an extension class for it. + type_obj = self.allocate_class(cdef) # type: Optional[Value] + non_ext = None # type: Optional[NonExtClassInfo] + dataclass_non_ext = self.dataclass_non_ext_info(cdef) + else: + non_ext_bases = self.populate_non_ext_bases(cdef) + non_ext_metaclass = self.find_non_ext_metaclass(cdef, non_ext_bases) + non_ext_dict = self.setup_non_ext_dict(cdef, non_ext_metaclass, non_ext_bases) + # We populate __annotations__ for non-extension classes + # because dataclasses uses it to determine which attributes to compute on. 
+ # TODO: Maybe generate more precise types for annotations + non_ext_anns = self.primitive_op(new_dict_op, [], cdef.line) + non_ext = NonExtClassInfo(non_ext_dict, non_ext_bases, non_ext_anns, non_ext_metaclass) + dataclass_non_ext = None + type_obj = None + + attrs_to_cache = [] # type: List[Lvalue] + + for stmt in cdef.defs.body: + if isinstance(stmt, OverloadedFuncDef) and stmt.is_property: + if not ir.is_ext_class: + # properties with both getters and setters in non_extension + # classes not supported + self.error("Property setters not supported in non-extension classes", + stmt.line) + for item in stmt.items: + with self.builder.catch_errors(stmt.line): + self.builder.visit_method(cdef, non_ext, get_func_def(item)) + elif isinstance(stmt, (FuncDef, Decorator, OverloadedFuncDef)): + # Ignore plugin generated methods (since they have no + # bodies to compile and will need to have the bodies + # provided by some other mechanism.) + if cdef.info.names[stmt.name].plugin_generated: + continue + with self.builder.catch_errors(stmt.line): + self.builder.visit_method(cdef, non_ext, get_func_def(stmt)) + elif isinstance(stmt, PassStmt): + continue + elif isinstance(stmt, AssignmentStmt): + if len(stmt.lvalues) != 1: + self.error("Multiple assignment in class bodies not supported", stmt.line) + continue + lvalue = stmt.lvalues[0] + if not isinstance(lvalue, NameExpr): + self.error("Only assignment to variables is supported in class bodies", + stmt.line) + continue + # We want to collect class variables in a dictionary for both real + # non-extension classes and fake dataclass ones. + var_non_ext = non_ext or dataclass_non_ext + if var_non_ext: + self.add_non_ext_class_attr(var_non_ext, lvalue, stmt, cdef, attrs_to_cache) + if non_ext: + continue + # Variable declaration with no body + if isinstance(stmt.rvalue, TempNode): + continue + # Only treat marked class variables as class variables. 
+ if not (is_class_var(lvalue) or stmt.is_final_def): + continue + typ = self.builder.load_native_type_object(cdef.fullname) + value = self.accept(stmt.rvalue) + self.primitive_op( + py_setattr_op, [typ, self.load_static_unicode(lvalue.name), value], stmt.line) + if self.builder.non_function_scope() and stmt.is_final_def: + self.builder.init_final_static(lvalue, value, cdef.name) + elif isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, StrExpr): + # Docstring. Ignore + pass + else: + self.error("Unsupported statement in class body", stmt.line) + + if not non_ext: # That is, an extension class + self.generate_attr_defaults(cdef) + self.create_ne_from_eq(cdef) + if dataclass_non_ext: + assert type_obj + self.dataclass_finalize(cdef, dataclass_non_ext, type_obj) + else: + # Dynamically create the class via the type constructor + non_ext_class = self.load_non_ext_class(ir, non_ext, cdef.line) + non_ext_class = self.load_decorated_class(cdef, non_ext_class) + + # Save the decorated class + self.add(InitStatic(non_ext_class, cdef.name, self.module_name, NAMESPACE_TYPE)) + + # Add the non-extension class to the dict + self.primitive_op(dict_set_item_op, + [ + self.builder.load_globals_dict(), + self.load_static_unicode(cdef.name), + non_ext_class + ], cdef.line) + + # Cache any cachable class attributes + self.cache_class_attrs(attrs_to_cache, cdef) + + # Set this attribute back to None until the next non-extension class is visited. 
+ self.non_ext_info = None + + def allocate_class(self, cdef: ClassDef) -> Value: + # OK AND NOW THE FUN PART + base_exprs = cdef.base_type_exprs + cdef.removed_base_type_exprs + if base_exprs: + bases = [self.accept(x) for x in base_exprs] + tp_bases = self.primitive_op(new_tuple_op, bases, cdef.line) + else: + tp_bases = self.add(LoadErrorValue(object_rprimitive, is_borrowed=True)) + modname = self.load_static_unicode(self.module_name) + template = self.add(LoadStatic(object_rprimitive, cdef.name + "_template", + self.module_name, NAMESPACE_TYPE)) + # Create the class + tp = self.primitive_op(pytype_from_template_op, + [template, tp_bases, modname], cdef.line) + # Immediately fix up the trait vtables, before doing anything with the class. + ir = self.mapper.type_to_ir[cdef.info] + if not ir.is_trait and not ir.builtin_base: + self.add(Call( + FuncDecl(cdef.name + '_trait_vtable_setup', + None, self.module_name, + FuncSignature([], bool_rprimitive)), [], -1)) + # Populate a '__mypyc_attrs__' field containing the list of attrs + self.primitive_op(py_setattr_op, [ + tp, self.load_static_unicode('__mypyc_attrs__'), + self.create_mypyc_attrs_tuple(self.mapper.type_to_ir[cdef.info], cdef.line)], + cdef.line) + + # Save the class + self.add(InitStatic(tp, cdef.name, self.module_name, NAMESPACE_TYPE)) + + # Add it to the dict + self.primitive_op(dict_set_item_op, + [ + self.builder.load_globals_dict(), + self.load_static_unicode(cdef.name), + tp, + ], cdef.line) + + return tp + + def populate_non_ext_bases(self, cdef: ClassDef) -> Value: + """ + Populate the base-class tuple passed to the metaclass constructor + for non-extension classes. 
+ """ + ir = self.mapper.type_to_ir[cdef.info] + bases = [] + for cls in cdef.info.mro[1:]: + if cls.fullname == 'builtins.object': + continue + # Add the current class to the base classes list of concrete subclasses + if cls in self.mapper.type_to_ir: + base_ir = self.mapper.type_to_ir[cls] + if base_ir.children is not None: + base_ir.children.append(ir) + + base = self.builder.load_global_str(cls.name, cdef.line) + bases.append(base) + return self.primitive_op(new_tuple_op, bases, cdef.line) + + def find_non_ext_metaclass(self, cdef: ClassDef, bases: Value) -> Value: + """Find the metaclass of a class from its defs and bases. """ + if cdef.metaclass: + declared_metaclass = self.accept(cdef.metaclass) + else: + declared_metaclass = self.primitive_op(type_object_op, [], cdef.line) + + return self.primitive_op(py_calc_meta_op, [declared_metaclass, bases], cdef.line) + + def setup_non_ext_dict(self, cdef: ClassDef, metaclass: Value, bases: Value) -> Value: + """ + Initialize the class dictionary for a non-extension class. This class dictionary + is passed to the metaclass constructor. + """ + + # Check if the metaclass defines a __prepare__ method, and if so, call it. 
+ has_prepare = self.primitive_op(py_hasattr_op, + [metaclass, + self.load_static_unicode('__prepare__')], cdef.line) + + non_ext_dict = self.builder.alloc_temp(dict_rprimitive) + + true_block, false_block, exit_block, = BasicBlock(), BasicBlock(), BasicBlock() + self.builder.add_bool_branch(has_prepare, true_block, false_block) + + self.builder.activate_block(true_block) + cls_name = self.load_static_unicode(cdef.name) + prepare_meth = self.builder.py_get_attr(metaclass, '__prepare__', cdef.line) + prepare_dict = self.builder.py_call(prepare_meth, [cls_name, bases], cdef.line) + self.builder.assign(non_ext_dict, prepare_dict, cdef.line) + self.builder.goto(exit_block) + + self.builder.activate_block(false_block) + self.builder.assign(non_ext_dict, self.primitive_op(new_dict_op, [], cdef.line), cdef.line) + self.builder.goto(exit_block) + self.builder.activate_block(exit_block) + + return non_ext_dict + + def add_non_ext_class_attr(self, non_ext: NonExtClassInfo, lvalue: NameExpr, + stmt: AssignmentStmt, cdef: ClassDef, + attr_to_cache: List[Lvalue]) -> None: + """ + Add a class attribute to __annotations__ of a non-extension class. If the + attribute is assigned to a value, it is also added to __dict__. + """ + + # We populate __annotations__ because dataclasses uses it to determine + # which attributes to compute on. + # TODO: Maybe generate more precise types for annotations + key = self.load_static_unicode(lvalue.name) + typ = self.primitive_op(type_object_op, [], stmt.line) + self.primitive_op(dict_set_item_op, [non_ext.anns, key, typ], stmt.line) + + # Only add the attribute to the __dict__ if the assignment is of the form: + # x: type = value (don't add attributes of the form 'x: type' to the __dict__). + if not isinstance(stmt.rvalue, TempNode): + rvalue = self.accept(stmt.rvalue) + self.builder.add_to_non_ext_dict(non_ext, lvalue.name, rvalue, stmt.line) + # We cache enum attributes to speed up enum attribute lookup since they + # are final. 
+ if ( + cdef.info.bases + and cdef.info.bases[0].type.fullname == 'enum.Enum' + # Skip "_order_" and "__order__", since Enum will remove it + and lvalue.name not in ('_order_', '__order__') + ): + attr_to_cache.append(lvalue) + + def generate_attr_defaults(self, cdef: ClassDef) -> None: + """Generate an initialization method for default attr values (from class vars)""" + cls = self.mapper.type_to_ir[cdef.info] + if cls.builtin_base: + return + + # Pull out all assignments in classes in the mro so we can initialize them + # TODO: Support nested statements + default_assignments = [] + for info in reversed(cdef.info.mro): + if info not in self.mapper.type_to_ir: + continue + for stmt in info.defn.defs.body: + if (isinstance(stmt, AssignmentStmt) + and isinstance(stmt.lvalues[0], NameExpr) + and not is_class_var(stmt.lvalues[0]) + and not isinstance(stmt.rvalue, TempNode)): + if stmt.lvalues[0].name == '__slots__': + continue + + # Skip type annotated assignments in dataclasses + if is_dataclass(cdef) and stmt.type: + continue + + default_assignments.append(stmt) + + if not default_assignments: + return + + self.builder.enter() + self.builder.ret_types[-1] = bool_rprimitive + + rt_args = (RuntimeArg(SELF_NAME, RInstance(cls)),) + self_var = self.builder.read(self.builder.add_self_to_env(cls), -1) + + for stmt in default_assignments: + lvalue = stmt.lvalues[0] + assert isinstance(lvalue, NameExpr) + if not stmt.is_final_def and not self.builder.is_constant(stmt.rvalue): + self.builder.warning('Unsupported default attribute value', stmt.rvalue.line) + + # If the attribute is initialized to None and type isn't optional, + # don't initialize it to anything. 
+ attr_type = cls.attr_type(lvalue.name) + if isinstance(stmt.rvalue, RefExpr) and stmt.rvalue.fullname == 'builtins.None': + if (not is_optional_type(attr_type) and not is_object_rprimitive(attr_type) + and not is_none_rprimitive(attr_type)): + continue + val = self.builder.coerce(self.accept(stmt.rvalue), attr_type, stmt.line) + self.add(SetAttr(self_var, lvalue.name, val, -1)) + + self.add(Return(self.primitive_op(true_op, [], -1))) + + blocks, env, ret_type, _ = self.builder.leave() + ir = FuncIR( + FuncDecl('__mypyc_defaults_setup', + cls.name, self.module_name, + FuncSignature(rt_args, ret_type)), + blocks, env) + self.builder.functions.append(ir) + cls.methods[ir.name] = ir + + def create_ne_from_eq(self, cdef: ClassDef) -> None: + cls = self.mapper.type_to_ir[cdef.info] + if cls.has_method('__eq__') and not cls.has_method('__ne__'): + f = self.gen_glue_ne_method(cls, cdef.line) + cls.method_decls['__ne__'] = f.decl + cls.methods['__ne__'] = f + self.builder.functions.append(f) + + def gen_glue_ne_method(self, cls: ClassIR, line: int) -> FuncIR: + """Generate a __ne__ method from a __eq__ method. 
""" + self.builder.enter() + + rt_args = (RuntimeArg("self", RInstance(cls)), RuntimeArg("rhs", object_rprimitive)) + + # The environment operates on Vars, so we make some up + fake_vars = [(Var(arg.name), arg.type) for arg in rt_args] + args = [ + self.builder.read( + self.builder.environment.add_local_reg( + var, type, is_arg=True + ), + line + ) + for var, type in fake_vars + ] # type: List[Value] + self.builder.ret_types[-1] = object_rprimitive + + # If __eq__ returns NotImplemented, then __ne__ should also + not_implemented_block, regular_block = BasicBlock(), BasicBlock() + eqval = self.add(MethodCall(args[0], '__eq__', [args[1]], line)) + not_implemented = self.primitive_op(not_implemented_op, [], line) + self.add(Branch( + self.builder.binary_op(eqval, not_implemented, 'is', line), + not_implemented_block, + regular_block, + Branch.BOOL_EXPR)) + + self.builder.activate_block(regular_block) + retval = self.builder.coerce( + self.builder.unary_op(eqval, 'not', line), object_rprimitive, line + ) + self.add(Return(retval)) + + self.builder.activate_block(not_implemented_block) + self.add(Return(not_implemented)) + + blocks, env, ret_type, _ = self.builder.leave() + return FuncIR( + FuncDecl('__ne__', cls.name, self.module_name, + FuncSignature(rt_args, ret_type)), + blocks, env) + + def load_non_ext_class(self, ir: ClassIR, non_ext: NonExtClassInfo, line: int) -> Value: + cls_name = self.load_static_unicode(ir.name) + + self.finish_non_ext_dict(non_ext, line) + + class_type_obj = self.builder.py_call(non_ext.metaclass, + [cls_name, non_ext.bases, non_ext.dict], + line) + return class_type_obj + + def load_decorated_class(self, cdef: ClassDef, type_obj: Value) -> Value: + """ + Given a decorated ClassDef and a register containing a non-extension representation of the + ClassDef created via the type constructor, applies the corresponding decorator functions + on that decorated ClassDef and returns a register containing the decorated ClassDef. 
+ """ + decorators = cdef.decorators + dec_class = type_obj + for d in reversed(decorators): + decorator = d.accept(self.builder) + assert isinstance(decorator, Value) + dec_class = self.builder.py_call(decorator, [dec_class], dec_class.line) + return dec_class + + def cache_class_attrs(self, attrs_to_cache: List[Lvalue], cdef: ClassDef) -> None: + """Add class attributes to be cached to the global cache""" + typ = self.builder.load_native_type_object(cdef.fullname) + for lval in attrs_to_cache: + assert isinstance(lval, NameExpr) + rval = self.builder.py_get_attr(typ, lval.name, cdef.line) + self.builder.init_final_static(lval, rval, cdef.name) + + def create_mypyc_attrs_tuple(self, ir: ClassIR, line: int) -> Value: + attrs = [name for ancestor in ir.mro for name in ancestor.attributes] + if ir.inherits_python: + attrs.append('__dict__') + return self.primitive_op(new_tuple_op, + [self.load_static_unicode(attr) for attr in attrs], + line) + + def finish_non_ext_dict(self, non_ext: NonExtClassInfo, line: int) -> None: + # Add __annotations__ to the class dict. + self.primitive_op(dict_set_item_op, + [non_ext.dict, self.load_static_unicode('__annotations__'), + non_ext.anns], -1) + + # We add a __doc__ attribute so if the non-extension class is decorated with the + # dataclass decorator, dataclass will not try to look for __text_signature__. + # https://github.com/python/cpython/blob/3.7/Lib/dataclasses.py#L957 + filler_doc_str = 'mypyc filler docstring' + self.builder.add_to_non_ext_dict( + non_ext, '__doc__', self.load_static_unicode(filler_doc_str), line) + self.builder.add_to_non_ext_dict( + non_ext, '__module__', self.load_static_unicode(self.module_name), line) + + def dataclass_finalize( + self, cdef: ClassDef, non_ext: NonExtClassInfo, type_obj: Value) -> None: + """Generate code to finish instantiating a dataclass. 
+ + This works by replacing all of the attributes on the class + (which will be descriptors) with whatever they would be in a + non-extension class, calling dataclass, then switching them back. + + The resulting class is an extension class and instances of it do not + have a __dict__ (unless something else requires it). + All methods written explicitly in the source are compiled and + may be called through the vtable while the methods generated + by dataclasses are interpreted and may not be. + + (If we just called dataclass without doing this, it would think that all + of the descriptors for our attributes are default values and generate an + incorrect constructor. We need to do the switch so that dataclass gets the + appropriate defaults.) + """ + self.finish_non_ext_dict(non_ext, cdef.line) + dec = self.accept(next(d for d in cdef.decorators if is_dataclass_decorator(d))) + self.primitive_op( + dataclass_sleight_of_hand, [dec, type_obj, non_ext.dict, non_ext.anns], cdef.line) + + def dataclass_non_ext_info(self, cdef: ClassDef) -> Optional[NonExtClassInfo]: + """Set up a NonExtClassInfo to track dataclass attributes. + + In addition to setting up a normal extension class for dataclasses, + we also collect its class attributes like a non-extension class so + that we can hand them to the dataclass decorator. + """ + if is_dataclass(cdef): + return NonExtClassInfo( + self.primitive_op(new_dict_op, [], cdef.line), + self.add(TupleSet([], cdef.line)), + self.primitive_op(new_dict_op, [], cdef.line), + self.primitive_op(type_object_op, [], cdef.line), + ) + else: + return None + + # Helpers + + def primitive_op(self, desc: OpDescription, args: List[Value], line: int) -> Value: + return self.builder.primitive_op(desc, args, line) + + @overload + def accept(self, node: Expression) -> Value: ... + + @overload + def accept(self, node: Statement) -> None: ... 
+ + def accept(self, node: Union[Statement, Expression]) -> Optional[Value]: + return self.builder.accept(node) + + def error(self, msg: str, line: int) -> None: + self.builder.error(msg, line) + + def add(self, op: Op) -> Value: + return self.builder.add(op) + + def load_static_unicode(self, value: str) -> Value: + return self.builder.load_static_unicode(value) diff --git a/mypyc/genops.py b/mypyc/genops.py index 78f67a4b5bb3..970537bb0f1a 100644 --- a/mypyc/genops.py +++ b/mypyc/genops.py @@ -37,7 +37,7 @@ def f(x: int) -> int: NamedTupleExpr, NewTypeExpr, NonlocalDecl, OverloadedFuncDef, PrintStmt, RaiseStmt, RevealExpr, SetExpr, SliceExpr, StarExpr, SuperExpr, TryStmt, TypeAliasExpr, TypeApplication, TypeVarExpr, TypedDictExpr, UnicodeExpr, WithStmt, YieldFromExpr, YieldExpr, GDEF, ARG_POS, - ARG_OPT, ARG_NAMED, ARG_NAMED_OPT, ARG_STAR, ARG_STAR2, is_class_var, op_methods + ARG_OPT, ARG_NAMED, ARG_STAR, ARG_STAR2, op_methods ) from mypy.types import ( Type, Instance, CallableType, NoneTyp, TupleType, UnionType, AnyType, TypeVarType, PartialType, @@ -70,8 +70,7 @@ def f(x: int) -> int: NAMESPACE_TYPE, NAMESPACE_MODULE, RaiseStandardError, LoadErrorValue, NO_TRACEBACK_LINE_NO, FuncDecl, FUNC_NORMAL, FUNC_STATICMETHOD, FUNC_CLASSMETHOD, - RUnion, is_optional_type, optional_value_type, all_concrete_classes, - DeserMaps, + RUnion, optional_value_type, all_concrete_classes, DeserMaps, ) from mypyc.ops_primitive import binary_ops, unary_ops, func_ops, method_ops, name_ref_ops from mypyc.ops_list import ( @@ -85,12 +84,10 @@ def f(x: int) -> int: from mypyc.ops_misc import ( none_op, none_object_op, true_op, false_op, iter_op, next_op, next_raw_op, check_stop_op, send_op, yield_from_except_op, coro_op, - py_getattr_op, py_setattr_op, py_delattr_op, py_hasattr_op, + py_getattr_op, py_setattr_op, py_delattr_op, py_call_op, py_call_with_kwargs_op, py_method_call_op, - fast_isinstance_op, bool_op, new_slice_op, not_implemented_op, - type_op, pytype_from_template_op, 
import_op, get_module_dict_op, - ellipsis_op, method_new_op, type_is_op, type_object_op, py_calc_meta_op, - dataclass_sleight_of_hand, + fast_isinstance_op, bool_op, new_slice_op, type_op, import_op, + get_module_dict_op, ellipsis_op, method_new_op, type_is_op, ) from mypyc.ops_exc import ( raise_exception_op, raise_exception_with_tb_op, reraise_exception_op, @@ -108,6 +105,11 @@ def f(x: int) -> int: NonlocalControl, BaseNonlocalControl, LoopNonlocalControl, ExceptNonlocalControl, FinallyNonlocalControl, TryFinallyNonlocalControl, GeneratorNonlocalControl ) +from mypyc.genopsutil import ( + is_dataclass, get_func_def, concrete_arg_kind, get_mypyc_attrs, is_extension_class, + is_trait +) +from mypyc.genclass import BuildClassIR GenFunc = Callable[[], None] DictEntry = Tuple[Optional[Value], Value] @@ -222,94 +224,6 @@ def build_ir(modules: List[MypyFile], return result -def is_trait_decorator(d: Expression) -> bool: - return isinstance(d, RefExpr) and d.fullname == 'mypy_extensions.trait' - - -def is_trait(cdef: ClassDef) -> bool: - return any(is_trait_decorator(d) for d in cdef.decorators) - - -def is_dataclass_decorator(d: Expression) -> bool: - return ( - (isinstance(d, RefExpr) and d.fullname == 'dataclasses.dataclass') - or ( - isinstance(d, CallExpr) - and isinstance(d.callee, RefExpr) - and d.callee.fullname == 'dataclasses.dataclass' - ) - ) - - -def is_dataclass(cdef: ClassDef) -> bool: - return any(is_dataclass_decorator(d) for d in cdef.decorators) - - -def get_mypyc_attr_literal(e: Expression) -> Any: - """Convert an expression from a mypyc_attr decorator to a value. 
- - Supports a pretty limited range.""" - if isinstance(e, (StrExpr, IntExpr, FloatExpr)): - return e.value - elif isinstance(e, RefExpr) and e.fullname == 'builtins.True': - return True - elif isinstance(e, RefExpr) and e.fullname == 'builtins.False': - return False - elif isinstance(e, RefExpr) and e.fullname == 'builtins.None': - return None - return NotImplemented - - -def get_mypyc_attr_call(d: Expression) -> Optional[CallExpr]: - """Check if an expression is a call to mypyc_attr and return it if so.""" - if ( - isinstance(d, CallExpr) - and isinstance(d.callee, RefExpr) - and d.callee.fullname == 'mypy_extensions.mypyc_attr' - ): - return d - return None - - -def get_mypyc_attrs(stmt: Union[ClassDef, Decorator]) -> Dict[str, Any]: - """Collect all the mypyc_attr attributes on a class definition or a function.""" - attrs = {} # type: Dict[str, Any] - for dec in stmt.decorators: - d = get_mypyc_attr_call(dec) - if d: - for name, arg in zip(d.arg_names, d.args): - if name is None: - if isinstance(arg, StrExpr): - attrs[arg.value] = True - else: - attrs[name] = get_mypyc_attr_literal(arg) - - return attrs - - -def is_extension_class(cdef: ClassDef) -> bool: - if any( - not is_trait_decorator(d) - and not is_dataclass_decorator(d) - and not get_mypyc_attr_call(d) - for d in cdef.decorators - ): - return False - elif (cdef.info.metaclass_type and cdef.info.metaclass_type.type.fullname not in ( - 'abc.ABCMeta', 'typing.TypingMeta', 'typing.GenericMeta')): - return False - return True - - -def get_func_def(op: Union[FuncDef, Decorator, OverloadedFuncDef]) -> FuncDef: - if isinstance(op, OverloadedFuncDef): - assert op.impl - op = op.impl - if isinstance(op, Decorator): - op = op.func - return op - - def get_module_func_defs(module: MypyFile) -> Iterable[FuncDef]: """Collect all of the (non-method) functions declared in a module.""" for name, node in module.names.items(): @@ -736,16 +650,6 @@ def prepare_non_ext_class_def(path: str, module_name: str, cdef: ClassDef, 
"Non-extension classes may not inherit from extension classes", path, cdef.line) -def concrete_arg_kind(kind: int) -> int: - """Find the concrete version of an arg kind that is being passed.""" - if kind == ARG_OPT: - return ARG_POS - elif kind == ARG_NAMED_OPT: - return ARG_NAMED - else: - return kind - - class FuncInfo(object): """Contains information about functions as they are generated.""" def __init__(self, @@ -1013,7 +917,7 @@ def visit_mypy_file(self, mypyfile: MypyFile) -> None: ir = self.mapper.type_to_ir[cls.info] self.classes.append(ir) - self.enter(FuncInfo(name='')) + self.enter('') # Make sure we have a builtins import self.gen_import('builtins', -1) @@ -1145,126 +1049,8 @@ def is_constant(self, e: Expression) -> bool: and (e.fullname in ('builtins.True', 'builtins.False', 'builtins.None') or (isinstance(e.node, Var) and e.node.is_final)))) - def generate_attr_defaults(self, cdef: ClassDef) -> None: - """Generate an initialization method for default attr values (from class vars)""" - cls = self.mapper.type_to_ir[cdef.info] - if cls.builtin_base: - return - - # Pull out all assignments in classes in the mro so we can initialize them - # TODO: Support nested statements - default_assignments = [] - for info in reversed(cdef.info.mro): - if info not in self.mapper.type_to_ir: - continue - for stmt in info.defn.defs.body: - if (isinstance(stmt, AssignmentStmt) - and isinstance(stmt.lvalues[0], NameExpr) - and not is_class_var(stmt.lvalues[0]) - and not isinstance(stmt.rvalue, TempNode)): - if stmt.lvalues[0].name == '__slots__': - continue - - # Skip type annotated assignments in dataclasses - if is_dataclass(cdef) and stmt.type: - continue - - default_assignments.append(stmt) - - if not default_assignments: - return - - self.enter(FuncInfo()) - self.ret_types[-1] = bool_rprimitive - - rt_args = (RuntimeArg(SELF_NAME, RInstance(cls)),) - self_var = self.read(self.add_self_to_env(cls), -1) - - for stmt in default_assignments: - lvalue = stmt.lvalues[0] - 
assert isinstance(lvalue, NameExpr) - if not stmt.is_final_def and not self.is_constant(stmt.rvalue): - self.warning('Unsupported default attribute value', stmt.rvalue.line) - - # If the attribute is initialized to None and type isn't optional, - # don't initialize it to anything. - attr_type = cls.attr_type(lvalue.name) - if isinstance(stmt.rvalue, RefExpr) and stmt.rvalue.fullname == 'builtins.None': - if (not is_optional_type(attr_type) and not is_object_rprimitive(attr_type) - and not is_none_rprimitive(attr_type)): - continue - val = self.coerce(self.accept(stmt.rvalue), attr_type, stmt.line) - self.add(SetAttr(self_var, lvalue.name, val, -1)) - - self.add(Return(self.primitive_op(true_op, [], -1))) - - blocks, env, ret_type, _ = self.leave() - ir = FuncIR( - FuncDecl('__mypyc_defaults_setup', - cls.name, self.module_name, - FuncSignature(rt_args, ret_type)), - blocks, env) - self.functions.append(ir) - cls.methods[ir.name] = ir - - def finish_non_ext_dict(self, non_ext: NonExtClassInfo, line: int) -> None: - # Add __annotations__ to the class dict. - self.primitive_op(dict_set_item_op, - [non_ext.dict, self.load_static_unicode('__annotations__'), - non_ext.anns], -1) - - # We add a __doc__ attribute so if the non-extension class is decorated with the - # dataclass decorator, dataclass will not try to look for __text_signature__. 
- # https://github.com/python/cpython/blob/3.7/Lib/dataclasses.py#L957 - filler_doc_str = 'mypyc filler docstring' - self.add_to_non_ext_dict( - non_ext, '__doc__', self.load_static_unicode(filler_doc_str), line) - self.add_to_non_ext_dict( - non_ext, '__module__', self.load_static_unicode(self.module_name), line) - - def load_non_ext_class(self, ir: ClassIR, non_ext: NonExtClassInfo, line: int) -> Value: - cls_name = self.load_static_unicode(ir.name) - - self.finish_non_ext_dict(non_ext, line) - - class_type_obj = self.py_call(non_ext.metaclass, - [cls_name, non_ext.bases, non_ext.dict], - line) - return class_type_obj - - def load_decorated_class(self, cdef: ClassDef, type_obj: Value) -> Value: - """ - Given a decorated ClassDef and a register containing a non-extension representation of the - ClassDef created via the type constructor, applies the corresponding decorator functions - on that decorated ClassDef and returns a register containing the decorated ClassDef. - """ - decorators = cdef.decorators - dec_class = type_obj - for d in reversed(decorators): - decorator = d.accept(self) - assert isinstance(decorator, Value) - dec_class = self.py_call(decorator, [dec_class], dec_class.line) - return dec_class - - def populate_non_ext_bases(self, cdef: ClassDef) -> Value: - """ - Populate the base-class tuple passed to the metaclass constructor - for non-extension classes. 
- """ - ir = self.mapper.type_to_ir[cdef.info] - bases = [] - for cls in cdef.info.mro[1:]: - if cls.fullname == 'builtins.object': - continue - # Add the current class to the base classes list of concrete subclasses - if cls in self.mapper.type_to_ir: - base_ir = self.mapper.type_to_ir[cls] - if base_ir.children is not None: - base_ir.children.append(ir) - - base = self.load_global_str(cls.name, cdef.line) - bases.append(base) - return self.primitive_op(new_tuple_op, bases, cdef.line) + def visit_class_def(self, cdef: ClassDef) -> None: + BuildClassIR(self).visit_class_def(cdef) def add_to_non_ext_dict(self, non_ext: NonExtClassInfo, key: str, val: Value, line: int) -> None: @@ -1272,286 +1058,6 @@ def add_to_non_ext_dict(self, non_ext: NonExtClassInfo, key_unicode = self.load_static_unicode(key) self.primitive_op(dict_set_item_op, [non_ext.dict, key_unicode, val], line) - def add_non_ext_class_attr(self, non_ext: NonExtClassInfo, lvalue: NameExpr, - stmt: AssignmentStmt, cdef: ClassDef, - attr_to_cache: List[Lvalue]) -> None: - """ - Add a class attribute to __annotations__ of a non-extension class. If the - attribute is assigned to a value, it is also added to __dict__. - """ - - # We populate __annotations__ because dataclasses uses it to determine - # which attributes to compute on. - # TODO: Maybe generate more precise types for annotations - key = self.load_static_unicode(lvalue.name) - typ = self.primitive_op(type_object_op, [], stmt.line) - self.primitive_op(dict_set_item_op, [non_ext.anns, key, typ], stmt.line) - - # Only add the attribute to the __dict__ if the assignment is of the form: - # x: type = value (don't add attributes of the form 'x: type' to the __dict__). - if not isinstance(stmt.rvalue, TempNode): - rvalue = self.accept(stmt.rvalue) - self.add_to_non_ext_dict(non_ext, lvalue.name, rvalue, stmt.line) - # We cache enum attributes to speed up enum attribute lookup since they - # are final. 
- if ( - cdef.info.bases - and cdef.info.bases[0].type.fullname == 'enum.Enum' - # Skip "_order_" and "__order__", since Enum will remove it - and lvalue.name not in ('_order_', '__order__') - ): - attr_to_cache.append(lvalue) - - def find_non_ext_metaclass(self, cdef: ClassDef, bases: Value) -> Value: - """Find the metaclass of a class from its defs and bases. """ - if cdef.metaclass: - declared_metaclass = self.accept(cdef.metaclass) - else: - declared_metaclass = self.primitive_op(type_object_op, [], cdef.line) - - return self.primitive_op(py_calc_meta_op, [declared_metaclass, bases], cdef.line) - - def setup_non_ext_dict(self, cdef: ClassDef, metaclass: Value, bases: Value) -> Value: - """ - Initialize the class dictionary for a non-extension class. This class dictionary - is passed to the metaclass constructor. - """ - - # Check if the metaclass defines a __prepare__ method, and if so, call it. - has_prepare = self.primitive_op(py_hasattr_op, - [metaclass, - self.load_static_unicode('__prepare__')], cdef.line) - - non_ext_dict = self.alloc_temp(dict_rprimitive) - - true_block, false_block, exit_block, = BasicBlock(), BasicBlock(), BasicBlock() - self.add_bool_branch(has_prepare, true_block, false_block) - - self.activate_block(true_block) - cls_name = self.load_static_unicode(cdef.name) - prepare_meth = self.py_get_attr(metaclass, '__prepare__', cdef.line) - prepare_dict = self.py_call(prepare_meth, [cls_name, bases], cdef.line) - self.assign(non_ext_dict, prepare_dict, cdef.line) - self.goto(exit_block) - - self.activate_block(false_block) - self.assign(non_ext_dict, self.primitive_op(new_dict_op, [], cdef.line), cdef.line) - self.goto(exit_block) - self.activate_block(exit_block) - - return non_ext_dict - - def cache_class_attrs(self, attrs_to_cache: List[Lvalue], cdef: ClassDef) -> None: - """Add class attributes to be cached to the global cache""" - typ = self.load_native_type_object(cdef.fullname) - for lval in attrs_to_cache: - assert isinstance(lval, 
NameExpr) - rval = self.py_get_attr(typ, lval.name, cdef.line) - self.init_final_static(lval, rval, cdef.name) - - def dataclass_non_ext_info(self, cdef: ClassDef) -> Optional[NonExtClassInfo]: - """Set up a NonExtClassInfo to track dataclass attributes. - - In addition to setting up a normal extension class for dataclasses, - we also collect its class attributes like a non-extension class so - that we can hand them to the dataclass decorator. - """ - if is_dataclass(cdef): - return NonExtClassInfo( - self.primitive_op(new_dict_op, [], cdef.line), - self.add(TupleSet([], cdef.line)), - self.primitive_op(new_dict_op, [], cdef.line), - self.primitive_op(type_object_op, [], cdef.line), - ) - else: - return None - - def dataclass_finalize( - self, cdef: ClassDef, non_ext: NonExtClassInfo, type_obj: Value) -> None: - """Generate code to finish instantiating a dataclass. - - This works by replacing all of the attributes on the class - (which will be descriptors) with whatever they would be in a - non-extension class, calling dataclass, then switching them back. - - The resulting class is an extension class and instances of it do not - have a __dict__ (unless something else requires it). - All methods written explicitly in the source are compiled and - may be called through the vtable while the methods generated - by dataclasses are interpreted and may not be. - - (If we just called dataclass without doing this, it would think that all - of the descriptors for our attributes are default values and generate an - incorrect constructor. We need to do the switch so that dataclass gets the - appropriate defaults.) 
- """ - self.finish_non_ext_dict(non_ext, cdef.line) - dec = self.accept(next(d for d in cdef.decorators if is_dataclass_decorator(d))) - self.primitive_op( - dataclass_sleight_of_hand, [dec, type_obj, non_ext.dict, non_ext.anns], cdef.line) - - def visit_class_def(self, cdef: ClassDef) -> None: - ir = self.mapper.type_to_ir[cdef.info] - - # We do this check here because the base field of parent - # classes aren't necessarily populated yet at - # prepare_class_def time. - if any(ir.base_mro[i].base != ir. base_mro[i + 1] for i in range(len(ir.base_mro) - 1)): - self.error("Non-trait MRO must be linear", cdef.line) - - if ir.allow_interpreted_subclasses: - for parent in ir.mro: - if not parent.allow_interpreted_subclasses: - self.error( - 'Base class "{}" does not allow interpreted subclasses'.format( - parent.fullname), cdef.line) - - # Currently, we only create non-extension classes for classes that are - # decorated or inherit from Enum. Classes decorated with @trait do not - # apply here, and are handled in a different way. - if ir.is_ext_class: - # If the class is not decorated, generate an extension class for it. - type_obj = self.allocate_class(cdef) # type: Optional[Value] - non_ext = None # type: Optional[NonExtClassInfo] - dataclass_non_ext = self.dataclass_non_ext_info(cdef) - else: - non_ext_bases = self.populate_non_ext_bases(cdef) - non_ext_metaclass = self.find_non_ext_metaclass(cdef, non_ext_bases) - non_ext_dict = self.setup_non_ext_dict(cdef, non_ext_metaclass, non_ext_bases) - # We populate __annotations__ for non-extension classes - # because dataclasses uses it to determine which attributes to compute on. 
- # TODO: Maybe generate more precise types for annotations - non_ext_anns = self.primitive_op(new_dict_op, [], cdef.line) - non_ext = NonExtClassInfo(non_ext_dict, non_ext_bases, non_ext_anns, non_ext_metaclass) - dataclass_non_ext = None - type_obj = None - - attrs_to_cache = [] # type: List[Lvalue] - - for stmt in cdef.defs.body: - if isinstance(stmt, OverloadedFuncDef) and stmt.is_property: - if not ir.is_ext_class: - # properties with both getters and setters in non_extension - # classes not supported - self.error("Property setters not supported in non-extension classes", - stmt.line) - for item in stmt.items: - with self.catch_errors(stmt.line): - self.visit_method(cdef, non_ext, get_func_def(item)) - elif isinstance(stmt, (FuncDef, Decorator, OverloadedFuncDef)): - # Ignore plugin generated methods (since they have no - # bodies to compile and will need to have the bodies - # provided by some other mechanism.) - if cdef.info.names[stmt.name].plugin_generated: - continue - with self.catch_errors(stmt.line): - self.visit_method(cdef, non_ext, get_func_def(stmt)) - elif isinstance(stmt, PassStmt): - continue - elif isinstance(stmt, AssignmentStmt): - if len(stmt.lvalues) != 1: - self.error("Multiple assignment in class bodies not supported", stmt.line) - continue - lvalue = stmt.lvalues[0] - if not isinstance(lvalue, NameExpr): - self.error("Only assignment to variables is supported in class bodies", - stmt.line) - continue - # We want to collect class variables in a dictionary for both real - # non-extension classes and fake dataclass ones. - var_non_ext = non_ext or dataclass_non_ext - if var_non_ext: - self.add_non_ext_class_attr(var_non_ext, lvalue, stmt, cdef, attrs_to_cache) - if non_ext: - continue - # Variable declaration with no body - if isinstance(stmt.rvalue, TempNode): - continue - # Only treat marked class variables as class variables. 
- if not (is_class_var(lvalue) or stmt.is_final_def): - continue - typ = self.load_native_type_object(cdef.fullname) - value = self.accept(stmt.rvalue) - self.primitive_op( - py_setattr_op, [typ, self.load_static_unicode(lvalue.name), value], stmt.line) - if self.non_function_scope() and stmt.is_final_def: - self.init_final_static(lvalue, value, cdef.name) - elif isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, StrExpr): - # Docstring. Ignore - pass - else: - self.error("Unsupported statement in class body", stmt.line) - - if not non_ext: # That is, an extension class - self.generate_attr_defaults(cdef) - self.create_ne_from_eq(cdef) - if dataclass_non_ext: - assert type_obj - self.dataclass_finalize(cdef, dataclass_non_ext, type_obj) - else: - # Dynamically create the class via the type constructor - non_ext_class = self.load_non_ext_class(ir, non_ext, cdef.line) - non_ext_class = self.load_decorated_class(cdef, non_ext_class) - - # Save the decorated class - self.add(InitStatic(non_ext_class, cdef.name, self.module_name, NAMESPACE_TYPE)) - - # Add the non-extension class to the dict - self.primitive_op(dict_set_item_op, - [self.load_globals_dict(), self.load_static_unicode(cdef.name), - non_ext_class], cdef.line) - - # Cache any cachable class attributes - self.cache_class_attrs(attrs_to_cache, cdef) - - # Set this attribute back to None until the next non-extension class is visited. 
- self.non_ext_info = None - - def create_mypyc_attrs_tuple(self, ir: ClassIR, line: int) -> Value: - attrs = [name for ancestor in ir.mro for name in ancestor.attributes] - if ir.inherits_python: - attrs.append('__dict__') - return self.primitive_op(new_tuple_op, - [self.load_static_unicode(attr) for attr in attrs], - line) - - def allocate_class(self, cdef: ClassDef) -> Value: - # OK AND NOW THE FUN PART - base_exprs = cdef.base_type_exprs + cdef.removed_base_type_exprs - if base_exprs: - bases = [self.accept(x) for x in base_exprs] - tp_bases = self.primitive_op(new_tuple_op, bases, cdef.line) - else: - tp_bases = self.add(LoadErrorValue(object_rprimitive, is_borrowed=True)) - modname = self.load_static_unicode(self.module_name) - template = self.add(LoadStatic(object_rprimitive, cdef.name + "_template", - self.module_name, NAMESPACE_TYPE)) - # Create the class - tp = self.primitive_op(pytype_from_template_op, - [template, tp_bases, modname], cdef.line) - # Immediately fix up the trait vtables, before doing anything with the class. - ir = self.mapper.type_to_ir[cdef.info] - if not ir.is_trait and not ir.builtin_base: - self.add(Call( - FuncDecl(cdef.name + '_trait_vtable_setup', - None, self.module_name, - FuncSignature([], bool_rprimitive)), [], -1)) - # Populate a '__mypyc_attrs__' field containing the list of attrs - self.primitive_op(py_setattr_op, [ - tp, self.load_static_unicode('__mypyc_attrs__'), - self.create_mypyc_attrs_tuple(self.mapper.type_to_ir[cdef.info], cdef.line)], - cdef.line) - - # Save the class - self.add(InitStatic(tp, cdef.name, self.module_name, NAMESPACE_TYPE)) - - # Add it to the dict - self.primitive_op(dict_set_item_op, - [self.load_globals_dict(), self.load_static_unicode(cdef.name), - tp], cdef.line) - - return tp - def gen_import(self, id: str, line: int) -> None: self.imports[id] = None @@ -1680,7 +1186,7 @@ def f(self, x: object) -> int: ... If do_pycall is True, then make the call using the C API instead of a native call. 
""" - self.enter(FuncInfo()) + self.enter() self.ret_types[-1] = sig.ret_type rt_args = list(sig.args) @@ -1722,7 +1228,7 @@ def gen_glue_property(self, sig: FuncSignature, target: FuncIR, cls: ClassIR, ba If do_pygetattr is True, then get the attribute using the C API instead of a native call. """ - self.enter(FuncInfo()) + self.enter() rt_arg = RuntimeArg(SELF_NAME, RInstance(cls)) arg = self.read(self.add_self_to_env(cls), line) @@ -1750,49 +1256,6 @@ def assign_if_null(self, target: AssignmentTargetRegister, self.goto(body_block) self.activate_block(body_block) - def gen_glue_ne_method(self, cls: ClassIR, line: int) -> FuncIR: - """Generate a __ne__ method from a __eq__ method. """ - self.enter(FuncInfo()) - - rt_args = (RuntimeArg("self", RInstance(cls)), RuntimeArg("rhs", object_rprimitive)) - - # The environment operates on Vars, so we make some up - fake_vars = [(Var(arg.name), arg.type) for arg in rt_args] - args = [self.read(self.environment.add_local_reg(var, type, is_arg=True), line) - for var, type in fake_vars] # type: List[Value] - self.ret_types[-1] = object_rprimitive - - # If __eq__ returns NotImplemented, then __ne__ should also - not_implemented_block, regular_block = BasicBlock(), BasicBlock() - eqval = self.add(MethodCall(args[0], '__eq__', [args[1]], line)) - not_implemented = self.primitive_op(not_implemented_op, [], line) - self.add(Branch( - self.binary_op(eqval, not_implemented, 'is', line), - not_implemented_block, - regular_block, - Branch.BOOL_EXPR)) - - self.activate_block(regular_block) - retval = self.coerce(self.unary_op(eqval, 'not', line), object_rprimitive, line) - self.add(Return(retval)) - - self.activate_block(not_implemented_block) - self.add(Return(not_implemented)) - - blocks, env, ret_type, _ = self.leave() - return FuncIR( - FuncDecl('__ne__', cls.name, self.module_name, - FuncSignature(rt_args, ret_type)), - blocks, env) - - def create_ne_from_eq(self, cdef: ClassDef) -> None: - cls = self.mapper.type_to_ir[cdef.info] - 
if cls.has_method('__eq__') and not cls.has_method('__ne__'): - f = self.gen_glue_ne_method(cls, cdef.line) - cls.method_decls['__ne__'] = f.decl - cls.methods['__ne__'] = f - self.functions.append(f) - def calculate_arg_defaults(self, fn_info: FuncInfo, env: Environment, @@ -4537,7 +4000,9 @@ def visit_star_expr(self, o: StarExpr) -> Value: # Helpers - def enter(self, fn_info: FuncInfo) -> None: + def enter(self, fn_info: Union[FuncInfo, str] = '') -> None: + if isinstance(fn_info, str): + fn_info = FuncInfo(name=fn_info) self.environment = Environment(fn_info.name) self.environments.append(self.environment) self.fn_info = fn_info diff --git a/mypyc/genopsutil.py b/mypyc/genopsutil.py new file mode 100644 index 000000000000..a08646c03e38 --- /dev/null +++ b/mypyc/genopsutil.py @@ -0,0 +1,104 @@ +from typing import Dict, Any, Union, Optional + +from mypy.nodes import ( + ClassDef, FuncDef, Decorator, OverloadedFuncDef, StrExpr, CallExpr, RefExpr, Expression, + IntExpr, FloatExpr, ARG_NAMED, ARG_NAMED_OPT, ARG_POS, ARG_OPT +) + + +def is_trait_decorator(d: Expression) -> bool: + return isinstance(d, RefExpr) and d.fullname == 'mypy_extensions.trait' + + +def is_trait(cdef: ClassDef) -> bool: + return any(is_trait_decorator(d) for d in cdef.decorators) + + +def is_dataclass_decorator(d: Expression) -> bool: + return ( + (isinstance(d, RefExpr) and d.fullname == 'dataclasses.dataclass') + or ( + isinstance(d, CallExpr) + and isinstance(d.callee, RefExpr) + and d.callee.fullname == 'dataclasses.dataclass' + ) + ) + + +def is_dataclass(cdef: ClassDef) -> bool: + return any(is_dataclass_decorator(d) for d in cdef.decorators) + + +def get_mypyc_attr_literal(e: Expression) -> Any: + """Convert an expression from a mypyc_attr decorator to a value. 
+ + Supports a pretty limited range.""" + if isinstance(e, (StrExpr, IntExpr, FloatExpr)): + return e.value + elif isinstance(e, RefExpr) and e.fullname == 'builtins.True': + return True + elif isinstance(e, RefExpr) and e.fullname == 'builtins.False': + return False + elif isinstance(e, RefExpr) and e.fullname == 'builtins.None': + return None + return NotImplemented + + +def get_mypyc_attr_call(d: Expression) -> Optional[CallExpr]: + """Check if an expression is a call to mypyc_attr and return it if so.""" + if ( + isinstance(d, CallExpr) + and isinstance(d.callee, RefExpr) + and d.callee.fullname == 'mypy_extensions.mypyc_attr' + ): + return d + return None + + +def get_mypyc_attrs(stmt: Union[ClassDef, Decorator]) -> Dict[str, Any]: + """Collect all the mypyc_attr attributes on a class definition or a function.""" + attrs = {} # type: Dict[str, Any] + for dec in stmt.decorators: + d = get_mypyc_attr_call(dec) + if d: + for name, arg in zip(d.arg_names, d.args): + if name is None: + if isinstance(arg, StrExpr): + attrs[arg.value] = True + else: + attrs[name] = get_mypyc_attr_literal(arg) + + return attrs + + +def is_extension_class(cdef: ClassDef) -> bool: + if any( + not is_trait_decorator(d) + and not is_dataclass_decorator(d) + and not get_mypyc_attr_call(d) + for d in cdef.decorators + ): + return False + elif (cdef.info.metaclass_type and cdef.info.metaclass_type.type.fullname not in ( + 'abc.ABCMeta', 'typing.TypingMeta', 'typing.GenericMeta')): + return False + return True + + +def get_func_def(op: Union[FuncDef, Decorator, OverloadedFuncDef]) -> FuncDef: + if isinstance(op, OverloadedFuncDef): + assert op.impl + op = op.impl + if isinstance(op, Decorator): + op = op.func + return op + + +def concrete_arg_kind(kind: int) -> int: + """Find the concrete version of an arg kind that is being passed.""" + if kind == ARG_OPT: + return ARG_POS + elif kind == ARG_NAMED_OPT: + return ARG_NAMED + else: + return kind From a8f06c8651a4dd9b97f4989df70791b57a14e7f6 Mon 
Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sun, 16 Feb 2020 19:32:56 +0000 Subject: [PATCH 093/117] [mypyc] Refactor: extract function related genops to new modules (#8410) The changes are fairly minimal beyond moving things around. There's more refactoring that can be done later, such as removing the import cycle. --- mypyc/genclass.py | 8 +- mypyc/genfunc.py | 1307 +++++++++++++++++++++++++++++++++++++ mypyc/genops.py | 1402 +--------------------------------------- mypyc/genopscontext.py | 167 +++++ mypyc/genopsutil.py | 30 +- 5 files changed, 1526 insertions(+), 1388 deletions(-) create mode 100644 mypyc/genfunc.py create mode 100644 mypyc/genopscontext.py diff --git a/mypyc/genclass.py b/mypyc/genclass.py index 44bdd1ea0ea0..a8d282477d03 100644 --- a/mypyc/genclass.py +++ b/mypyc/genclass.py @@ -20,7 +20,9 @@ ) from mypyc.ops_dict import dict_set_item_op, new_dict_op from mypyc.ops_tuple import new_tuple_op -from mypyc.genopsutil import is_dataclass_decorator, get_func_def, is_dataclass +from mypyc.genopsutil import ( + is_dataclass_decorator, get_func_def, is_dataclass, is_constant, add_self_to_env +) from mypyc.common import SELF_NAME MYPY = False @@ -316,12 +318,12 @@ def generate_attr_defaults(self, cdef: ClassDef) -> None: self.builder.ret_types[-1] = bool_rprimitive rt_args = (RuntimeArg(SELF_NAME, RInstance(cls)),) - self_var = self.builder.read(self.builder.add_self_to_env(cls), -1) + self_var = self.builder.read(add_self_to_env(self.builder.environment, cls), -1) for stmt in default_assignments: lvalue = stmt.lvalues[0] assert isinstance(lvalue, NameExpr) - if not stmt.is_final_def and not self.builder.is_constant(stmt.rvalue): + if not stmt.is_final_def and not is_constant(stmt.rvalue): self.builder.warning('Unsupported default attribute value', stmt.rvalue.line) # If the attribute is initialized to None and type isn't optional, diff --git a/mypyc/genfunc.py b/mypyc/genfunc.py new file mode 100644 index 000000000000..b9008874391f --- /dev/null 
+++ b/mypyc/genfunc.py @@ -0,0 +1,1307 @@ +"""Transform mypy AST functions to IR (and related things). + +This also deals with generators, async functions and nested functions. +""" + +from typing import Optional, List, Tuple, Union + +from mypy.nodes import ( + ClassDef, FuncDef, OverloadedFuncDef, Decorator, Var, YieldFromExpr, AwaitExpr, YieldExpr, + FuncItem, SymbolNode, LambdaExpr, ARG_OPT +) +from mypy.types import CallableType, get_proper_type +from mypyc.ops import ( + BasicBlock, FuncSignature, Value, FuncIR, ClassIR, RuntimeArg, object_rprimitive, FuncDecl, + Return, Call, SetAttr, LoadInt, NonExtClassInfo, Op, Unreachable, RaiseStandardError, RType, + Environment, GetAttr, Register, Branch, AssignmentTarget, TupleGet, OpDescription, Goto, + int_rprimitive, RInstance, AssignmentTargetRegister, AssignmentTargetAttr, LoadStatic, + InitStatic, FUNC_CLASSMETHOD, FUNC_STATICMETHOD, FUNC_NORMAL +) +from mypyc.ops_misc import ( + check_stop_op, yield_from_except_op, next_raw_op, iter_op, coro_op, send_op, py_setattr_op, + method_new_op +) +from mypyc.ops_exc import raise_exception_with_tb_op +from mypyc.ops_dict import dict_set_item_op +from mypyc.common import ( + SELF_NAME, ENV_ATTR_NAME, NEXT_LABEL_ATTR_NAME, LAMBDA_NAME, decorator_helper_name +) +from mypyc.sametype import is_same_method_signature +from mypyc.genopsutil import concrete_arg_kind, is_constant, add_self_to_env +from mypyc.genopscontext import FuncInfo, GeneratorClass, ImplicitClass + +MYPY = False +if MYPY: + from mypyc.genops import IRBuilder + + +class BuildFuncIR: + def __init__(self, builder: 'IRBuilder') -> None: + self.builder = builder + self.module_name = builder.module_name + self.environments = builder.environments + self.functions = builder.functions + self.mapper = builder.mapper + + # Top-level visit functions + + def visit_func_def(self, fdef: FuncDef) -> None: + func_ir, func_reg = self.gen_func_item(fdef, fdef.name, self.mapper.fdef_to_sig(fdef)) + + # If the function that was 
visited was a nested function, then either look it up in our + # current environment or define it if it was not already defined. + if func_reg: + self.assign(self.get_func_target(fdef), func_reg, fdef.line) + self.functions.append(func_ir) + + def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: + # Handle regular overload case + assert o.impl + self.builder.accept(o.impl) + + def visit_decorator(self, dec: Decorator) -> None: + func_ir, func_reg = self.gen_func_item(dec.func, dec.func.name, + self.mapper.fdef_to_sig(dec.func)) + + if dec.func in self.builder.nested_fitems: + assert func_reg is not None + decorated_func = self.load_decorated_func(dec.func, func_reg) + self.assign(self.get_func_target(dec.func), decorated_func, dec.func.line) + func_reg = decorated_func + else: + # Obtain the the function name in order to construct the name of the helper function. + name = dec.func.fullname.split('.')[-1] + helper_name = decorator_helper_name(name) + + # Load the callable object representing the non-decorated function, and decorate it. + orig_func = self.builder.load_global_str(helper_name, dec.line) + decorated_func = self.load_decorated_func(dec.func, orig_func) + + # Set the callable object representing the decorated function as a global. 
+ self.primitive_op(dict_set_item_op, + [self.builder.load_globals_dict(), + self.builder.load_static_unicode(dec.func.name), decorated_func], + decorated_func.line) + + self.functions.append(func_ir) + + def visit_method( + self, cdef: ClassDef, non_ext: Optional[NonExtClassInfo], fdef: FuncDef) -> None: + if non_ext: + self.handle_non_ext_method(non_ext, cdef, fdef) + else: + self.handle_ext_method(cdef, fdef) + + def visit_lambda_expr(self, expr: LambdaExpr) -> Value: + typ = get_proper_type(self.builder.types[expr]) + assert isinstance(typ, CallableType) + + runtime_args = [] + for arg, arg_type in zip(expr.arguments, typ.arg_types): + arg.variable.type = arg_type + runtime_args.append( + RuntimeArg(arg.variable.name, self.builder.type_to_rtype(arg_type), arg.kind)) + ret_type = self.builder.type_to_rtype(typ.ret_type) + + fsig = FuncSignature(runtime_args, ret_type) + + fname = '{}{}'.format(LAMBDA_NAME, self.builder.lambda_counter) + self.builder.lambda_counter += 1 + func_ir, func_reg = self.gen_func_item(expr, fname, fsig) + assert func_reg is not None + + self.functions.append(func_ir) + return func_reg + + def visit_yield_expr(self, expr: YieldExpr) -> Value: + if expr.expr: + retval = self.builder.accept(expr.expr) + else: + retval = self.builder.none() + return self.emit_yield(retval, expr.line) + + def visit_yield_from_expr(self, o: YieldFromExpr) -> Value: + return self.handle_yield_from_and_await(o) + + def visit_await_expr(self, o: AwaitExpr) -> Value: + return self.handle_yield_from_and_await(o) + + # Internal functions + + def gen_func_item(self, + fitem: FuncItem, + name: str, + sig: FuncSignature, + cdef: Optional[ClassDef] = None, + ) -> Tuple[FuncIR, Optional[Value]]: + # TODO: do something about abstract methods. + + """Generates and returns the FuncIR for a given FuncDef. + + If the given FuncItem is a nested function, then we generate a callable class representing + the function and use that instead of the actual function. 
if the given FuncItem contains a + nested function, then we generate an environment class so that inner nested functions can + access the environment of the given FuncDef. + + Consider the following nested function. + def a() -> None: + def b() -> None: + def c() -> None: + return None + return None + return None + + The classes generated would look something like the following. + + has pointer to +-------+ + +--------------------------> | a_env | + | +-------+ + | ^ + | | has pointer to + +-------+ associated with +-------+ + | b_obj | -------------------> | b_env | + +-------+ +-------+ + ^ + | + +-------+ has pointer to | + | c_obj | --------------------------+ + +-------+ + """ + + func_reg = None # type: Optional[Value] + + # We treat lambdas as always being nested because we always generate + # a class for lambdas, no matter where they are. (It would probably also + # work to special case toplevel lambdas and generate a non-class function.) + is_nested = fitem in self.builder.nested_fitems or isinstance(fitem, LambdaExpr) + contains_nested = fitem in self.builder.encapsulating_funcs.keys() + is_decorated = fitem in self.builder.fdefs_to_decorators + in_non_ext = False + class_name = None + if cdef: + ir = self.mapper.type_to_ir[cdef.info] + in_non_ext = not ir.is_ext_class + class_name = cdef.name + + self.enter(FuncInfo(fitem, name, class_name, self.gen_func_ns(), + is_nested, contains_nested, is_decorated, in_non_ext)) + + # Functions that contain nested functions need an environment class to store variables that + # are free in their nested functions. Generator functions need an environment class to + # store a variable denoting the next instruction to be executed when the __next__ function + # is called, along with all the variables inside the function itself. 
+ if self.fn_info.contains_nested or self.fn_info.is_generator: + self.setup_env_class() + + if self.fn_info.is_nested or self.fn_info.in_non_ext: + self.setup_callable_class() + + if self.fn_info.is_generator: + # Do a first-pass and generate a function that just returns a generator object. + self.gen_generator_func() + blocks, env, ret_type, fn_info = self.leave() + func_ir, func_reg = self.gen_func_ir(blocks, sig, env, fn_info, cdef) + + # Re-enter the FuncItem and visit the body of the function this time. + self.enter(fn_info) + self.setup_env_for_generator_class() + self.load_outer_envs(self.fn_info.generator_class) + if self.fn_info.is_nested and isinstance(fitem, FuncDef): + self.setup_func_for_recursive_call(fitem, self.fn_info.generator_class) + self.create_switch_for_generator_class() + self.add_raise_exception_blocks_to_generator_class(fitem.line) + else: + self.load_env_registers() + self.gen_arg_defaults() + + if self.fn_info.contains_nested and not self.fn_info.is_generator: + self.finalize_env_class() + + self.builder.ret_types[-1] = sig.ret_type + + # Add all variables and functions that are declared/defined within this + # function and are referenced in functions nested within this one to this + # function's environment class so the nested functions can reference + # them even if they are declared after the nested function's definition. + # Note that this is done before visiting the body of this function. 
+ + env_for_func = self.fn_info # type: Union[FuncInfo, ImplicitClass] + if self.fn_info.is_generator: + env_for_func = self.fn_info.generator_class + elif self.fn_info.is_nested or self.fn_info.in_non_ext: + env_for_func = self.fn_info.callable_class + + if self.fn_info.fitem in self.builder.free_variables: + # Sort the variables to keep things deterministic + for var in sorted(self.builder.free_variables[self.fn_info.fitem], + key=lambda x: x.name): + if isinstance(var, Var): + rtype = self.builder.type_to_rtype(var.type) + self.builder.add_var_to_env_class(var, rtype, env_for_func, reassign=False) + + if self.fn_info.fitem in self.builder.encapsulating_funcs: + for nested_fn in self.builder.encapsulating_funcs[self.fn_info.fitem]: + if isinstance(nested_fn, FuncDef): + # The return type is 'object' instead of an RInstance of the + # callable class because differently defined functions with + # the same name and signature across conditional blocks + # will generate different callable classes, so the callable + # class that gets instantiated must be generic. 
+ self.builder.add_var_to_env_class(nested_fn, object_rprimitive, + env_for_func, reassign=False) + + self.builder.accept(fitem.body) + self.builder.maybe_add_implicit_return() + + if self.fn_info.is_generator: + self.populate_switch_for_generator_class() + + blocks, env, ret_type, fn_info = self.leave() + + if fn_info.is_generator: + helper_fn_decl = self.add_helper_to_generator_class(blocks, sig, env, fn_info) + self.add_next_to_generator_class(fn_info, helper_fn_decl, sig) + self.add_send_to_generator_class(fn_info, helper_fn_decl, sig) + self.add_iter_to_generator_class(fn_info) + self.add_throw_to_generator_class(fn_info, helper_fn_decl, sig) + self.add_close_to_generator_class(fn_info) + if fitem.is_coroutine: + self.add_await_to_generator_class(fn_info) + + else: + func_ir, func_reg = self.gen_func_ir(blocks, sig, env, fn_info, cdef) + + self.calculate_arg_defaults(fn_info, env, func_reg) + + return (func_ir, func_reg) + + def gen_func_ir(self, + blocks: List[BasicBlock], + sig: FuncSignature, + env: Environment, + fn_info: FuncInfo, + cdef: Optional[ClassDef]) -> Tuple[FuncIR, Optional[Value]]: + """Generates the FuncIR for a function given the blocks, environment, and function info of + a particular function and returns it. If the function is nested, also returns the register + containing the instance of the corresponding callable class. 
+ """ + func_reg = None # type: Optional[Value] + if fn_info.is_nested or fn_info.in_non_ext: + func_ir = self.add_call_to_callable_class(blocks, sig, env, fn_info) + self.add_get_to_callable_class(fn_info) + func_reg = self.instantiate_callable_class(fn_info) + else: + assert isinstance(fn_info.fitem, FuncDef) + func_decl = self.mapper.func_to_decl[fn_info.fitem] + if fn_info.is_decorated: + class_name = None if cdef is None else cdef.name + func_decl = FuncDecl(fn_info.name, class_name, self.module_name, sig, + func_decl.kind, + func_decl.is_prop_getter, func_decl.is_prop_setter) + func_ir = FuncIR(func_decl, blocks, env, fn_info.fitem.line, + traceback_name=fn_info.fitem.name) + else: + func_ir = FuncIR(func_decl, blocks, env, + fn_info.fitem.line, traceback_name=fn_info.fitem.name) + return (func_ir, func_reg) + + def handle_ext_method(self, cdef: ClassDef, fdef: FuncDef) -> None: + # Perform the function of visit_method for methods inside extension classes. + name = fdef.name + class_ir = self.mapper.type_to_ir[cdef.info] + func_ir, func_reg = self.gen_func_item(fdef, name, self.mapper.fdef_to_sig(fdef), cdef) + self.functions.append(func_ir) + + if self.is_decorated(fdef): + # Obtain the the function name in order to construct the name of the helper function. + _, _, name = fdef.fullname.rpartition('.') + helper_name = decorator_helper_name(name) + # Read the PyTypeObject representing the class, get the callable object + # representing the non-decorated method + typ = self.builder.load_native_type_object(cdef.fullname) + orig_func = self.builder.py_get_attr(typ, helper_name, fdef.line) + + # Decorate the non-decorated method + decorated_func = self.load_decorated_func(fdef, orig_func) + + # Set the callable object representing the decorated method as an attribute of the + # extension class. 
+ self.primitive_op(py_setattr_op, + [ + typ, + self.builder.load_static_unicode(name), + decorated_func + ], + fdef.line) + + if fdef.is_property: + # If there is a property setter, it will be processed after the getter, + # We populate the optional setter field with none for now. + assert name not in class_ir.properties + class_ir.properties[name] = (func_ir, None) + + elif fdef in self.builder.prop_setters: + # The respective property getter must have been processed already + assert name in class_ir.properties + getter_ir, _ = class_ir.properties[name] + class_ir.properties[name] = (getter_ir, func_ir) + + class_ir.methods[func_ir.decl.name] = func_ir + + # If this overrides a parent class method with a different type, we need + # to generate a glue method to mediate between them. + for base in class_ir.mro[1:]: + if (name in base.method_decls and name != '__init__' + and not is_same_method_signature(class_ir.method_decls[name].sig, + base.method_decls[name].sig)): + + # TODO: Support contravariant subtyping in the input argument for + # property setters. Need to make a special glue method for handling this, + # similar to gen_glue_property. + + f = self.gen_glue(base.method_decls[name].sig, func_ir, class_ir, base, fdef) + class_ir.glue_methods[(base, name)] = f + self.functions.append(f) + + # If the class allows interpreted children, create glue + # methods that dispatch via the Python API. These will go in a + # "shadow vtable" that will be assigned to interpreted + # children. + if class_ir.allow_interpreted_subclasses: + f = self.gen_glue(func_ir.sig, func_ir, class_ir, class_ir, fdef, do_py_ops=True) + class_ir.glue_methods[(class_ir, name)] = f + self.functions.append(f) + + def handle_non_ext_method( + self, non_ext: NonExtClassInfo, cdef: ClassDef, fdef: FuncDef) -> None: + # Perform the function of visit_method for methods inside non-extension classes. 
+ name = fdef.name + func_ir, func_reg = self.gen_func_item(fdef, name, self.mapper.fdef_to_sig(fdef), cdef) + assert func_reg is not None + self.functions.append(func_ir) + + if self.is_decorated(fdef): + # The undecorated method is a generated callable class + orig_func = func_reg + func_reg = self.load_decorated_func(fdef, orig_func) + + # TODO: Support property setters in non-extension classes + if fdef.is_property: + prop = self.builder.load_module_attr_by_fullname('builtins.property', fdef.line) + func_reg = self.builder.py_call(prop, [func_reg], fdef.line) + + elif self.mapper.func_to_decl[fdef].kind == FUNC_CLASSMETHOD: + cls_meth = self.builder.load_module_attr_by_fullname('builtins.classmethod', fdef.line) + func_reg = self.builder.py_call(cls_meth, [func_reg], fdef.line) + + elif self.mapper.func_to_decl[fdef].kind == FUNC_STATICMETHOD: + stat_meth = self.builder.load_module_attr_by_fullname( + 'builtins.staticmethod', fdef.line + ) + func_reg = self.builder.py_call(stat_meth, [func_reg], fdef.line) + + self.builder.add_to_non_ext_dict(non_ext, name, func_reg, fdef.line) + + def gen_arg_defaults(self) -> None: + """Generate blocks for arguments that have default values. + + If the passed value is an error value, then assign the default + value to the argument. + """ + fitem = self.fn_info.fitem + for arg in fitem.arguments: + if arg.initializer: + target = self.environment.lookup(arg.variable) + + def get_default() -> Value: + assert arg.initializer is not None + + # If it is constant, don't bother storing it + if is_constant(arg.initializer): + return self.builder.accept(arg.initializer) + + # Because gen_arg_defaults runs before calculate_arg_defaults, we + # add the static/attribute to final_names/the class here. + elif not self.fn_info.is_nested: + name = fitem.fullname + '.' 
+ arg.variable.name + self.builder.final_names.append((name, target.type)) + return self.add(LoadStatic(target.type, name, self.module_name)) + else: + name = arg.variable.name + self.fn_info.callable_class.ir.attributes[name] = target.type + return self.add( + GetAttr(self.fn_info.callable_class.self_reg, name, arg.line)) + assert isinstance(target, AssignmentTargetRegister) + self.builder.assign_if_null(target, + get_default, + arg.initializer.line) + + def calculate_arg_defaults(self, + fn_info: FuncInfo, + env: Environment, + func_reg: Optional[Value]) -> None: + """Calculate default argument values and store them. + + They are stored in statics for top level functions and in + the function objects for nested functions (while constants are + still stored computed on demand). + """ + fitem = fn_info.fitem + for arg in fitem.arguments: + # Constant values don't get stored but just recomputed + if arg.initializer and not is_constant(arg.initializer): + value = self.builder.coerce( + self.builder.accept(arg.initializer), + env.lookup(arg.variable).type, + arg.line + ) + if not fn_info.is_nested: + name = fitem.fullname + '.' + arg.variable.name + self.add(InitStatic(value, name, self.module_name)) + else: + assert func_reg is not None + self.add(SetAttr(func_reg, arg.variable.name, value, arg.line)) + + def gen_generator_func(self) -> None: + self.setup_generator_class() + self.load_env_registers() + self.gen_arg_defaults() + self.finalize_env_class() + self.add(Return(self.instantiate_generator_class())) + + def instantiate_generator_class(self) -> Value: + fitem = self.fn_info.fitem + generator_reg = self.add(Call(self.fn_info.generator_class.ir.ctor, [], fitem.line)) + + # Get the current environment register. If the current function is nested, then the + # generator class gets instantiated from the callable class' '__call__' method, and hence + # we use the callable class' environment register. Otherwise, we use the original + # function's environment register. 
+ if self.fn_info.is_nested: + curr_env_reg = self.fn_info.callable_class.curr_env_reg + else: + curr_env_reg = self.fn_info.curr_env_reg + + # Set the generator class' environment attribute to point at the environment class + # defined in the current scope. + self.add(SetAttr(generator_reg, ENV_ATTR_NAME, curr_env_reg, fitem.line)) + + # Set the generator class' environment class' NEXT_LABEL_ATTR_NAME attribute to 0. + zero_reg = self.add(LoadInt(0)) + self.add(SetAttr(curr_env_reg, NEXT_LABEL_ATTR_NAME, zero_reg, fitem.line)) + return generator_reg + + def setup_generator_class(self) -> ClassIR: + name = '{}_gen'.format(self.fn_info.namespaced_name()) + + generator_class_ir = ClassIR(name, self.module_name, is_generated=True) + generator_class_ir.attributes[ENV_ATTR_NAME] = RInstance(self.fn_info.env_class) + generator_class_ir.mro = [generator_class_ir] + + self.builder.classes.append(generator_class_ir) + self.fn_info.generator_class = GeneratorClass(generator_class_ir) + return generator_class_ir + + def create_switch_for_generator_class(self) -> None: + self.add(Goto(self.fn_info.generator_class.switch_block)) + self.fn_info.generator_class.blocks.append(self.builder.new_block()) + + def populate_switch_for_generator_class(self) -> None: + cls = self.fn_info.generator_class + line = self.fn_info.fitem.line + + self.builder.activate_block(cls.switch_block) + for label, true_block in enumerate(cls.blocks): + false_block = BasicBlock() + comparison = self.builder.binary_op( + cls.next_label_reg, self.add(LoadInt(label)), '==', line + ) + self.builder.add_bool_branch(comparison, true_block, false_block) + self.builder.activate_block(false_block) + + self.add(RaiseStandardError(RaiseStandardError.STOP_ITERATION, None, line)) + self.add(Unreachable()) + + def add_raise_exception_blocks_to_generator_class(self, line: int) -> None: + """ + Generates blocks to check if error flags are set while calling the helper method for + generator functions, and raises an 
exception if those flags are set. + """ + cls = self.fn_info.generator_class + assert cls.exc_regs is not None + exc_type, exc_val, exc_tb = cls.exc_regs + + # Check to see if an exception was raised. + error_block = BasicBlock() + ok_block = BasicBlock() + comparison = self.builder.binary_op(exc_type, self.builder.none_object(), 'is not', line) + self.builder.add_bool_branch(comparison, error_block, ok_block) + + self.builder.activate_block(error_block) + self.primitive_op(raise_exception_with_tb_op, [exc_type, exc_val, exc_tb], line) + self.add(Unreachable()) + self.builder.goto_and_activate(ok_block) + + def add_helper_to_generator_class(self, + blocks: List[BasicBlock], + sig: FuncSignature, + env: Environment, + fn_info: FuncInfo) -> FuncDecl: + """Generates a helper method for a generator class, called by '__next__' and 'throw'.""" + sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive), + RuntimeArg('type', object_rprimitive), + RuntimeArg('value', object_rprimitive), + RuntimeArg('traceback', object_rprimitive), + RuntimeArg('arg', object_rprimitive) + ), sig.ret_type) + helper_fn_decl = FuncDecl('__mypyc_generator_helper__', fn_info.generator_class.ir.name, + self.module_name, sig) + helper_fn_ir = FuncIR(helper_fn_decl, blocks, env, + fn_info.fitem.line, traceback_name=fn_info.fitem.name) + fn_info.generator_class.ir.methods['__mypyc_generator_helper__'] = helper_fn_ir + self.functions.append(helper_fn_ir) + return helper_fn_decl + + def add_iter_to_generator_class(self, fn_info: FuncInfo) -> None: + """Generates the '__iter__' method for a generator class.""" + self.enter(fn_info) + self_target = add_self_to_env(self.environment, fn_info.generator_class.ir) + self.add(Return(self.read(self_target, fn_info.fitem.line))) + blocks, env, _, fn_info = self.leave() + + # Next, add the actual function as a method of the generator class. 
+ sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),), object_rprimitive) + iter_fn_decl = FuncDecl('__iter__', fn_info.generator_class.ir.name, self.module_name, sig) + iter_fn_ir = FuncIR(iter_fn_decl, blocks, env) + fn_info.generator_class.ir.methods['__iter__'] = iter_fn_ir + self.functions.append(iter_fn_ir) + + def add_next_to_generator_class(self, + fn_info: FuncInfo, + fn_decl: FuncDecl, + sig: FuncSignature) -> None: + """Generates the '__next__' method for a generator class.""" + self.enter(fn_info) + self_reg = self.read(add_self_to_env(self.environment, fn_info.generator_class.ir)) + none_reg = self.builder.none_object() + + # Call the helper function with error flags set to Py_None, and return that result. + result = self.add(Call(fn_decl, [self_reg, none_reg, none_reg, none_reg, none_reg], + fn_info.fitem.line)) + self.add(Return(result)) + blocks, env, _, fn_info = self.leave() + + sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),), sig.ret_type) + next_fn_decl = FuncDecl('__next__', fn_info.generator_class.ir.name, self.module_name, sig) + next_fn_ir = FuncIR(next_fn_decl, blocks, env) + fn_info.generator_class.ir.methods['__next__'] = next_fn_ir + self.functions.append(next_fn_ir) + + def add_send_to_generator_class(self, + fn_info: FuncInfo, + fn_decl: FuncDecl, + sig: FuncSignature) -> None: + """Generates the 'send' method for a generator class.""" + # FIXME: this is basically the same as add_next... + self.enter(fn_info) + self_reg = self.read(add_self_to_env(self.environment, fn_info.generator_class.ir)) + arg = self.environment.add_local_reg(Var('arg'), object_rprimitive, True) + none_reg = self.builder.none_object() + + # Call the helper function with error flags set to Py_None, and return that result. 
+ result = self.add(Call(fn_decl, [self_reg, none_reg, none_reg, none_reg, self.read(arg)], + fn_info.fitem.line)) + self.add(Return(result)) + blocks, env, _, fn_info = self.leave() + + sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive), + RuntimeArg('arg', object_rprimitive),), sig.ret_type) + next_fn_decl = FuncDecl('send', fn_info.generator_class.ir.name, self.module_name, sig) + next_fn_ir = FuncIR(next_fn_decl, blocks, env) + fn_info.generator_class.ir.methods['send'] = next_fn_ir + self.functions.append(next_fn_ir) + + def add_throw_to_generator_class(self, + fn_info: FuncInfo, + fn_decl: FuncDecl, + sig: FuncSignature) -> None: + """Generates the 'throw' method for a generator class.""" + self.enter(fn_info) + self_reg = self.read(add_self_to_env(self.environment, fn_info.generator_class.ir)) + + # Add the type, value, and traceback variables to the environment. + typ = self.environment.add_local_reg(Var('type'), object_rprimitive, True) + val = self.environment.add_local_reg(Var('value'), object_rprimitive, True) + tb = self.environment.add_local_reg(Var('traceback'), object_rprimitive, True) + + # Because the value and traceback arguments are optional and hence can be NULL if not + # passed in, we have to assign them Py_None if they are not passed in. + none_reg = self.builder.none_object() + self.builder.assign_if_null(val, lambda: none_reg, self.fn_info.fitem.line) + self.builder.assign_if_null(tb, lambda: none_reg, self.fn_info.fitem.line) + + # Call the helper function using the arguments passed in, and return that result. + result = self.add(Call(fn_decl, + [self_reg, self.read(typ), self.read(val), self.read(tb), none_reg], + fn_info.fitem.line)) + self.add(Return(result)) + blocks, env, _, fn_info = self.leave() + + # Create the FuncSignature for the throw function. NOte that the value and traceback fields + # are optional, and are assigned to if they are not passed in inside the body of the throw + # function. 
+ sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive), + RuntimeArg('type', object_rprimitive), + RuntimeArg('value', object_rprimitive, ARG_OPT), + RuntimeArg('traceback', object_rprimitive, ARG_OPT)), + sig.ret_type) + + throw_fn_decl = FuncDecl('throw', fn_info.generator_class.ir.name, self.module_name, sig) + throw_fn_ir = FuncIR(throw_fn_decl, blocks, env) + fn_info.generator_class.ir.methods['throw'] = throw_fn_ir + self.functions.append(throw_fn_ir) + + def add_close_to_generator_class(self, fn_info: FuncInfo) -> None: + """Generates the '__close__' method for a generator class.""" + # TODO: Currently this method just triggers a runtime error, + # we should fill this out eventually. + self.enter(fn_info) + add_self_to_env(self.environment, fn_info.generator_class.ir) + self.add(RaiseStandardError(RaiseStandardError.RUNTIME_ERROR, + 'close method on generator classes uimplemented', + fn_info.fitem.line)) + self.add(Unreachable()) + blocks, env, _, fn_info = self.leave() + + # Next, add the actual function as a method of the generator class. + sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),), object_rprimitive) + close_fn_decl = FuncDecl('close', fn_info.generator_class.ir.name, self.module_name, sig) + close_fn_ir = FuncIR(close_fn_decl, blocks, env) + fn_info.generator_class.ir.methods['close'] = close_fn_ir + self.functions.append(close_fn_ir) + + def add_await_to_generator_class(self, fn_info: FuncInfo) -> None: + """Generates the '__await__' method for a generator class.""" + self.enter(fn_info) + self_target = add_self_to_env(self.environment, fn_info.generator_class.ir) + self.add(Return(self.read(self_target, fn_info.fitem.line))) + blocks, env, _, fn_info = self.leave() + + # Next, add the actual function as a method of the generator class. 
+ sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),), object_rprimitive) + await_fn_decl = FuncDecl('__await__', fn_info.generator_class.ir.name, + self.module_name, sig) + await_fn_ir = FuncIR(await_fn_decl, blocks, env) + fn_info.generator_class.ir.methods['__await__'] = await_fn_ir + self.functions.append(await_fn_ir) + + def setup_env_for_generator_class(self) -> None: + """Populates the environment for a generator class.""" + fitem = self.fn_info.fitem + cls = self.fn_info.generator_class + self_target = add_self_to_env(self.environment, cls.ir) + + # Add the type, value, and traceback variables to the environment. + exc_type = self.environment.add_local(Var('type'), object_rprimitive, is_arg=True) + exc_val = self.environment.add_local(Var('value'), object_rprimitive, is_arg=True) + exc_tb = self.environment.add_local(Var('traceback'), object_rprimitive, is_arg=True) + # TODO: Use the right type here instead of object? + exc_arg = self.environment.add_local(Var('arg'), object_rprimitive, is_arg=True) + + cls.exc_regs = (exc_type, exc_val, exc_tb) + cls.send_arg_reg = exc_arg + + cls.self_reg = self.read(self_target, fitem.line) + cls.curr_env_reg = self.load_outer_env(cls.self_reg, self.environment) + + # Define a variable representing the label to go to the next time the '__next__' function + # of the generator is called, and add it as an attribute to the environment class. + cls.next_label_target = self.builder.add_var_to_env_class( + Var(NEXT_LABEL_ATTR_NAME), + int_rprimitive, + cls, + reassign=False + ) + + # Add arguments from the original generator function to the generator class' environment. + self.add_args_to_env(local=False, base=cls, reassign=False) + + # Set the next label register for the generator class. 
+ cls.next_label_reg = self.read(cls.next_label_target, fitem.line) + + def setup_func_for_recursive_call(self, fdef: FuncDef, base: ImplicitClass) -> None: + """ + Adds the instance of the callable class representing the given FuncDef to a register in the + environment so that the function can be called recursively. Note that this needs to be done + only for nested functions. + """ + # First, set the attribute of the environment class so that GetAttr can be called on it. + prev_env = self.builder.fn_infos[-2].env_class + prev_env.attributes[fdef.name] = self.builder.type_to_rtype(fdef.type) + + if isinstance(base, GeneratorClass): + # If we are dealing with a generator class, then we need to first get the register + # holding the current environment class, and load the previous environment class from + # there. + prev_env_reg = self.add(GetAttr(base.curr_env_reg, ENV_ATTR_NAME, -1)) + else: + prev_env_reg = base.prev_env_reg + + # Obtain the instance of the callable class representing the FuncDef, and add it to the + # current environment. + val = self.add(GetAttr(prev_env_reg, fdef.name, -1)) + target = self.environment.add_local_reg(fdef, object_rprimitive) + self.assign(target, val, -1) + + def gen_func_ns(self) -> str: + """Generates a namespace for a nested function using its outer function names.""" + return '_'.join(info.name + ('' if not info.class_name else '_' + info.class_name) + for info in self.builder.fn_infos + if info.name and info.name != '') + + def emit_yield(self, val: Value, line: int) -> Value: + retval = self.builder.coerce(val, self.builder.ret_types[-1], line) + + cls = self.fn_info.generator_class + # Create a new block for the instructions immediately following the yield expression, and + # set the next label so that the next time '__next__' is called on the generator object, + # the function continues at the new block. 
+ next_block = BasicBlock() + next_label = len(cls.blocks) + cls.blocks.append(next_block) + self.assign(cls.next_label_target, self.add(LoadInt(next_label)), line) + self.add(Return(retval)) + self.builder.activate_block(next_block) + + self.add_raise_exception_blocks_to_generator_class(line) + + assert cls.send_arg_reg is not None + return cls.send_arg_reg + + def handle_yield_from_and_await(self, o: Union[YieldFromExpr, AwaitExpr]) -> Value: + # This is basically an implementation of the code in PEP 380. + + # TODO: do we want to use the right types here? + result = self.builder.alloc_temp(object_rprimitive) + to_yield_reg = self.builder.alloc_temp(object_rprimitive) + received_reg = self.builder.alloc_temp(object_rprimitive) + + if isinstance(o, YieldFromExpr): + iter_val = self.primitive_op(iter_op, [self.builder.accept(o.expr)], o.line) + else: + iter_val = self.primitive_op(coro_op, [self.builder.accept(o.expr)], o.line) + + iter_reg = self.builder.maybe_spill_assignable(iter_val) + + stop_block, main_block, done_block = BasicBlock(), BasicBlock(), BasicBlock() + _y_init = self.primitive_op(next_raw_op, [self.read(iter_reg)], o.line) + self.add(Branch(_y_init, stop_block, main_block, Branch.IS_ERROR)) + + # Try extracting a return value from a StopIteration and return it. + # If it wasn't, this reraises the exception. + self.builder.activate_block(stop_block) + self.assign(result, self.primitive_op(check_stop_op, [], o.line), o.line) + self.builder.goto(done_block) + + self.builder.activate_block(main_block) + self.assign(to_yield_reg, _y_init, o.line) + + # OK Now the main loop! + loop_block = BasicBlock() + self.builder.goto_and_activate(loop_block) + + def try_body() -> None: + self.assign(received_reg, self.emit_yield(self.read(to_yield_reg), o.line), o.line) + + def except_body() -> None: + # The body of the except is all implemented in a C function to + # reduce how much code we need to generate. 
It returns a value + # indicating whether to break or yield (or raise an exception). + res = self.primitive_op(yield_from_except_op, [self.read(iter_reg)], o.line) + to_stop = self.add(TupleGet(res, 0, o.line)) + val = self.add(TupleGet(res, 1, o.line)) + + ok, stop = BasicBlock(), BasicBlock() + self.add(Branch(to_stop, stop, ok, Branch.BOOL_EXPR)) + + # The exception got swallowed. Continue, yielding the returned value + self.builder.activate_block(ok) + self.assign(to_yield_reg, val, o.line) + self.builder.nonlocal_control[-1].gen_continue(self.builder, o.line) + + # The exception was a StopIteration. Stop iterating. + self.builder.activate_block(stop) + self.assign(result, val, o.line) + self.builder.nonlocal_control[-1].gen_break(self.builder, o.line) + + def else_body() -> None: + # Do a next() or a .send(). It will return NULL on exception + # but it won't automatically propagate. + _y = self.primitive_op(send_op, [self.read(iter_reg), self.read(received_reg)], o.line) + ok, stop = BasicBlock(), BasicBlock() + self.add(Branch(_y, stop, ok, Branch.IS_ERROR)) + + # Everything's fine. Yield it. + self.builder.activate_block(ok) + self.assign(to_yield_reg, _y, o.line) + self.builder.nonlocal_control[-1].gen_continue(self.builder, o.line) + + # Try extracting a return value from a StopIteration and return it. + # If it wasn't, this rereaises the exception. 
+ self.builder.activate_block(stop) + self.assign(result, self.primitive_op(check_stop_op, [], o.line), o.line) + self.builder.nonlocal_control[-1].gen_break(self.builder, o.line) + + self.builder.push_loop_stack(loop_block, done_block) + self.builder.visit_try_except(try_body, [(None, None, except_body)], else_body, o.line) + self.builder.pop_loop_stack() + + self.builder.goto_and_activate(done_block) + return self.read(result) + + def load_decorated_func(self, fdef: FuncDef, orig_func_reg: Value) -> Value: + """ + Given a decorated FuncDef and the register containing an instance of the callable class + representing that FuncDef, applies the corresponding decorator functions on that decorated + FuncDef and returns a register containing an instance of the callable class representing + the decorated function. + """ + if not self.is_decorated(fdef): + # If there are no decorators associated with the function, then just return the + # original function. + return orig_func_reg + + decorators = self.builder.fdefs_to_decorators[fdef] + func_reg = orig_func_reg + for d in reversed(decorators): + decorator = d.accept(self.builder) + assert isinstance(decorator, Value) + func_reg = self.builder.py_call(decorator, [func_reg], func_reg.line) + return func_reg + + def is_decorated(self, fdef: FuncDef) -> bool: + return fdef in self.builder.fdefs_to_decorators + + def gen_glue(self, sig: FuncSignature, target: FuncIR, + cls: ClassIR, base: ClassIR, fdef: FuncItem, + *, + do_py_ops: bool = False + ) -> FuncIR: + """Generate glue methods that mediate between different method types in subclasses. + + Works on both properties and methods. See gen_glue_methods below for more details. + + If do_py_ops is True, then the glue methods should use generic + C API operations instead of direct calls, to enable generating + "shadow" glue methods that work with interpreted subclasses. 
+ """ + if fdef.is_property: + return self.gen_glue_property(sig, target, cls, base, fdef.line, do_py_ops) + else: + return self.gen_glue_method(sig, target, cls, base, fdef.line, do_py_ops) + + def gen_glue_method(self, sig: FuncSignature, target: FuncIR, + cls: ClassIR, base: ClassIR, line: int, + do_pycall: bool, + ) -> FuncIR: + """Generate glue methods that mediate between different method types in subclasses. + + For example, if we have: + + class A: + def f(self, x: int) -> object: ... + + then it is totally permissible to have a subclass + + class B(A): + def f(self, x: object) -> int: ... + + since '(object) -> int' is a subtype of '(int) -> object' by the usual + contra/co-variant function subtyping rules. + + The trickiness here is that int and object have different + runtime representations in mypyc, so A.f and B.f have + different signatures at the native C level. To deal with this, + we need to generate glue methods that mediate between the + different versions by coercing the arguments and return + values. + + If do_pycall is True, then make the call using the C API + instead of a native call. 
+ """ + self.enter() + self.builder.ret_types[-1] = sig.ret_type + + rt_args = list(sig.args) + if target.decl.kind == FUNC_NORMAL: + rt_args[0] = RuntimeArg(sig.args[0].name, RInstance(cls)) + + # The environment operates on Vars, so we make some up + fake_vars = [(Var(arg.name), arg.type) for arg in rt_args] + args = [self.read(self.environment.add_local_reg(var, type, is_arg=True), line) + for var, type in fake_vars] + arg_names = [arg.name for arg in rt_args] + arg_kinds = [concrete_arg_kind(arg.kind) for arg in rt_args] + + if do_pycall: + retval = self.builder.py_method_call( + args[0], target.name, args[1:], line, arg_kinds[1:], arg_names[1:]) + else: + retval = self.builder.call(target.decl, args, arg_kinds, arg_names, line) + retval = self.builder.coerce(retval, sig.ret_type, line) + self.add(Return(retval)) + + blocks, env, ret_type, _ = self.leave() + return FuncIR( + FuncDecl(target.name + '__' + base.name + '_glue', + cls.name, self.module_name, + FuncSignature(rt_args, ret_type), + target.decl.kind), + blocks, env) + + def gen_glue_property(self, sig: FuncSignature, target: FuncIR, cls: ClassIR, base: ClassIR, + line: int, + do_pygetattr: bool) -> FuncIR: + """Generate glue methods for properties that mediate between different subclass types. + + Similarly to methods, properties of derived types can be covariantly subtyped. Thus, + properties also require glue. However, this only requires the return type to change. + Further, instead of a method call, an attribute get is performed. + + If do_pygetattr is True, then get the attribute using the C + API instead of a native call. 
+ """ + self.enter() + + rt_arg = RuntimeArg(SELF_NAME, RInstance(cls)) + arg = self.read(add_self_to_env(self.environment, cls), line) + self.builder.ret_types[-1] = sig.ret_type + if do_pygetattr: + retval = self.builder.py_get_attr(arg, target.name, line) + else: + retval = self.add(GetAttr(arg, target.name, line)) + retbox = self.builder.coerce(retval, sig.ret_type, line) + self.add(Return(retbox)) + + blocks, env, return_type, _ = self.leave() + return FuncIR( + FuncDecl(target.name + '__' + base.name + '_glue', + cls.name, self.module_name, FuncSignature([rt_arg], return_type)), + blocks, env) + + def setup_callable_class(self) -> None: + """Generates a callable class representing a nested function or a function within a + non-extension class and sets up the 'self' variable for that class. + + This takes the most recently visited function and returns a ClassIR to represent that + function. Each callable class contains an environment attribute with points to another + ClassIR representing the environment class where some of its variables can be accessed. + Note that its '__call__' method is not yet implemented, and is implemented in the + add_call_to_callable_class function. + + Returns a newly constructed ClassIR representing the callable class for the nested + function. + """ + + # Check to see that the name has not already been taken. If so, rename the class. We allow + # multiple uses of the same function name because this is valid in if-else blocks. Example: + # if True: + # def foo(): ----> foo_obj() + # return True + # else: + # def foo(): ----> foo_obj_0() + # return False + name = base_name = '{}_obj'.format(self.fn_info.namespaced_name()) + count = 0 + while name in self.builder.callable_class_names: + name = base_name + '_' + str(count) + count += 1 + self.builder.callable_class_names.add(name) + + # Define the actual callable class ClassIR, and set its environment to point at the + # previously defined environment class. 
+ callable_class_ir = ClassIR(name, self.module_name, is_generated=True) + + # The functools @wraps decorator attempts to call setattr on nested functions, so + # we create a dict for these nested functions. + # https://github.com/python/cpython/blob/3.7/Lib/functools.py#L58 + if self.fn_info.is_nested: + callable_class_ir.has_dict = True + + # If the enclosing class doesn't contain nested (which will happen if + # this is a toplevel lambda), don't set up an environment. + if self.builder.fn_infos[-2].contains_nested: + callable_class_ir.attributes[ENV_ATTR_NAME] = RInstance( + self.builder.fn_infos[-2].env_class + ) + callable_class_ir.mro = [callable_class_ir] + self.fn_info.callable_class = ImplicitClass(callable_class_ir) + self.builder.classes.append(callable_class_ir) + + # Add a 'self' variable to the callable class' environment, and store that variable in a + # register to be accessed later. + self_target = add_self_to_env(self.environment, callable_class_ir) + self.fn_info.callable_class.self_reg = self.read(self_target, self.fn_info.fitem.line) + + def add_call_to_callable_class(self, + blocks: List[BasicBlock], + sig: FuncSignature, + env: Environment, + fn_info: FuncInfo) -> FuncIR: + """Generates a '__call__' method for a callable class representing a nested function. + + This takes the blocks, signature, and environment associated with a function definition and + uses those to build the '__call__' method of a given callable class, used to represent that + function. Note that a 'self' parameter is added to its list of arguments, as the nested + function becomes a class method. 
+ """ + sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),) + sig.args, sig.ret_type) + call_fn_decl = FuncDecl('__call__', fn_info.callable_class.ir.name, self.module_name, sig) + call_fn_ir = FuncIR(call_fn_decl, blocks, env, + fn_info.fitem.line, traceback_name=fn_info.fitem.name) + fn_info.callable_class.ir.methods['__call__'] = call_fn_ir + return call_fn_ir + + def add_get_to_callable_class(self, fn_info: FuncInfo) -> None: + """Generates the '__get__' method for a callable class.""" + line = fn_info.fitem.line + self.enter(fn_info) + + vself = self.read(self.environment.add_local_reg(Var(SELF_NAME), object_rprimitive, True)) + instance = self.environment.add_local_reg(Var('instance'), object_rprimitive, True) + self.environment.add_local_reg(Var('owner'), object_rprimitive, True) + + # If accessed through the class, just return the callable + # object. If accessed through an object, create a new bound + # instance method object. + instance_block, class_block = BasicBlock(), BasicBlock() + comparison = self.builder.binary_op( + self.read(instance), self.builder.none_object(), 'is', line + ) + self.builder.add_bool_branch(comparison, class_block, instance_block) + + self.builder.activate_block(class_block) + self.add(Return(vself)) + + self.builder.activate_block(instance_block) + self.add(Return(self.primitive_op(method_new_op, [vself, self.read(instance)], line))) + + blocks, env, _, fn_info = self.leave() + + sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive), + RuntimeArg('instance', object_rprimitive), + RuntimeArg('owner', object_rprimitive)), + object_rprimitive) + get_fn_decl = FuncDecl('__get__', fn_info.callable_class.ir.name, self.module_name, sig) + get_fn_ir = FuncIR(get_fn_decl, blocks, env) + fn_info.callable_class.ir.methods['__get__'] = get_fn_ir + self.functions.append(get_fn_ir) + + def instantiate_callable_class(self, fn_info: FuncInfo) -> Value: + """ + Assigns a callable class to a register named after the given 
function definition. Note + that fn_info refers to the function being assigned, whereas self.fn_info refers to the + function encapsulating the function being turned into a callable class. + """ + fitem = fn_info.fitem + func_reg = self.add(Call(fn_info.callable_class.ir.ctor, [], fitem.line)) + + # Set the callable class' environment attribute to point at the environment class + # defined in the callable class' immediate outer scope. Note that there are three possible + # environment class registers we may use. If the encapsulating function is: + # - a generator function, then the callable class is instantiated from the generator class' + # __next__' function, and hence the generator class' environment register is used. + # - a nested function, then the callable class is instantiated from the current callable + # class' '__call__' function, and hence the callable class' environment register is used. + # - neither, then we use the environment register of the original function. + curr_env_reg = None + if self.fn_info.is_generator: + curr_env_reg = self.fn_info.generator_class.curr_env_reg + elif self.fn_info.is_nested: + curr_env_reg = self.fn_info.callable_class.curr_env_reg + elif self.fn_info.contains_nested: + curr_env_reg = self.fn_info.curr_env_reg + if curr_env_reg: + self.add(SetAttr(func_reg, ENV_ATTR_NAME, curr_env_reg, fitem.line)) + return func_reg + + def setup_env_class(self) -> ClassIR: + """Generates a class representing a function environment. + + Note that the variables in the function environment are not actually populated here. This + is because when the environment class is generated, the function environment has not yet + been visited. This behavior is allowed so that when the compiler visits nested functions, + it can use the returned ClassIR instance to figure out free variables it needs to access. + The remaining attributes of the environment class are populated when the environment + registers are loaded. 
+ + Returns a ClassIR representing an environment for a function containing a nested function. + """ + env_class = ClassIR('{}_env'.format(self.fn_info.namespaced_name()), + self.module_name, is_generated=True) + env_class.attributes[SELF_NAME] = RInstance(env_class) + if self.fn_info.is_nested: + # If the function is nested, its environment class must contain an environment + # attribute pointing to its encapsulating functions' environment class. + env_class.attributes[ENV_ATTR_NAME] = RInstance(self.builder.fn_infos[-2].env_class) + env_class.mro = [env_class] + self.fn_info.env_class = env_class + self.builder.classes.append(env_class) + return env_class + + def finalize_env_class(self) -> None: + """Generates, instantiates, and sets up the environment of an environment class.""" + + self.instantiate_env_class() + + # Iterate through the function arguments and replace local definitions (using registers) + # that were previously added to the environment with references to the function's + # environment class. + if self.fn_info.is_nested: + self.add_args_to_env(local=False, base=self.fn_info.callable_class) + else: + self.add_args_to_env(local=False, base=self.fn_info) + + def instantiate_env_class(self) -> Value: + """Assigns an environment class to a register named after the given function definition.""" + curr_env_reg = self.add(Call(self.fn_info.env_class.ctor, [], self.fn_info.fitem.line)) + + if self.fn_info.is_nested: + self.fn_info.callable_class._curr_env_reg = curr_env_reg + self.add(SetAttr(curr_env_reg, + ENV_ATTR_NAME, + self.fn_info.callable_class.prev_env_reg, + self.fn_info.fitem.line)) + else: + self.fn_info._curr_env_reg = curr_env_reg + + return curr_env_reg + + def load_env_registers(self) -> None: + """Loads the registers for the current FuncItem being visited. + + Adds the arguments of the FuncItem to the environment. 
If the FuncItem is nested inside of + another function, then this also loads all of the outer environments of the FuncItem into + registers so that they can be used when accessing free variables. + """ + self.add_args_to_env(local=True) + + fn_info = self.fn_info + fitem = fn_info.fitem + if fn_info.is_nested: + self.load_outer_envs(fn_info.callable_class) + # If this is a FuncDef, then make sure to load the FuncDef into its own environment + # class so that the function can be called recursively. + if isinstance(fitem, FuncDef): + self.setup_func_for_recursive_call(fitem, fn_info.callable_class) + + def load_outer_env(self, base: Value, outer_env: Environment) -> Value: + """Loads the environment class for a given base into a register. + + Additionally, iterates through all of the SymbolNode and AssignmentTarget instances of the + environment at the given index's symtable, and adds those instances to the environment of + the current environment. This is done so that the current environment can access outer + environment variables without having to reload all of the environment registers. + + Returns the register where the environment class was loaded. + """ + env = self.add(GetAttr(base, ENV_ATTR_NAME, self.fn_info.fitem.line)) + assert isinstance(env.type, RInstance), '{} must be of type RInstance'.format(env) + + for symbol, target in outer_env.symtable.items(): + env.type.class_ir.attributes[symbol.name] = target.type + symbol_target = AssignmentTargetAttr(env, symbol.name) + self.environment.add_target(symbol, symbol_target) + + return env + + def load_outer_envs(self, base: ImplicitClass) -> None: + index = len(self.environments) - 2 + + # Load the first outer environment. This one is special because it gets saved in the + # FuncInfo instance's prev_env_reg field. 
+ if index > 1: + # outer_env = self.fn_infos[index].environment + outer_env = self.environments[index] + if isinstance(base, GeneratorClass): + base.prev_env_reg = self.load_outer_env(base.curr_env_reg, outer_env) + else: + base.prev_env_reg = self.load_outer_env(base.self_reg, outer_env) + env_reg = base.prev_env_reg + index -= 1 + + # Load the remaining outer environments into registers. + while index > 1: + # outer_env = self.fn_infos[index].environment + outer_env = self.environments[index] + env_reg = self.load_outer_env(env_reg, outer_env) + index -= 1 + + def add_args_to_env(self, + local: bool = True, + base: Optional[Union[FuncInfo, ImplicitClass]] = None, + reassign: bool = True) -> None: + fn_info = self.fn_info + if local: + for arg in fn_info.fitem.arguments: + rtype = self.builder.type_to_rtype(arg.variable.type) + self.environment.add_local_reg(arg.variable, rtype, is_arg=True) + else: + for arg in fn_info.fitem.arguments: + if self.is_free_variable(arg.variable) or fn_info.is_generator: + rtype = self.builder.type_to_rtype(arg.variable.type) + assert base is not None, 'base cannot be None for adding nonlocal args' + self.builder.add_var_to_env_class(arg.variable, rtype, base, reassign=reassign) + + def is_free_variable(self, symbol: SymbolNode) -> bool: + fitem = self.fn_info.fitem + return ( + fitem in self.builder.free_variables + and symbol in self.builder.free_variables[fitem] + ) + + def get_func_target(self, fdef: FuncDef) -> AssignmentTarget: + """ + Given a FuncDef, return the target associated the instance of its callable class. If the + function was not already defined somewhere, then define it and add it to the current + environment. + """ + if fdef.original_def: + # Get the target associated with the previously defined FuncDef. 
+ return self.environment.lookup(fdef.original_def) + + if self.fn_info.is_generator or self.fn_info.contains_nested: + return self.environment.lookup(fdef) + + return self.environment.add_local_reg(fdef, object_rprimitive) + + # Helpers + + @property + def fn_info(self) -> FuncInfo: + return self.builder.fn_info + + @property + def environment(self) -> Environment: + return self.builder.environment + + def add(self, op: Op) -> Value: + return self.builder.add(op) + + def enter(self, fn_info: Union[FuncInfo, str] = '') -> None: + self.builder.enter(fn_info) + + def leave(self) -> Tuple[List[BasicBlock], Environment, RType, FuncInfo]: + return self.builder.leave() + + def assign(self, + target: Union[Register, AssignmentTarget], + rvalue_reg: Value, + line: int) -> None: + return self.builder.assign(target, rvalue_reg, line) + + def primitive_op(self, desc: OpDescription, args: List[Value], line: int) -> Value: + return self.builder.primitive_op(desc, args, line) + + def read(self, target: Union[Value, AssignmentTarget], line: int = -1) -> Value: + return self.builder.read(target, line) diff --git a/mypyc/genops.py b/mypyc/genops.py index 970537bb0f1a..f2e84af86408 100644 --- a/mypyc/genops.py +++ b/mypyc/genops.py @@ -26,7 +26,7 @@ def f(x: int) -> int: from mypy.build import Graph from mypy.nodes import ( - MypyFile, SymbolNode, Statement, FuncItem, FuncDef, ReturnStmt, AssignmentStmt, OpExpr, + MypyFile, SymbolNode, Statement, FuncDef, ReturnStmt, AssignmentStmt, OpExpr, IntExpr, NameExpr, LDEF, Var, IfStmt, UnaryExpr, ComparisonExpr, WhileStmt, CallExpr, IndexExpr, Block, Expression, ListExpr, ExpressionStmt, MemberExpr, ForStmt, RefExpr, Lvalue, BreakStmt, ContinueStmt, ConditionalExpr, OperatorAssignmentStmt, TupleExpr, ClassDef, @@ -37,7 +37,7 @@ def f(x: int) -> int: NamedTupleExpr, NewTypeExpr, NonlocalDecl, OverloadedFuncDef, PrintStmt, RaiseStmt, RevealExpr, SetExpr, SliceExpr, StarExpr, SuperExpr, TryStmt, TypeAliasExpr, TypeApplication, TypeVarExpr, 
TypedDictExpr, UnicodeExpr, WithStmt, YieldFromExpr, YieldExpr, GDEF, ARG_POS, - ARG_OPT, ARG_NAMED, ARG_STAR, ARG_STAR2, op_methods + ARG_NAMED, ARG_STAR, ARG_STAR2, op_methods ) from mypy.types import ( Type, Instance, CallableType, NoneTyp, TupleType, UnionType, AnyType, TypeVarType, PartialType, @@ -51,15 +51,14 @@ def f(x: int) -> int: from mypy.util import split_target from mypyc.common import ( - ENV_ATTR_NAME, NEXT_LABEL_ATTR_NAME, TEMP_ATTR_NAME, LAMBDA_NAME, - MAX_LITERAL_SHORT_INT, TOP_LEVEL_NAME, SELF_NAME, decorator_helper_name, + TEMP_ATTR_NAME, MAX_LITERAL_SHORT_INT, TOP_LEVEL_NAME, FAST_ISINSTANCE_MAX_SUBCLASSES, PROPSET_PREFIX ) from mypyc.prebuildvisitor import PreBuildVisitor from mypyc.ops import ( BasicBlock, AssignmentTarget, AssignmentTargetRegister, AssignmentTargetIndex, AssignmentTargetAttr, AssignmentTargetTuple, Environment, Op, LoadInt, RType, Value, Register, - Return, FuncIR, Assign, Branch, Goto, RuntimeArg, Call, Box, Unbox, Cast, RTuple, Unreachable, + FuncIR, Assign, Branch, Goto, RuntimeArg, Call, Box, Unbox, Cast, RTuple, Unreachable, TupleGet, TupleSet, ClassIR, NonExtClassInfo, RInstance, ModuleIR, ModuleIRs, GetAttr, SetAttr, LoadStatic, InitStatic, MethodCall, INVALID_FUNC_DEF, int_rprimitive, float_rprimitive, bool_rprimitive, list_rprimitive, is_list_rprimitive, dict_rprimitive, set_rprimitive, @@ -82,15 +81,14 @@ def f(x: int) -> int: ) from mypyc.ops_set import new_set_op, set_add_op, set_update_op from mypyc.ops_misc import ( - none_op, none_object_op, true_op, false_op, iter_op, next_op, next_raw_op, - check_stop_op, send_op, yield_from_except_op, coro_op, + none_op, none_object_op, true_op, false_op, iter_op, next_op, py_getattr_op, py_setattr_op, py_delattr_op, py_call_op, py_call_with_kwargs_op, py_method_call_op, fast_isinstance_op, bool_op, new_slice_op, type_op, import_op, - get_module_dict_op, ellipsis_op, method_new_op, type_is_op, + get_module_dict_op, ellipsis_op, type_is_op, ) from mypyc.ops_exc import ( - 
raise_exception_op, raise_exception_with_tb_op, reraise_exception_op, + raise_exception_op, reraise_exception_op, error_catch_op, restore_exc_info_op, exc_matches_op, get_exc_value_op, get_exc_info_op, keep_propagating_op ) @@ -106,10 +104,11 @@ def f(x: int) -> int: FinallyNonlocalControl, TryFinallyNonlocalControl, GeneratorNonlocalControl ) from mypyc.genopsutil import ( - is_dataclass, get_func_def, concrete_arg_kind, get_mypyc_attrs, is_extension_class, - is_trait + is_dataclass, get_func_def, get_mypyc_attrs, is_extension_class, is_trait ) from mypyc.genclass import BuildClassIR +from mypyc.genfunc import BuildFuncIR +from mypyc.genopscontext import FuncInfo, ImplicitClass GenFunc = Callable[[], None] DictEntry = Tuple[Optional[Value], Value] @@ -650,167 +649,6 @@ def prepare_non_ext_class_def(path: str, module_name: str, cdef: ClassDef, "Non-extension classes may not inherit from extension classes", path, cdef.line) -class FuncInfo(object): - """Contains information about functions as they are generated.""" - def __init__(self, - fitem: FuncItem = INVALID_FUNC_DEF, - name: str = '', - class_name: Optional[str] = None, - namespace: str = '', - is_nested: bool = False, - contains_nested: bool = False, - is_decorated: bool = False, - in_non_ext: bool = False) -> None: - self.fitem = fitem - self.name = name if not is_decorated else decorator_helper_name(name) - self.class_name = class_name - self.ns = namespace - # Callable classes implement the '__call__' method, and are used to represent functions - # that are nested inside of other functions. - self._callable_class = None # type: Optional[ImplicitClass] - # Environment classes are ClassIR instances that contain attributes representing the - # variables in the environment of the function they correspond to. Environment classes are - # generated for functions that contain nested functions. 
- self._env_class = None # type: Optional[ClassIR] - # Generator classes implement the '__next__' method, and are used to represent generators - # returned by generator functions. - self._generator_class = None # type: Optional[GeneratorClass] - # Environment class registers are the local registers associated with instances of an - # environment class, used for getting and setting attributes. curr_env_reg is the register - # associated with the current environment. - self._curr_env_reg = None # type: Optional[Value] - # These are flags denoting whether a given function is nested, contains a nested function, - # is decorated, or is within a non-extension class. - self.is_nested = is_nested - self.contains_nested = contains_nested - self.is_decorated = is_decorated - self.in_non_ext = in_non_ext - - # TODO: add field for ret_type: RType = none_rprimitive - - def namespaced_name(self) -> str: - return '_'.join(x for x in [self.name, self.class_name, self.ns] if x) - - @property - def is_generator(self) -> bool: - return self.fitem.is_generator or self.fitem.is_coroutine - - @property - def callable_class(self) -> 'ImplicitClass': - assert self._callable_class is not None - return self._callable_class - - @callable_class.setter - def callable_class(self, cls: 'ImplicitClass') -> None: - self._callable_class = cls - - @property - def env_class(self) -> ClassIR: - assert self._env_class is not None - return self._env_class - - @env_class.setter - def env_class(self, ir: ClassIR) -> None: - self._env_class = ir - - @property - def generator_class(self) -> 'GeneratorClass': - assert self._generator_class is not None - return self._generator_class - - @generator_class.setter - def generator_class(self, cls: 'GeneratorClass') -> None: - self._generator_class = cls - - @property - def curr_env_reg(self) -> Value: - assert self._curr_env_reg is not None - return self._curr_env_reg - - -class ImplicitClass(object): - """Contains information regarding classes that are generated 
as a result of nested functions or - generated functions, but not explicitly defined in the source code. - """ - def __init__(self, ir: ClassIR) -> None: - # The ClassIR instance associated with this class. - self.ir = ir - # The register associated with the 'self' instance for this generator class. - self._self_reg = None # type: Optional[Value] - # Environment class registers are the local registers associated with instances of an - # environment class, used for getting and setting attributes. curr_env_reg is the register - # associated with the current environment. prev_env_reg is the self.__mypyc_env__ field - # associated with the previous environment. - self._curr_env_reg = None # type: Optional[Value] - self._prev_env_reg = None # type: Optional[Value] - - @property - def self_reg(self) -> Value: - assert self._self_reg is not None - return self._self_reg - - @self_reg.setter - def self_reg(self, reg: Value) -> None: - self._self_reg = reg - - @property - def curr_env_reg(self) -> Value: - assert self._curr_env_reg is not None - return self._curr_env_reg - - @curr_env_reg.setter - def curr_env_reg(self, reg: Value) -> None: - self._curr_env_reg = reg - - @property - def prev_env_reg(self) -> Value: - assert self._prev_env_reg is not None - return self._prev_env_reg - - @prev_env_reg.setter - def prev_env_reg(self, reg: Value) -> None: - self._prev_env_reg = reg - - -class GeneratorClass(ImplicitClass): - def __init__(self, ir: ClassIR) -> None: - super().__init__(ir) - # This register holds the label number that the '__next__' function should go to the next - # time it is called. - self._next_label_reg = None # type: Optional[Value] - self._next_label_target = None # type: Optional[AssignmentTarget] - - # These registers hold the error values for the generator object for the case that the - # 'throw' function is called. 
- self.exc_regs = None # type: Optional[Tuple[Value, Value, Value]] - - # Holds the arg passed to send - self.send_arg_reg = None # type: Optional[Value] - - # The switch block is used to decide which instruction to go using the value held in the - # next-label register. - self.switch_block = BasicBlock() - self.blocks = [] # type: List[BasicBlock] - - @property - def next_label_reg(self) -> Value: - assert self._next_label_reg is not None - return self._next_label_reg - - @next_label_reg.setter - def next_label_reg(self, reg: Value) -> None: - self._next_label_reg = reg - - @property - def next_label_target(self) -> AssignmentTarget: - assert self._next_label_target is not None - return self._next_label_target - - @next_label_target.setter - def next_label_target(self, target: AssignmentTarget) -> None: - self._next_label_target = target - - # Infrastructure for special casing calls to builtin functions in a # programmatic way. Most special cases should be handled using the # data driven "primitive ops" system, but certain operations require @@ -934,120 +772,9 @@ def visit_mypy_file(self, mypyfile: MypyFile) -> None: traceback_name="") self.functions.append(func_ir) - def handle_ext_method(self, cdef: ClassDef, fdef: FuncDef) -> None: - # Perform the function of visit_method for methods inside extension classes. - name = fdef.name - class_ir = self.mapper.type_to_ir[cdef.info] - func_ir, func_reg = self.gen_func_item(fdef, name, self.mapper.fdef_to_sig(fdef), cdef) - self.functions.append(func_ir) - - if self.is_decorated(fdef): - # Obtain the the function name in order to construct the name of the helper function. 
- _, _, name = fdef.fullname.rpartition('.') - helper_name = decorator_helper_name(name) - # Read the PyTypeObject representing the class, get the callable object - # representing the non-decorated method - typ = self.load_native_type_object(cdef.fullname) - orig_func = self.py_get_attr(typ, helper_name, fdef.line) - - # Decorate the non-decorated method - decorated_func = self.load_decorated_func(fdef, orig_func) - - # Set the callable object representing the decorated method as an attribute of the - # extension class. - self.primitive_op(py_setattr_op, - [typ, self.load_static_unicode(name), decorated_func], fdef.line) - - if fdef.is_property: - # If there is a property setter, it will be processed after the getter, - # We populate the optional setter field with none for now. - assert name not in class_ir.properties - class_ir.properties[name] = (func_ir, None) - - elif fdef in self.prop_setters: - # The respective property getter must have been processed already - assert name in class_ir.properties - getter_ir, _ = class_ir.properties[name] - class_ir.properties[name] = (getter_ir, func_ir) - - class_ir.methods[func_ir.decl.name] = func_ir - - # If this overrides a parent class method with a different type, we need - # to generate a glue method to mediate between them. - for base in class_ir.mro[1:]: - if (name in base.method_decls and name != '__init__' - and not is_same_method_signature(class_ir.method_decls[name].sig, - base.method_decls[name].sig)): - - # TODO: Support contravariant subtyping in the input argument for - # property setters. Need to make a special glue method for handling this, - # similar to gen_glue_property. - - f = self.gen_glue(base.method_decls[name].sig, func_ir, class_ir, base, fdef) - class_ir.glue_methods[(base, name)] = f - self.functions.append(f) - - # If the class allows interpreted children, create glue - # methods that dispatch via the Python API. 
These will go in a - # "shadow vtable" that will be assigned to interpreted - # children. - if class_ir.allow_interpreted_subclasses: - f = self.gen_glue(func_ir.sig, func_ir, class_ir, class_ir, fdef, do_py_ops=True) - class_ir.glue_methods[(class_ir, name)] = f - self.functions.append(f) - - def handle_non_ext_method( - self, non_ext: NonExtClassInfo, cdef: ClassDef, fdef: FuncDef) -> None: - # Perform the function of visit_method for methods inside non-extension classes. - name = fdef.name - func_ir, func_reg = self.gen_func_item(fdef, name, self.mapper.fdef_to_sig(fdef), cdef) - assert func_reg is not None - self.functions.append(func_ir) - - if self.is_decorated(fdef): - # The undecorated method is a generated callable class - orig_func = func_reg - func_reg = self.load_decorated_func(fdef, orig_func) - - # TODO: Support property setters in non-extension classes - if fdef.is_property: - prop = self.load_module_attr_by_fullname('builtins.property', fdef.line) - func_reg = self.py_call(prop, [func_reg], fdef.line) - - elif self.mapper.func_to_decl[fdef].kind == FUNC_CLASSMETHOD: - cls_meth = self.load_module_attr_by_fullname('builtins.classmethod', fdef.line) - func_reg = self.py_call(cls_meth, [func_reg], fdef.line) - - elif self.mapper.func_to_decl[fdef].kind == FUNC_STATICMETHOD: - stat_meth = self.load_module_attr_by_fullname('builtins.staticmethod', fdef.line) - func_reg = self.py_call(stat_meth, [func_reg], fdef.line) - - self.add_to_non_ext_dict(non_ext, name, func_reg, fdef.line) - def visit_method( self, cdef: ClassDef, non_ext: Optional[NonExtClassInfo], fdef: FuncDef) -> None: - if non_ext: - self.handle_non_ext_method(non_ext, cdef, fdef) - else: - self.handle_ext_method(cdef, fdef) - - def is_constant(self, e: Expression) -> bool: - """Check whether we allow an expression to appear as a default value. 
- - We don't currently properly support storing the evaluated - values for default arguments and default attribute values, so - we restrict what expressions we allow. We allow literals of - primitives types, None, and references to Final global - variables. - """ - return (isinstance(e, (StrExpr, BytesExpr, IntExpr, FloatExpr)) - or (isinstance(e, UnaryExpr) and e.op == '-' - and isinstance(e.expr, (IntExpr, FloatExpr))) - or (isinstance(e, TupleExpr) - and all(self.is_constant(e) for e in e.items)) - or (isinstance(e, RefExpr) and e.kind == GDEF - and (e.fullname in ('builtins.True', 'builtins.False', 'builtins.None') - or (isinstance(e.node, Var) and e.node.is_final)))) + BuildFuncIR(self).visit_method(cdef, non_ext, fdef) def visit_class_def(self, cdef: ClassDef) -> None: BuildClassIR(self).visit_class_def(cdef) @@ -1139,113 +866,6 @@ def visit_import_all(self, node: ImportAll) -> None: return self.gen_import(node.id, node.line) - def gen_glue(self, sig: FuncSignature, target: FuncIR, - cls: ClassIR, base: ClassIR, fdef: FuncItem, - *, - do_py_ops: bool = False - ) -> FuncIR: - """Generate glue methods that mediate between different method types in subclasses. - - Works on both properties and methods. See gen_glue_methods below for more details. - - If do_py_ops is True, then the glue methods should use generic - C API operations instead of direct calls, to enable generating - "shadow" glue methods that work with interpreted subclasses. - """ - if fdef.is_property: - return self.gen_glue_property(sig, target, cls, base, fdef.line, do_py_ops) - else: - return self.gen_glue_method(sig, target, cls, base, fdef.line, do_py_ops) - - def gen_glue_method(self, sig: FuncSignature, target: FuncIR, - cls: ClassIR, base: ClassIR, line: int, - do_pycall: bool, - ) -> FuncIR: - """Generate glue methods that mediate between different method types in subclasses. - - For example, if we have: - - class A: - def f(self, x: int) -> object: ... 
- - then it is totally permissible to have a subclass - - class B(A): - def f(self, x: object) -> int: ... - - since '(object) -> int' is a subtype of '(int) -> object' by the usual - contra/co-variant function subtyping rules. - - The trickiness here is that int and object have different - runtime representations in mypyc, so A.f and B.f have - different signatures at the native C level. To deal with this, - we need to generate glue methods that mediate between the - different versions by coercing the arguments and return - values. - - If do_pycall is True, then make the call using the C API - instead of a native call. - """ - self.enter() - self.ret_types[-1] = sig.ret_type - - rt_args = list(sig.args) - if target.decl.kind == FUNC_NORMAL: - rt_args[0] = RuntimeArg(sig.args[0].name, RInstance(cls)) - - # The environment operates on Vars, so we make some up - fake_vars = [(Var(arg.name), arg.type) for arg in rt_args] - args = [self.read(self.environment.add_local_reg(var, type, is_arg=True), line) - for var, type in fake_vars] - arg_names = [arg.name for arg in rt_args] - arg_kinds = [concrete_arg_kind(arg.kind) for arg in rt_args] - - if do_pycall: - retval = self.py_method_call( - args[0], target.name, args[1:], line, arg_kinds[1:], arg_names[1:]) - else: - retval = self.call(target.decl, args, arg_kinds, arg_names, line) - retval = self.coerce(retval, sig.ret_type, line) - self.add(Return(retval)) - - blocks, env, ret_type, _ = self.leave() - return FuncIR( - FuncDecl(target.name + '__' + base.name + '_glue', - cls.name, self.module_name, - FuncSignature(rt_args, ret_type), - target.decl.kind), - blocks, env) - - def gen_glue_property(self, sig: FuncSignature, target: FuncIR, cls: ClassIR, base: ClassIR, - line: int, - do_pygetattr: bool) -> FuncIR: - """Generate glue methods for properties that mediate between different subclass types. - - Similarly to methods, properties of derived types can be covariantly subtyped. Thus, - properties also require glue. 
However, this only requires the return type to change. - Further, instead of a method call, an attribute get is performed. - - If do_pygetattr is True, then get the attribute using the C - API instead of a native call. - """ - self.enter() - - rt_arg = RuntimeArg(SELF_NAME, RInstance(cls)) - arg = self.read(self.add_self_to_env(cls), line) - self.ret_types[-1] = sig.ret_type - if do_pygetattr: - retval = self.py_get_attr(arg, target.name, line) - else: - retval = self.add(GetAttr(arg, target.name, line)) - retbox = self.coerce(retval, sig.ret_type, line) - self.add(Return(retbox)) - - blocks, env, return_type, _ = self.leave() - return FuncIR( - FuncDecl(target.name + '__' + base.name + '_glue', - cls.name, self.module_name, FuncSignature([rt_arg], return_type)), - blocks, env) - def assign_if_null(self, target: AssignmentTargetRegister, get_val: Callable[[], Value], line: int) -> None: """Generate blocks for registers that NULL values.""" @@ -1256,259 +876,6 @@ def assign_if_null(self, target: AssignmentTargetRegister, self.goto(body_block) self.activate_block(body_block) - def calculate_arg_defaults(self, - fn_info: FuncInfo, - env: Environment, - func_reg: Optional[Value]) -> None: - """Calculate default argument values and store them. - - They are stored in statics for top level functions and in - the function objects for nested functions (while constants are - still stored computed on demand). - """ - fitem = fn_info.fitem - for arg in fitem.arguments: - # Constant values don't get stored but just recomputed - if arg.initializer and not self.is_constant(arg.initializer): - value = self.coerce(self.accept(arg.initializer), - env.lookup(arg.variable).type, arg.line) - if not fn_info.is_nested: - name = fitem.fullname + '.' 
+ arg.variable.name - self.add(InitStatic(value, name, self.module_name)) - else: - assert func_reg is not None - self.add(SetAttr(func_reg, arg.variable.name, value, arg.line)) - - def gen_arg_defaults(self) -> None: - """Generate blocks for arguments that have default values. - - If the passed value is an error value, then assign the default - value to the argument. - """ - fitem = self.fn_info.fitem - for arg in fitem.arguments: - if arg.initializer: - target = self.environment.lookup(arg.variable) - - def get_default() -> Value: - assert arg.initializer is not None - - # If it is constant, don't bother storing it - if self.is_constant(arg.initializer): - return self.accept(arg.initializer) - - # Because gen_arg_defaults runs before calculate_arg_defaults, we - # add the static/attribute to final_names/the class here. - elif not self.fn_info.is_nested: - name = fitem.fullname + '.' + arg.variable.name - self.final_names.append((name, target.type)) - return self.add(LoadStatic(target.type, name, self.module_name)) - else: - name = arg.variable.name - self.fn_info.callable_class.ir.attributes[name] = target.type - return self.add( - GetAttr(self.fn_info.callable_class.self_reg, name, arg.line)) - assert isinstance(target, AssignmentTargetRegister) - self.assign_if_null(target, - get_default, - arg.initializer.line) - - def gen_func_item(self, - fitem: FuncItem, - name: str, - sig: FuncSignature, - cdef: Optional[ClassDef] = None, - ) -> Tuple[FuncIR, Optional[Value]]: - # TODO: do something about abstract methods. - - """Generates and returns the FuncIR for a given FuncDef. - - If the given FuncItem is a nested function, then we generate a callable class representing - the function and use that instead of the actual function. if the given FuncItem contains a - nested function, then we generate an environment class so that inner nested functions can - access the environment of the given FuncDef. - - Consider the following nested function. 
- def a() -> None: - def b() -> None: - def c() -> None: - return None - return None - return None - - The classes generated would look something like the following. - - has pointer to +-------+ - +--------------------------> | a_env | - | +-------+ - | ^ - | | has pointer to - +-------+ associated with +-------+ - | b_obj | -------------------> | b_env | - +-------+ +-------+ - ^ - | - +-------+ has pointer to | - | c_obj | --------------------------+ - +-------+ - """ - - func_reg = None # type: Optional[Value] - - # We treat lambdas as always being nested because we always generate - # a class for lambdas, no matter where they are. (It would probably also - # work to special case toplevel lambdas and generate a non-class function.) - is_nested = fitem in self.nested_fitems or isinstance(fitem, LambdaExpr) - contains_nested = fitem in self.encapsulating_funcs.keys() - is_decorated = fitem in self.fdefs_to_decorators - in_non_ext = False - class_name = None - if cdef: - ir = self.mapper.type_to_ir[cdef.info] - in_non_ext = not ir.is_ext_class - class_name = cdef.name - - self.enter(FuncInfo(fitem, name, class_name, self.gen_func_ns(), - is_nested, contains_nested, is_decorated, in_non_ext)) - - # Functions that contain nested functions need an environment class to store variables that - # are free in their nested functions. Generator functions need an environment class to - # store a variable denoting the next instruction to be executed when the __next__ function - # is called, along with all the variables inside the function itself. - if self.fn_info.contains_nested or self.fn_info.is_generator: - self.setup_env_class() - - if self.fn_info.is_nested or self.fn_info.in_non_ext: - self.setup_callable_class() - - if self.fn_info.is_generator: - # Do a first-pass and generate a function that just returns a generator object. 
- self.gen_generator_func() - blocks, env, ret_type, fn_info = self.leave() - func_ir, func_reg = self.gen_func_ir(blocks, sig, env, fn_info, cdef) - - # Re-enter the FuncItem and visit the body of the function this time. - self.enter(fn_info) - self.setup_env_for_generator_class() - self.load_outer_envs(self.fn_info.generator_class) - if self.fn_info.is_nested and isinstance(fitem, FuncDef): - self.setup_func_for_recursive_call(fitem, self.fn_info.generator_class) - self.create_switch_for_generator_class() - self.add_raise_exception_blocks_to_generator_class(fitem.line) - else: - self.load_env_registers() - self.gen_arg_defaults() - - if self.fn_info.contains_nested and not self.fn_info.is_generator: - self.finalize_env_class() - - self.ret_types[-1] = sig.ret_type - - # Add all variables and functions that are declared/defined within this - # function and are referenced in functions nested within this one to this - # function's environment class so the nested functions can reference - # them even if they are declared after the nested function's definition. - # Note that this is done before visiting the body of this function. 
- - env_for_func = self.fn_info # type: Union[FuncInfo, ImplicitClass] - if self.fn_info.is_generator: - env_for_func = self.fn_info.generator_class - elif self.fn_info.is_nested or self.fn_info.in_non_ext: - env_for_func = self.fn_info.callable_class - - if self.fn_info.fitem in self.free_variables: - # Sort the variables to keep things deterministic - for var in sorted(self.free_variables[self.fn_info.fitem], key=lambda x: x.name): - if isinstance(var, Var): - rtype = self.type_to_rtype(var.type) - self.add_var_to_env_class(var, rtype, env_for_func, reassign=False) - - if self.fn_info.fitem in self.encapsulating_funcs: - for nested_fn in self.encapsulating_funcs[self.fn_info.fitem]: - if isinstance(nested_fn, FuncDef): - # The return type is 'object' instead of an RInstance of the - # callable class because differently defined functions with - # the same name and signature across conditional blocks - # will generate different callable classes, so the callable - # class that gets instantiated must be generic. 
- self.add_var_to_env_class(nested_fn, object_rprimitive, - env_for_func, reassign=False) - - self.accept(fitem.body) - self.maybe_add_implicit_return() - - if self.fn_info.is_generator: - self.populate_switch_for_generator_class() - - blocks, env, ret_type, fn_info = self.leave() - - if fn_info.is_generator: - helper_fn_decl = self.add_helper_to_generator_class(blocks, sig, env, fn_info) - self.add_next_to_generator_class(fn_info, helper_fn_decl, sig) - self.add_send_to_generator_class(fn_info, helper_fn_decl, sig) - self.add_iter_to_generator_class(fn_info) - self.add_throw_to_generator_class(fn_info, helper_fn_decl, sig) - self.add_close_to_generator_class(fn_info) - if fitem.is_coroutine: - self.add_await_to_generator_class(fn_info) - - else: - func_ir, func_reg = self.gen_func_ir(blocks, sig, env, fn_info, cdef) - - self.calculate_arg_defaults(fn_info, env, func_reg) - - return (func_ir, func_reg) - - def gen_func_ir(self, - blocks: List[BasicBlock], - sig: FuncSignature, - env: Environment, - fn_info: FuncInfo, - cdef: Optional[ClassDef]) -> Tuple[FuncIR, Optional[Value]]: - """Generates the FuncIR for a function given the blocks, environment, and function info of - a particular function and returns it. If the function is nested, also returns the register - containing the instance of the corresponding callable class. 
- """ - func_reg = None # type: Optional[Value] - if fn_info.is_nested or fn_info.in_non_ext: - func_ir = self.add_call_to_callable_class(blocks, sig, env, fn_info) - self.add_get_to_callable_class(fn_info) - func_reg = self.instantiate_callable_class(fn_info) - else: - assert isinstance(fn_info.fitem, FuncDef) - func_decl = self.mapper.func_to_decl[fn_info.fitem] - if fn_info.is_decorated: - class_name = None if cdef is None else cdef.name - func_decl = FuncDecl(fn_info.name, class_name, self.module_name, sig, - func_decl.kind, - func_decl.is_prop_getter, func_decl.is_prop_setter) - func_ir = FuncIR(func_decl, blocks, env, fn_info.fitem.line, - traceback_name=fn_info.fitem.name) - else: - func_ir = FuncIR(func_decl, blocks, env, - fn_info.fitem.line, traceback_name=fn_info.fitem.name) - return (func_ir, func_reg) - - def load_decorated_func(self, fdef: FuncDef, orig_func_reg: Value) -> Value: - """ - Given a decorated FuncDef and the register containing an instance of the callable class - representing that FuncDef, applies the corresponding decorator functions on that decorated - FuncDef and returns a register containing an instance of the callable class representing - the decorated function. - """ - if not self.is_decorated(fdef): - # If there are no decorators associated with the function, then just return the - # original function. 
- return orig_func_reg - - decorators = self.fdefs_to_decorators[fdef] - func_reg = orig_func_reg - for d in reversed(decorators): - decorator = d.accept(self) - assert isinstance(decorator, Value) - func_reg = self.py_call(decorator, [func_reg], func_reg.line) - return func_reg - def maybe_add_implicit_return(self) -> None: if is_none_rprimitive(self.ret_types[-1]) or is_object_rprimitive(self.ret_types[-1]): self.add_implicit_return() @@ -1516,18 +883,10 @@ def maybe_add_implicit_return(self) -> None: self.add_implicit_unreachable() def visit_func_def(self, fdef: FuncDef) -> None: - func_ir, func_reg = self.gen_func_item(fdef, fdef.name, self.mapper.fdef_to_sig(fdef)) - - # If the function that was visited was a nested function, then either look it up in our - # current environment or define it if it was not already defined. - if func_reg: - self.assign(self.get_func_target(fdef), func_reg, fdef.line) - self.functions.append(func_ir) + BuildFuncIR(self).visit_func_def(fdef) def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: - # Handle regular overload case - assert o.impl - self.accept(o.impl) + BuildFuncIR(self).visit_overloaded_func_def(o) def add_implicit_return(self) -> None: block = self.blocks[-1][-1] @@ -2289,13 +1648,6 @@ def is_synthetic_type(self, typ: TypeInfo) -> bool: """Is a type something other than just a class we've created?""" return typ.is_named_tuple or typ.is_newtype or typ.typeddict_type is not None - def is_decorated(self, fdef: FuncDef) -> bool: - return fdef in self.fdefs_to_decorators - - def is_free_variable(self, symbol: SymbolNode) -> bool: - fitem = self.fn_info.fitem - return fitem in self.free_variables and symbol in self.free_variables[fitem] - def get_final_ref(self, expr: MemberExpr) -> Optional[Tuple[str, Var, bool]]: """Check if `expr` is a final attribute. 
@@ -3423,25 +2775,7 @@ def generate(i: int) -> None: generate(0) def visit_lambda_expr(self, expr: LambdaExpr) -> Value: - typ = get_proper_type(self.types[expr]) - assert isinstance(typ, CallableType) - - runtime_args = [] - for arg, arg_type in zip(expr.arguments, typ.arg_types): - arg.variable.type = arg_type - runtime_args.append( - RuntimeArg(arg.variable.name, self.type_to_rtype(arg_type), arg.kind)) - ret_type = self.type_to_rtype(typ.ret_type) - - fsig = FuncSignature(runtime_args, ret_type) - - fname = '{}{}'.format(LAMBDA_NAME, self.lambda_counter) - self.lambda_counter += 1 - func_ir, func_reg = self.gen_func_item(expr, fname, fsig) - assert func_reg is not None - - self.functions.append(func_ir) - return func_reg + return BuildFuncIR(self).visit_lambda_expr(expr) def visit_pass_stmt(self, o: PassStmt) -> None: pass @@ -3571,30 +2905,7 @@ def loop_contents( handle_loop(loop_params) def visit_decorator(self, dec: Decorator) -> None: - func_ir, func_reg = self.gen_func_item(dec.func, dec.func.name, - self.mapper.fdef_to_sig(dec.func)) - - if dec.func in self.nested_fitems: - assert func_reg is not None - decorated_func = self.load_decorated_func(dec.func, func_reg) - self.assign(self.get_func_target(dec.func), decorated_func, dec.func.line) - func_reg = decorated_func - else: - # Obtain the the function name in order to construct the name of the helper function. - name = dec.func.fullname.split('.')[-1] - helper_name = decorator_helper_name(name) - - # Load the callable object representing the non-decorated function, and decorate it. - orig_func = self.load_global_str(helper_name, dec.line) - decorated_func = self.load_decorated_func(dec.func, orig_func) - - # Set the callable object representing the decorated function as a global. 
- self.primitive_op(dict_set_item_op, - [self.load_globals_dict(), - self.load_static_unicode(dec.func.name), decorated_func], - decorated_func.line) - - self.functions.append(func_ir) + BuildFuncIR(self).visit_decorator(dec) def visit_del_stmt(self, o: DelStmt) -> None: self.visit_del_item(self.get_assignment_target(o.expr), o.line) @@ -3642,114 +2953,10 @@ def visit_super_expr(self, o: SuperExpr) -> Value: return self.py_get_attr(res, o.name, o.line) def visit_yield_expr(self, expr: YieldExpr) -> Value: - if expr.expr: - retval = self.accept(expr.expr) - else: - retval = self.none() - return self.emit_yield(retval, expr.line) - - def emit_yield(self, val: Value, line: int) -> Value: - retval = self.coerce(val, self.ret_types[-1], line) - - cls = self.fn_info.generator_class - # Create a new block for the instructions immediately following the yield expression, and - # set the next label so that the next time '__next__' is called on the generator object, - # the function continues at the new block. - next_block = BasicBlock() - next_label = len(cls.blocks) - cls.blocks.append(next_block) - self.assign(cls.next_label_target, self.add(LoadInt(next_label)), line) - self.add(Return(retval)) - self.activate_block(next_block) - - self.add_raise_exception_blocks_to_generator_class(line) - - assert cls.send_arg_reg is not None - return cls.send_arg_reg - - def handle_yield_from_and_await(self, o: Union[YieldFromExpr, AwaitExpr]) -> Value: - # This is basically an implementation of the code in PEP 380. - - # TODO: do we want to use the right types here? 
- result = self.alloc_temp(object_rprimitive) - to_yield_reg = self.alloc_temp(object_rprimitive) - received_reg = self.alloc_temp(object_rprimitive) - - if isinstance(o, YieldFromExpr): - iter_val = self.primitive_op(iter_op, [self.accept(o.expr)], o.line) - else: - iter_val = self.primitive_op(coro_op, [self.accept(o.expr)], o.line) - - iter_reg = self.maybe_spill_assignable(iter_val) - - stop_block, main_block, done_block = BasicBlock(), BasicBlock(), BasicBlock() - _y_init = self.primitive_op(next_raw_op, [self.read(iter_reg)], o.line) - self.add(Branch(_y_init, stop_block, main_block, Branch.IS_ERROR)) - - # Try extracting a return value from a StopIteration and return it. - # If it wasn't, this reraises the exception. - self.activate_block(stop_block) - self.assign(result, self.primitive_op(check_stop_op, [], o.line), o.line) - self.goto(done_block) - - self.activate_block(main_block) - self.assign(to_yield_reg, _y_init, o.line) - - # OK Now the main loop! - loop_block = BasicBlock() - self.goto_and_activate(loop_block) - - def try_body() -> None: - self.assign(received_reg, self.emit_yield(self.read(to_yield_reg), o.line), o.line) - - def except_body() -> None: - # The body of the except is all implemented in a C function to - # reduce how much code we need to generate. It returns a value - # indicating whether to break or yield (or raise an exception). - res = self.primitive_op(yield_from_except_op, [self.read(iter_reg)], o.line) - to_stop = self.add(TupleGet(res, 0, o.line)) - val = self.add(TupleGet(res, 1, o.line)) - - ok, stop = BasicBlock(), BasicBlock() - self.add(Branch(to_stop, stop, ok, Branch.BOOL_EXPR)) - - # The exception got swallowed. Continue, yielding the returned value - self.activate_block(ok) - self.assign(to_yield_reg, val, o.line) - self.nonlocal_control[-1].gen_continue(self, o.line) - - # The exception was a StopIteration. Stop iterating. 
- self.activate_block(stop) - self.assign(result, val, o.line) - self.nonlocal_control[-1].gen_break(self, o.line) - - def else_body() -> None: - # Do a next() or a .send(). It will return NULL on exception - # but it won't automatically propagate. - _y = self.primitive_op(send_op, [self.read(iter_reg), self.read(received_reg)], o.line) - ok, stop = BasicBlock(), BasicBlock() - self.add(Branch(_y, stop, ok, Branch.IS_ERROR)) - - # Everything's fine. Yield it. - self.activate_block(ok) - self.assign(to_yield_reg, _y, o.line) - self.nonlocal_control[-1].gen_continue(self, o.line) - - # Try extracting a return value from a StopIteration and return it. - # If it wasn't, this rereaises the exception. - self.activate_block(stop) - self.assign(result, self.primitive_op(check_stop_op, [], o.line), o.line) - self.nonlocal_control[-1].gen_break(self, o.line) - - self.push_loop_stack(loop_block, done_block) - self.visit_try_except(try_body, [(None, None, except_body)], else_body, o.line) - self.pop_loop_stack() - - self.goto_and_activate(done_block) - return self.read(result) + return BuildFuncIR(self).visit_yield_expr(expr) def visit_yield_from_expr(self, o: YieldFromExpr) -> Value: - return self.handle_yield_from_and_await(o) + return BuildFuncIR(self).visit_yield_from_expr(o) def visit_ellipsis(self, o: EllipsisExpr) -> Value: return self.primitive_op(ellipsis_op, [], o.line) @@ -3939,7 +3146,7 @@ def flatten_classes(self, arg: Union[RefExpr, TupleExpr]) -> Optional[List[Class return res def visit_await_expr(self, o: AwaitExpr) -> Value: - return self.handle_yield_from_and_await(o) + return BuildFuncIR(self).visit_await_expr(o) # Unimplemented constructs def visit_assignment_expr(self, o: AssignmentExpr) -> Value: @@ -4172,66 +3379,6 @@ def none(self) -> Value: def none_object(self) -> Value: return self.add(PrimitiveOp([], none_object_op, line=-1)) - def load_outer_env(self, base: Value, outer_env: Environment) -> Value: - """Loads the environment class for a given base 
into a register. - - Additionally, iterates through all of the SymbolNode and AssignmentTarget instances of the - environment at the given index's symtable, and adds those instances to the environment of - the current environment. This is done so that the current environment can access outer - environment variables without having to reload all of the environment registers. - - Returns the register where the environment class was loaded. - """ - env = self.add(GetAttr(base, ENV_ATTR_NAME, self.fn_info.fitem.line)) - assert isinstance(env.type, RInstance), '{} must be of type RInstance'.format(env) - - for symbol, target in outer_env.symtable.items(): - env.type.class_ir.attributes[symbol.name] = target.type - symbol_target = AssignmentTargetAttr(env, symbol.name) - self.environment.add_target(symbol, symbol_target) - - return env - - def load_outer_envs(self, base: ImplicitClass) -> None: - index = len(self.environments) - 2 - - # Load the first outer environment. This one is special because it gets saved in the - # FuncInfo instance's prev_env_reg field. - if index > 1: - # outer_env = self.fn_infos[index].environment - outer_env = self.environments[index] - if isinstance(base, GeneratorClass): - base.prev_env_reg = self.load_outer_env(base.curr_env_reg, outer_env) - else: - base.prev_env_reg = self.load_outer_env(base.self_reg, outer_env) - env_reg = base.prev_env_reg - index -= 1 - - # Load the remaining outer environments into registers. - while index > 1: - # outer_env = self.fn_infos[index].environment - outer_env = self.environments[index] - env_reg = self.load_outer_env(env_reg, outer_env) - index -= 1 - - def load_env_registers(self) -> None: - """Loads the registers for the current FuncItem being visited. - - Adds the arguments of the FuncItem to the environment. If the FuncItem is nested inside of - another function, then this also loads all of the outer environments of the FuncItem into - registers so that they can be used when accessing free variables. 
- """ - self.add_args_to_env(local=True) - - fn_info = self.fn_info - fitem = fn_info.fitem - if fn_info.is_nested: - self.load_outer_envs(fn_info.callable_class) - # If this is a FuncDef, then make sure to load the FuncDef into its own environment - # class so that the function can be called recursively. - if isinstance(fitem, FuncDef): - self.setup_func_for_recursive_call(fitem, fn_info.callable_class) - def add_var_to_env_class(self, var: SymbolNode, rtype: RType, @@ -4252,504 +3399,6 @@ def add_var_to_env_class(self, # the environment class. return self.environment.add_target(var, attr_target) - def setup_func_for_recursive_call(self, fdef: FuncDef, base: ImplicitClass) -> None: - """ - Adds the instance of the callable class representing the given FuncDef to a register in the - environment so that the function can be called recursively. Note that this needs to be done - only for nested functions. - """ - # First, set the attribute of the environment class so that GetAttr can be called on it. - prev_env = self.fn_infos[-2].env_class - prev_env.attributes[fdef.name] = self.type_to_rtype(fdef.type) - - if isinstance(base, GeneratorClass): - # If we are dealing with a generator class, then we need to first get the register - # holding the current environment class, and load the previous environment class from - # there. - prev_env_reg = self.add(GetAttr(base.curr_env_reg, ENV_ATTR_NAME, -1)) - else: - prev_env_reg = base.prev_env_reg - - # Obtain the instance of the callable class representing the FuncDef, and add it to the - # current environment. 
- val = self.add(GetAttr(prev_env_reg, fdef.name, -1)) - target = self.environment.add_local_reg(fdef, object_rprimitive) - self.assign(target, val, -1) - - def setup_env_for_generator_class(self) -> None: - """Populates the environment for a generator class.""" - fitem = self.fn_info.fitem - cls = self.fn_info.generator_class - self_target = self.add_self_to_env(cls.ir) - - # Add the type, value, and traceback variables to the environment. - exc_type = self.environment.add_local(Var('type'), object_rprimitive, is_arg=True) - exc_val = self.environment.add_local(Var('value'), object_rprimitive, is_arg=True) - exc_tb = self.environment.add_local(Var('traceback'), object_rprimitive, is_arg=True) - # TODO: Use the right type here instead of object? - exc_arg = self.environment.add_local(Var('arg'), object_rprimitive, is_arg=True) - - cls.exc_regs = (exc_type, exc_val, exc_tb) - cls.send_arg_reg = exc_arg - - cls.self_reg = self.read(self_target, fitem.line) - cls.curr_env_reg = self.load_outer_env(cls.self_reg, self.environment) - - # Define a variable representing the label to go to the next time the '__next__' function - # of the generator is called, and add it as an attribute to the environment class. - cls.next_label_target = self.add_var_to_env_class(Var(NEXT_LABEL_ATTR_NAME), - int_rprimitive, - cls, - reassign=False) - - # Add arguments from the original generator function to the generator class' environment. - self.add_args_to_env(local=False, base=cls, reassign=False) - - # Set the next label register for the generator class. 
- cls.next_label_reg = self.read(cls.next_label_target, fitem.line) - - def add_args_to_env(self, - local: bool = True, - base: Optional[Union[FuncInfo, ImplicitClass]] = None, - reassign: bool = True) -> None: - fn_info = self.fn_info - if local: - for arg in fn_info.fitem.arguments: - rtype = self.type_to_rtype(arg.variable.type) - self.environment.add_local_reg(arg.variable, rtype, is_arg=True) - else: - for arg in fn_info.fitem.arguments: - if self.is_free_variable(arg.variable) or fn_info.is_generator: - rtype = self.type_to_rtype(arg.variable.type) - assert base is not None, 'base cannot be None for adding nonlocal args' - self.add_var_to_env_class(arg.variable, rtype, base, reassign=reassign) - - def gen_func_ns(self) -> str: - """Generates a namespace for a nested function using its outer function names.""" - return '_'.join(info.name + ('' if not info.class_name else '_' + info.class_name) - for info in self.fn_infos - if info.name and info.name != '') - - def setup_callable_class(self) -> None: - """Generates a callable class representing a nested function or a function within a - non-extension class and sets up the 'self' variable for that class. - - This takes the most recently visited function and returns a ClassIR to represent that - function. Each callable class contains an environment attribute with points to another - ClassIR representing the environment class where some of its variables can be accessed. - Note that its '__call__' method is not yet implemented, and is implemented in the - add_call_to_callable_class function. - - Returns a newly constructed ClassIR representing the callable class for the nested - function. - """ - - # Check to see that the name has not already been taken. If so, rename the class. We allow - # multiple uses of the same function name because this is valid in if-else blocks. 
Example: - # if True: - # def foo(): ----> foo_obj() - # return True - # else: - # def foo(): ----> foo_obj_0() - # return False - name = base_name = '{}_obj'.format(self.fn_info.namespaced_name()) - count = 0 - while name in self.callable_class_names: - name = base_name + '_' + str(count) - count += 1 - self.callable_class_names.add(name) - - # Define the actual callable class ClassIR, and set its environment to point at the - # previously defined environment class. - callable_class_ir = ClassIR(name, self.module_name, is_generated=True) - - # The functools @wraps decorator attempts to call setattr on nested functions, so - # we create a dict for these nested functions. - # https://github.com/python/cpython/blob/3.7/Lib/functools.py#L58 - if self.fn_info.is_nested: - callable_class_ir.has_dict = True - - # If the enclosing class doesn't contain nested (which will happen if - # this is a toplevel lambda), don't set up an environment. - if self.fn_infos[-2].contains_nested: - callable_class_ir.attributes[ENV_ATTR_NAME] = RInstance(self.fn_infos[-2].env_class) - callable_class_ir.mro = [callable_class_ir] - self.fn_info.callable_class = ImplicitClass(callable_class_ir) - self.classes.append(callable_class_ir) - - # Add a 'self' variable to the callable class' environment, and store that variable in a - # register to be accessed later. - self_target = self.add_self_to_env(callable_class_ir) - self.fn_info.callable_class.self_reg = self.read(self_target, self.fn_info.fitem.line) - - def add_call_to_callable_class(self, - blocks: List[BasicBlock], - sig: FuncSignature, - env: Environment, - fn_info: FuncInfo) -> FuncIR: - """Generates a '__call__' method for a callable class representing a nested function. - - This takes the blocks, signature, and environment associated with a function definition and - uses those to build the '__call__' method of a given callable class, used to represent that - function. 
Note that a 'self' parameter is added to its list of arguments, as the nested - function becomes a class method. - """ - sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),) + sig.args, sig.ret_type) - call_fn_decl = FuncDecl('__call__', fn_info.callable_class.ir.name, self.module_name, sig) - call_fn_ir = FuncIR(call_fn_decl, blocks, env, - fn_info.fitem.line, traceback_name=fn_info.fitem.name) - fn_info.callable_class.ir.methods['__call__'] = call_fn_ir - return call_fn_ir - - def add_get_to_callable_class(self, fn_info: FuncInfo) -> None: - """Generates the '__get__' method for a callable class.""" - line = fn_info.fitem.line - self.enter(fn_info) - - vself = self.read(self.environment.add_local_reg(Var(SELF_NAME), object_rprimitive, True)) - instance = self.environment.add_local_reg(Var('instance'), object_rprimitive, True) - self.environment.add_local_reg(Var('owner'), object_rprimitive, True) - - # If accessed through the class, just return the callable - # object. If accessed through an object, create a new bound - # instance method object. 
- instance_block, class_block = BasicBlock(), BasicBlock() - comparison = self.binary_op(self.read(instance), self.none_object(), 'is', line) - self.add_bool_branch(comparison, class_block, instance_block) - - self.activate_block(class_block) - self.add(Return(vself)) - - self.activate_block(instance_block) - self.add(Return(self.primitive_op(method_new_op, [vself, self.read(instance)], line))) - - blocks, env, _, fn_info = self.leave() - - sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive), - RuntimeArg('instance', object_rprimitive), - RuntimeArg('owner', object_rprimitive)), - object_rprimitive) - get_fn_decl = FuncDecl('__get__', fn_info.callable_class.ir.name, self.module_name, sig) - get_fn_ir = FuncIR(get_fn_decl, blocks, env) - fn_info.callable_class.ir.methods['__get__'] = get_fn_ir - self.functions.append(get_fn_ir) - - def instantiate_callable_class(self, fn_info: FuncInfo) -> Value: - """ - Assigns a callable class to a register named after the given function definition. Note - that fn_info refers to the function being assigned, whereas self.fn_info refers to the - function encapsulating the function being turned into a callable class. - """ - fitem = fn_info.fitem - func_reg = self.add(Call(fn_info.callable_class.ir.ctor, [], fitem.line)) - - # Set the callable class' environment attribute to point at the environment class - # defined in the callable class' immediate outer scope. Note that there are three possible - # environment class registers we may use. If the encapsulating function is: - # - a generator function, then the callable class is instantiated from the generator class' - # __next__' function, and hence the generator class' environment register is used. - # - a nested function, then the callable class is instantiated from the current callable - # class' '__call__' function, and hence the callable class' environment register is used. - # - neither, then we use the environment register of the original function. 
- curr_env_reg = None - if self.fn_info.is_generator: - curr_env_reg = self.fn_info.generator_class.curr_env_reg - elif self.fn_info.is_nested: - curr_env_reg = self.fn_info.callable_class.curr_env_reg - elif self.fn_info.contains_nested: - curr_env_reg = self.fn_info.curr_env_reg - if curr_env_reg: - self.add(SetAttr(func_reg, ENV_ATTR_NAME, curr_env_reg, fitem.line)) - return func_reg - - def setup_env_class(self) -> ClassIR: - """Generates a class representing a function environment. - - Note that the variables in the function environment are not actually populated here. This - is because when the environment class is generated, the function environment has not yet - been visited. This behavior is allowed so that when the compiler visits nested functions, - it can use the returned ClassIR instance to figure out free variables it needs to access. - The remaining attributes of the environment class are populated when the environment - registers are loaded. - - Returns a ClassIR representing an environment for a function containing a nested function. - """ - env_class = ClassIR('{}_env'.format(self.fn_info.namespaced_name()), - self.module_name, is_generated=True) - env_class.attributes[SELF_NAME] = RInstance(env_class) - if self.fn_info.is_nested: - # If the function is nested, its environment class must contain an environment - # attribute pointing to its encapsulating functions' environment class. - env_class.attributes[ENV_ATTR_NAME] = RInstance(self.fn_infos[-2].env_class) - env_class.mro = [env_class] - self.fn_info.env_class = env_class - self.classes.append(env_class) - return env_class - - def finalize_env_class(self) -> None: - """Generates, instantiates, and sets up the environment of an environment class.""" - - self.instantiate_env_class() - - # Iterate through the function arguments and replace local definitions (using registers) - # that were previously added to the environment with references to the function's - # environment class. 
- if self.fn_info.is_nested: - self.add_args_to_env(local=False, base=self.fn_info.callable_class) - else: - self.add_args_to_env(local=False, base=self.fn_info) - - def instantiate_env_class(self) -> Value: - """Assigns an environment class to a register named after the given function definition.""" - curr_env_reg = self.add(Call(self.fn_info.env_class.ctor, [], self.fn_info.fitem.line)) - - if self.fn_info.is_nested: - self.fn_info.callable_class._curr_env_reg = curr_env_reg - self.add(SetAttr(curr_env_reg, - ENV_ATTR_NAME, - self.fn_info.callable_class.prev_env_reg, - self.fn_info.fitem.line)) - else: - self.fn_info._curr_env_reg = curr_env_reg - - return curr_env_reg - - def gen_generator_func(self) -> None: - self.setup_generator_class() - self.load_env_registers() - self.gen_arg_defaults() - self.finalize_env_class() - self.add(Return(self.instantiate_generator_class())) - - def setup_generator_class(self) -> ClassIR: - name = '{}_gen'.format(self.fn_info.namespaced_name()) - - generator_class_ir = ClassIR(name, self.module_name, is_generated=True) - generator_class_ir.attributes[ENV_ATTR_NAME] = RInstance(self.fn_info.env_class) - generator_class_ir.mro = [generator_class_ir] - - self.classes.append(generator_class_ir) - self.fn_info.generator_class = GeneratorClass(generator_class_ir) - return generator_class_ir - - def add_helper_to_generator_class(self, - blocks: List[BasicBlock], - sig: FuncSignature, - env: Environment, - fn_info: FuncInfo) -> FuncDecl: - """Generates a helper method for a generator class, called by '__next__' and 'throw'.""" - sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive), - RuntimeArg('type', object_rprimitive), - RuntimeArg('value', object_rprimitive), - RuntimeArg('traceback', object_rprimitive), - RuntimeArg('arg', object_rprimitive) - ), sig.ret_type) - helper_fn_decl = FuncDecl('__mypyc_generator_helper__', fn_info.generator_class.ir.name, - self.module_name, sig) - helper_fn_ir = FuncIR(helper_fn_decl, blocks, 
env, - fn_info.fitem.line, traceback_name=fn_info.fitem.name) - fn_info.generator_class.ir.methods['__mypyc_generator_helper__'] = helper_fn_ir - self.functions.append(helper_fn_ir) - return helper_fn_decl - - def add_iter_to_generator_class(self, fn_info: FuncInfo) -> None: - """Generates the '__iter__' method for a generator class.""" - self.enter(fn_info) - self_target = self.add_self_to_env(fn_info.generator_class.ir) - self.add(Return(self.read(self_target, fn_info.fitem.line))) - blocks, env, _, fn_info = self.leave() - - # Next, add the actual function as a method of the generator class. - sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),), object_rprimitive) - iter_fn_decl = FuncDecl('__iter__', fn_info.generator_class.ir.name, self.module_name, sig) - iter_fn_ir = FuncIR(iter_fn_decl, blocks, env) - fn_info.generator_class.ir.methods['__iter__'] = iter_fn_ir - self.functions.append(iter_fn_ir) - - def add_next_to_generator_class(self, - fn_info: FuncInfo, - fn_decl: FuncDecl, - sig: FuncSignature) -> None: - """Generates the '__next__' method for a generator class.""" - self.enter(fn_info) - self_reg = self.read(self.add_self_to_env(fn_info.generator_class.ir)) - none_reg = self.none_object() - - # Call the helper function with error flags set to Py_None, and return that result. 
- result = self.add(Call(fn_decl, [self_reg, none_reg, none_reg, none_reg, none_reg], - fn_info.fitem.line)) - self.add(Return(result)) - blocks, env, _, fn_info = self.leave() - - sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),), sig.ret_type) - next_fn_decl = FuncDecl('__next__', fn_info.generator_class.ir.name, self.module_name, sig) - next_fn_ir = FuncIR(next_fn_decl, blocks, env) - fn_info.generator_class.ir.methods['__next__'] = next_fn_ir - self.functions.append(next_fn_ir) - - def add_send_to_generator_class(self, - fn_info: FuncInfo, - fn_decl: FuncDecl, - sig: FuncSignature) -> None: - """Generates the 'send' method for a generator class.""" - # FIXME: this is basically the same as add_next... - self.enter(fn_info) - self_reg = self.read(self.add_self_to_env(fn_info.generator_class.ir)) - arg = self.environment.add_local_reg(Var('arg'), object_rprimitive, True) - none_reg = self.none_object() - - # Call the helper function with error flags set to Py_None, and return that result. - result = self.add(Call(fn_decl, [self_reg, none_reg, none_reg, none_reg, self.read(arg)], - fn_info.fitem.line)) - self.add(Return(result)) - blocks, env, _, fn_info = self.leave() - - sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive), - RuntimeArg('arg', object_rprimitive),), sig.ret_type) - next_fn_decl = FuncDecl('send', fn_info.generator_class.ir.name, self.module_name, sig) - next_fn_ir = FuncIR(next_fn_decl, blocks, env) - fn_info.generator_class.ir.methods['send'] = next_fn_ir - self.functions.append(next_fn_ir) - - def add_throw_to_generator_class(self, - fn_info: FuncInfo, - fn_decl: FuncDecl, - sig: FuncSignature) -> None: - """Generates the 'throw' method for a generator class.""" - self.enter(fn_info) - self_reg = self.read(self.add_self_to_env(fn_info.generator_class.ir)) - - # Add the type, value, and traceback variables to the environment. 
- typ = self.environment.add_local_reg(Var('type'), object_rprimitive, True) - val = self.environment.add_local_reg(Var('value'), object_rprimitive, True) - tb = self.environment.add_local_reg(Var('traceback'), object_rprimitive, True) - - # Because the value and traceback arguments are optional and hence can be NULL if not - # passed in, we have to assign them Py_None if they are not passed in. - none_reg = self.none_object() - self.assign_if_null(val, lambda: none_reg, self.fn_info.fitem.line) - self.assign_if_null(tb, lambda: none_reg, self.fn_info.fitem.line) - - # Call the helper function using the arguments passed in, and return that result. - result = self.add(Call(fn_decl, - [self_reg, self.read(typ), self.read(val), self.read(tb), none_reg], - fn_info.fitem.line)) - self.add(Return(result)) - blocks, env, _, fn_info = self.leave() - - # Create the FuncSignature for the throw function. NOte that the value and traceback fields - # are optional, and are assigned to if they are not passed in inside the body of the throw - # function. - sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive), - RuntimeArg('type', object_rprimitive), - RuntimeArg('value', object_rprimitive, ARG_OPT), - RuntimeArg('traceback', object_rprimitive, ARG_OPT)), - sig.ret_type) - - throw_fn_decl = FuncDecl('throw', fn_info.generator_class.ir.name, self.module_name, sig) - throw_fn_ir = FuncIR(throw_fn_decl, blocks, env) - fn_info.generator_class.ir.methods['throw'] = throw_fn_ir - self.functions.append(throw_fn_ir) - - def add_close_to_generator_class(self, fn_info: FuncInfo) -> None: - """Generates the '__close__' method for a generator class.""" - # TODO: Currently this method just triggers a runtime error, - # we should fill this out eventually. 
- self.enter(fn_info) - self.add_self_to_env(fn_info.generator_class.ir) - self.add(RaiseStandardError(RaiseStandardError.RUNTIME_ERROR, - 'close method on generator classes uimplemented', - fn_info.fitem.line)) - self.add(Unreachable()) - blocks, env, _, fn_info = self.leave() - - # Next, add the actual function as a method of the generator class. - sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),), object_rprimitive) - close_fn_decl = FuncDecl('close', fn_info.generator_class.ir.name, self.module_name, sig) - close_fn_ir = FuncIR(close_fn_decl, blocks, env) - fn_info.generator_class.ir.methods['close'] = close_fn_ir - self.functions.append(close_fn_ir) - - def add_await_to_generator_class(self, fn_info: FuncInfo) -> None: - """Generates the '__await__' method for a generator class.""" - self.enter(fn_info) - self_target = self.add_self_to_env(fn_info.generator_class.ir) - self.add(Return(self.read(self_target, fn_info.fitem.line))) - blocks, env, _, fn_info = self.leave() - - # Next, add the actual function as a method of the generator class. 
- sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),), object_rprimitive) - await_fn_decl = FuncDecl('__await__', fn_info.generator_class.ir.name, - self.module_name, sig) - await_fn_ir = FuncIR(await_fn_decl, blocks, env) - fn_info.generator_class.ir.methods['__await__'] = await_fn_ir - self.functions.append(await_fn_ir) - - def create_switch_for_generator_class(self) -> None: - self.add(Goto(self.fn_info.generator_class.switch_block)) - self.fn_info.generator_class.blocks.append(self.new_block()) - - def populate_switch_for_generator_class(self) -> None: - cls = self.fn_info.generator_class - line = self.fn_info.fitem.line - - self.activate_block(cls.switch_block) - for label, true_block in enumerate(cls.blocks): - false_block = BasicBlock() - comparison = self.binary_op(cls.next_label_reg, self.add(LoadInt(label)), '==', line) - self.add_bool_branch(comparison, true_block, false_block) - self.activate_block(false_block) - - self.add(RaiseStandardError(RaiseStandardError.STOP_ITERATION, None, line)) - self.add(Unreachable()) - - def instantiate_generator_class(self) -> Value: - fitem = self.fn_info.fitem - generator_reg = self.add(Call(self.fn_info.generator_class.ir.ctor, [], fitem.line)) - - # Get the current environment register. If the current function is nested, then the - # generator class gets instantiated from the callable class' '__call__' method, and hence - # we use the callable class' environment register. Otherwise, we use the original - # function's environment register. - if self.fn_info.is_nested: - curr_env_reg = self.fn_info.callable_class.curr_env_reg - else: - curr_env_reg = self.fn_info.curr_env_reg - - # Set the generator class' environment attribute to point at the environment class - # defined in the current scope. - self.add(SetAttr(generator_reg, ENV_ATTR_NAME, curr_env_reg, fitem.line)) - - # Set the generator class' environment class' NEXT_LABEL_ATTR_NAME attribute to 0. 
- zero_reg = self.add(LoadInt(0)) - self.add(SetAttr(curr_env_reg, NEXT_LABEL_ATTR_NAME, zero_reg, fitem.line)) - return generator_reg - - def add_raise_exception_blocks_to_generator_class(self, line: int) -> None: - """ - Generates blocks to check if error flags are set while calling the helper method for - generator functions, and raises an exception if those flags are set. - """ - cls = self.fn_info.generator_class - assert cls.exc_regs is not None - exc_type, exc_val, exc_tb = cls.exc_regs - - # Check to see if an exception was raised. - error_block = BasicBlock() - ok_block = BasicBlock() - comparison = self.binary_op(exc_type, self.none_object(), 'is not', line) - self.add_bool_branch(comparison, error_block, ok_block) - - self.activate_block(error_block) - self.primitive_op(raise_exception_with_tb_op, [exc_type, exc_val, exc_tb], line) - self.add(Unreachable()) - self.goto_and_activate(ok_block) - - def add_self_to_env(self, cls: ClassIR) -> AssignmentTargetRegister: - return self.environment.add_local_reg(Var(SELF_NAME), - RInstance(cls), - is_arg=True) - def is_builtin_ref_expr(self, expr: RefExpr) -> bool: assert expr.node, "RefExpr not resolved" return '.' in expr.node.fullname and expr.node.fullname.split('.')[0] == 'builtins' @@ -4893,21 +3542,6 @@ def native_args_to_positional(self, return output_args - def get_func_target(self, fdef: FuncDef) -> AssignmentTarget: - """ - Given a FuncDef, return the target associated the instance of its callable class. If the - function was not already defined somewhere, then define it and add it to the current - environment. - """ - if fdef.original_def: - # Get the target associated with the previously defined FuncDef. 
- return self.environment.lookup(fdef.original_def) - - if self.fn_info.is_generator or self.fn_info.contains_nested: - return self.environment.lookup(fdef) - - return self.environment.add_local_reg(fdef, object_rprimitive) - # Lacks a good type because there wasn't a reasonable type in 3.5 :( def catch_errors(self, line: int) -> Any: return catch_errors(self.module_path, line) diff --git a/mypyc/genopscontext.py b/mypyc/genopscontext.py new file mode 100644 index 000000000000..7b54f16854b5 --- /dev/null +++ b/mypyc/genopscontext.py @@ -0,0 +1,167 @@ +from typing import List, Optional, Tuple + +from mypy.nodes import FuncItem + +from mypyc.ops import Value, BasicBlock, AssignmentTarget, ClassIR, INVALID_FUNC_DEF +from mypyc.common import decorator_helper_name + + +class FuncInfo: + """Contains information about functions as they are generated.""" + def __init__(self, + fitem: FuncItem = INVALID_FUNC_DEF, + name: str = '', + class_name: Optional[str] = None, + namespace: str = '', + is_nested: bool = False, + contains_nested: bool = False, + is_decorated: bool = False, + in_non_ext: bool = False) -> None: + self.fitem = fitem + self.name = name if not is_decorated else decorator_helper_name(name) + self.class_name = class_name + self.ns = namespace + # Callable classes implement the '__call__' method, and are used to represent functions + # that are nested inside of other functions. + self._callable_class = None # type: Optional[ImplicitClass] + # Environment classes are ClassIR instances that contain attributes representing the + # variables in the environment of the function they correspond to. Environment classes are + # generated for functions that contain nested functions. + self._env_class = None # type: Optional[ClassIR] + # Generator classes implement the '__next__' method, and are used to represent generators + # returned by generator functions. 
+ self._generator_class = None # type: Optional[GeneratorClass] + # Environment class registers are the local registers associated with instances of an + # environment class, used for getting and setting attributes. curr_env_reg is the register + # associated with the current environment. + self._curr_env_reg = None # type: Optional[Value] + # These are flags denoting whether a given function is nested, contains a nested function, + # is decorated, or is within a non-extension class. + self.is_nested = is_nested + self.contains_nested = contains_nested + self.is_decorated = is_decorated + self.in_non_ext = in_non_ext + + # TODO: add field for ret_type: RType = none_rprimitive + + def namespaced_name(self) -> str: + return '_'.join(x for x in [self.name, self.class_name, self.ns] if x) + + @property + def is_generator(self) -> bool: + return self.fitem.is_generator or self.fitem.is_coroutine + + @property + def callable_class(self) -> 'ImplicitClass': + assert self._callable_class is not None + return self._callable_class + + @callable_class.setter + def callable_class(self, cls: 'ImplicitClass') -> None: + self._callable_class = cls + + @property + def env_class(self) -> ClassIR: + assert self._env_class is not None + return self._env_class + + @env_class.setter + def env_class(self, ir: ClassIR) -> None: + self._env_class = ir + + @property + def generator_class(self) -> 'GeneratorClass': + assert self._generator_class is not None + return self._generator_class + + @generator_class.setter + def generator_class(self, cls: 'GeneratorClass') -> None: + self._generator_class = cls + + @property + def curr_env_reg(self) -> Value: + assert self._curr_env_reg is not None + return self._curr_env_reg + + +class ImplicitClass: + """Contains information regarding classes that are generated as a result of nested functions or + generated functions, but not explicitly defined in the source code. 
+ """ + def __init__(self, ir: ClassIR) -> None: + # The ClassIR instance associated with this class. + self.ir = ir + # The register associated with the 'self' instance for this generator class. + self._self_reg = None # type: Optional[Value] + # Environment class registers are the local registers associated with instances of an + # environment class, used for getting and setting attributes. curr_env_reg is the register + # associated with the current environment. prev_env_reg is the self.__mypyc_env__ field + # associated with the previous environment. + self._curr_env_reg = None # type: Optional[Value] + self._prev_env_reg = None # type: Optional[Value] + + @property + def self_reg(self) -> Value: + assert self._self_reg is not None + return self._self_reg + + @self_reg.setter + def self_reg(self, reg: Value) -> None: + self._self_reg = reg + + @property + def curr_env_reg(self) -> Value: + assert self._curr_env_reg is not None + return self._curr_env_reg + + @curr_env_reg.setter + def curr_env_reg(self, reg: Value) -> None: + self._curr_env_reg = reg + + @property + def prev_env_reg(self) -> Value: + assert self._prev_env_reg is not None + return self._prev_env_reg + + @prev_env_reg.setter + def prev_env_reg(self, reg: Value) -> None: + self._prev_env_reg = reg + + +class GeneratorClass(ImplicitClass): + def __init__(self, ir: ClassIR) -> None: + super().__init__(ir) + # This register holds the label number that the '__next__' function should go to the next + # time it is called. + self._next_label_reg = None # type: Optional[Value] + self._next_label_target = None # type: Optional[AssignmentTarget] + + # These registers hold the error values for the generator object for the case that the + # 'throw' function is called. 
+ self.exc_regs = None # type: Optional[Tuple[Value, Value, Value]] + + # Holds the arg passed to send + self.send_arg_reg = None # type: Optional[Value] + + # The switch block is used to decide which instruction to go using the value held in the + # next-label register. + self.switch_block = BasicBlock() + self.blocks = [] # type: List[BasicBlock] + + @property + def next_label_reg(self) -> Value: + assert self._next_label_reg is not None + return self._next_label_reg + + @next_label_reg.setter + def next_label_reg(self, reg: Value) -> None: + self._next_label_reg = reg + + @property + def next_label_target(self) -> AssignmentTarget: + assert self._next_label_target is not None + return self._next_label_target + + @next_label_target.setter + def next_label_target(self, target: AssignmentTarget) -> None: + self._next_label_target = target diff --git a/mypyc/genopsutil.py b/mypyc/genopsutil.py index a08646c03e38..62122f7a0330 100644 --- a/mypyc/genopsutil.py +++ b/mypyc/genopsutil.py @@ -2,8 +2,11 @@ from mypy.nodes import ( ClassDef, FuncDef, Decorator, OverloadedFuncDef, StrExpr, CallExpr, RefExpr, Expression, - IntExpr, FloatExpr, ARG_NAMED, ARG_NAMED_OPT, ARG_POS, ARG_OPT + IntExpr, FloatExpr, Var, TupleExpr, UnaryExpr, BytesExpr, ARG_NAMED, ARG_NAMED_OPT, ARG_POS, + ARG_OPT, GDEF ) +from mypyc.ops import Environment, ClassIR, RInstance, AssignmentTargetRegister +from mypyc.common import SELF_NAME def is_trait_decorator(d: Expression) -> bool: @@ -102,3 +105,28 @@ def concrete_arg_kind(kind: int) -> int: return ARG_NAMED else: return kind + + +def is_constant(e: Expression) -> bool: + """Check whether we allow an expression to appear as a default value. + + We don't currently properly support storing the evaluated + values for default arguments and default attribute values, so + we restrict what expressions we allow. We allow literals of + primitives types, None, and references to Final global + variables. 
+ """ + return (isinstance(e, (StrExpr, BytesExpr, IntExpr, FloatExpr)) + or (isinstance(e, UnaryExpr) and e.op == '-' + and isinstance(e.expr, (IntExpr, FloatExpr))) + or (isinstance(e, TupleExpr) + and all(is_constant(e) for e in e.items)) + or (isinstance(e, RefExpr) and e.kind == GDEF + and (e.fullname in ('builtins.True', 'builtins.False', 'builtins.None') + or (isinstance(e.node, Var) and e.node.is_final)))) + + +def add_self_to_env(environment: Environment, cls: ClassIR) -> AssignmentTargetRegister: + return environment.add_local_reg( + Var(SELF_NAME), RInstance(cls), is_arg=True + ) From 0bbed9d99b3c4e65cf9b91b8f02ed252ba495cdf Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 17 Feb 2020 09:31:26 +0000 Subject: [PATCH 094/117] [mypyc] Extract more things from mypyc.genops (#8412) I extracted various things that did things not directly related to the main business of genops. --- mypyc/emitmodule.py | 3 +- mypyc/genops.py | 515 ++--------------------------------------- mypyc/genopsmapper.py | 146 ++++++++++++ mypyc/genopsprepare.py | 283 ++++++++++++++++++++++ mypyc/genopsvtable.py | 93 ++++++++ 5 files changed, 537 insertions(+), 503 deletions(-) create mode 100644 mypyc/genopsmapper.py create mode 100644 mypyc/genopsprepare.py create mode 100644 mypyc/genopsvtable.py diff --git a/mypyc/emitmodule.py b/mypyc/emitmodule.py index 435e8c237d76..cb7cd82d9f2f 100644 --- a/mypyc/emitmodule.py +++ b/mypyc/emitmodule.py @@ -20,6 +20,7 @@ from mypy.util import hash_digest from mypyc import genops +from mypyc.genopsprepare import load_type_map from mypyc.common import ( PREFIX, TOP_LEVEL_NAME, INT_PREFIX, MODULE_PREFIX, shared_lib_name, ) @@ -370,7 +371,7 @@ def load_scc_from_cache( )['ir'] for k in scc } modules = deserialize_modules(cache_data, ctx) - genops.load_type_map(mapper, scc, ctx) + load_type_map(mapper, scc, ctx) return modules diff --git a/mypyc/genops.py b/mypyc/genops.py index f2e84af86408..3c0c2d249a40 100644 --- a/mypyc/genops.py +++ 
b/mypyc/genops.py @@ -17,12 +17,11 @@ def f(x: int) -> int: """ from typing import ( - TypeVar, Callable, Dict, List, Tuple, Optional, Union, Sequence, Set, Any, Iterable, cast + TypeVar, Callable, Dict, List, Tuple, Optional, Union, Sequence, Set, Any, cast ) from typing_extensions import overload, NoReturn from collections import OrderedDict import importlib.util -import itertools from mypy.build import Graph from mypy.nodes import ( @@ -40,10 +39,7 @@ def f(x: int) -> int: ARG_NAMED, ARG_STAR, ARG_STAR2, op_methods ) from mypy.types import ( - Type, Instance, CallableType, NoneTyp, TupleType, UnionType, AnyType, TypeVarType, PartialType, - TypeType, Overloaded, TypeOfAny, UninhabitedType, UnboundType, TypedDictType, - LiteralType, - get_proper_type, + Type, Instance, TupleType, AnyType, TypeOfAny, UninhabitedType, get_proper_type ) from mypy.visitor import ExpressionVisitor, StatementVisitor from mypy.checkexpr import map_actuals_to_formals @@ -51,25 +47,22 @@ def f(x: int) -> int: from mypy.util import split_target from mypyc.common import ( - TEMP_ATTR_NAME, MAX_LITERAL_SHORT_INT, TOP_LEVEL_NAME, - FAST_ISINSTANCE_MAX_SUBCLASSES, PROPSET_PREFIX + TEMP_ATTR_NAME, MAX_LITERAL_SHORT_INT, TOP_LEVEL_NAME, FAST_ISINSTANCE_MAX_SUBCLASSES ) from mypyc.prebuildvisitor import PreBuildVisitor from mypyc.ops import ( BasicBlock, AssignmentTarget, AssignmentTargetRegister, AssignmentTargetIndex, AssignmentTargetAttr, AssignmentTargetTuple, Environment, Op, LoadInt, RType, Value, Register, - FuncIR, Assign, Branch, Goto, RuntimeArg, Call, Box, Unbox, Cast, RTuple, Unreachable, + FuncIR, Assign, Branch, Goto, Call, Box, Unbox, Cast, RTuple, Unreachable, TupleGet, TupleSet, ClassIR, NonExtClassInfo, RInstance, ModuleIR, ModuleIRs, GetAttr, SetAttr, LoadStatic, InitStatic, MethodCall, INVALID_FUNC_DEF, int_rprimitive, float_rprimitive, bool_rprimitive, list_rprimitive, is_list_rprimitive, dict_rprimitive, set_rprimitive, - str_rprimitive, tuple_rprimitive, none_rprimitive, 
is_none_rprimitive, object_rprimitive, - exc_rtuple, - PrimitiveOp, ControlOp, OpDescription, RegisterOp, - is_object_rprimitive, LiteralsMap, FuncSignature, VTableAttr, VTableMethod, VTableEntries, - NAMESPACE_TYPE, NAMESPACE_MODULE, + str_rprimitive, none_rprimitive, is_none_rprimitive, object_rprimitive, + exc_rtuple, PrimitiveOp, ControlOp, OpDescription, RegisterOp, is_object_rprimitive, + FuncSignature, NAMESPACE_TYPE, NAMESPACE_MODULE, RaiseStandardError, LoadErrorValue, NO_TRACEBACK_LINE_NO, FuncDecl, - FUNC_NORMAL, FUNC_STATICMETHOD, FUNC_CLASSMETHOD, - RUnion, optional_value_type, all_concrete_classes, DeserMaps, + FUNC_STATICMETHOD, FUNC_CLASSMETHOD, RUnion, optional_value_type, + all_concrete_classes ) from mypyc.ops_primitive import binary_ops, unary_ops, func_ops, method_ops, name_ref_ops from mypyc.ops_list import ( @@ -95,7 +88,7 @@ def f(x: int) -> int: from mypyc.genops_for import ForGenerator, ForRange, ForList, ForIterable, ForEnumerate, ForZip from mypyc.rt_subtype import is_runtime_subtype from mypyc.subtype import is_subtype -from mypyc.sametype import is_same_type, is_same_method_signature +from mypyc.sametype import is_same_type from mypyc.crash import catch_errors from mypyc.options import CompilerOptions from mypyc.errors import Errors @@ -103,12 +96,12 @@ def f(x: int) -> int: NonlocalControl, BaseNonlocalControl, LoopNonlocalControl, ExceptNonlocalControl, FinallyNonlocalControl, TryFinallyNonlocalControl, GeneratorNonlocalControl ) -from mypyc.genopsutil import ( - is_dataclass, get_func_def, get_mypyc_attrs, is_extension_class, is_trait -) from mypyc.genclass import BuildClassIR from mypyc.genfunc import BuildFuncIR from mypyc.genopscontext import FuncInfo, ImplicitClass +from mypyc.genopsmapper import Mapper +from mypyc.genopsvtable import compute_vtable +from mypyc.genopsprepare import build_type_map GenFunc = Callable[[], None] DictEntry = Tuple[Optional[Value], Value] @@ -124,62 +117,6 @@ class UnsupportedException(Exception): 
strict_optional_dec = cast(Callable[[F], F], strict_optional_set(True)) -def build_type_map(mapper: 'Mapper', - modules: List[MypyFile], - graph: Graph, - types: Dict[Expression, Type], - options: CompilerOptions, - errors: Errors) -> None: - # Collect all classes defined in everything we are compiling - classes = [] - for module in modules: - module_classes = [node for node in module.defs if isinstance(node, ClassDef)] - classes.extend([(module, cdef) for cdef in module_classes]) - - # Collect all class mappings so that we can bind arbitrary class name - # references even if there are import cycles. - for module, cdef in classes: - class_ir = ClassIR(cdef.name, module.fullname, is_trait(cdef), - is_abstract=cdef.info.is_abstract) - class_ir.is_ext_class = is_extension_class(cdef) - # If global optimizations are disabled, turn of tracking of class children - if not options.global_opts: - class_ir.children = None - mapper.type_to_ir[cdef.info] = class_ir - - # Populate structural information in class IR for extension classes. - for module, cdef in classes: - with catch_errors(module.path, cdef.line): - if mapper.type_to_ir[cdef.info].is_ext_class: - prepare_class_def(module.path, module.fullname, cdef, errors, mapper) - else: - prepare_non_ext_class_def(module.path, module.fullname, cdef, errors, mapper) - - # Collect all the functions also. We collect from the symbol table - # so that we can easily pick out the right copy of a function that - # is conditionally defined. - for module in modules: - for func in get_module_func_defs(module): - prepare_func_def(module.fullname, None, func, mapper) - # TODO: what else? 
- - -def load_type_map(mapper: 'Mapper', - modules: List[MypyFile], - deser_ctx: DeserMaps) -> None: - """Populate a Mapper with deserialized IR from a list of modules.""" - for module in modules: - for name, node in module.names.items(): - if isinstance(node.node, TypeInfo): - ir = deser_ctx.classes[node.node.fullname] - mapper.type_to_ir[node.node] = ir - mapper.func_to_decl[node.node] = ir.ctor - - for module in modules: - for func in get_module_func_defs(module): - mapper.func_to_decl[func] = deser_ctx.functions[func.fullname].decl - - @strict_optional_dec # Turn on strict optional for any type manipulations we do def build_ir(modules: List[MypyFile], graph: Graph, @@ -223,432 +160,6 @@ def build_ir(modules: List[MypyFile], return result -def get_module_func_defs(module: MypyFile) -> Iterable[FuncDef]: - """Collect all of the (non-method) functions declared in a module.""" - for name, node in module.names.items(): - # We need to filter out functions that are imported or - # aliases. The best way to do this seems to be by - # checking that the fullname matches. - if (isinstance(node.node, (FuncDef, Decorator, OverloadedFuncDef)) - and node.fullname == module.fullname + '.' + name): - yield get_func_def(node.node) - - -def specialize_parent_vtable(cls: ClassIR, parent: ClassIR) -> VTableEntries: - """Generate the part of a vtable corresponding to a parent class or trait""" - updated = [] - for entry in parent.vtable_entries: - if isinstance(entry, VTableMethod): - # Find the original method corresponding to this vtable entry. - # (This may not be the method in the entry, if it was overridden.) 
- orig_parent_method = entry.cls.get_method(entry.name) - assert orig_parent_method - method_cls = cls.get_method_and_class(entry.name) - if method_cls: - child_method, defining_cls = method_cls - # TODO: emit a wrapper for __init__ that raises or something - if (is_same_method_signature(orig_parent_method.sig, child_method.sig) - or orig_parent_method.name == '__init__'): - entry = VTableMethod(entry.cls, entry.name, child_method, entry.shadow_method) - else: - entry = VTableMethod(entry.cls, entry.name, - defining_cls.glue_methods[(entry.cls, entry.name)], - entry.shadow_method) - else: - # If it is an attribute from a trait, we need to find out - # the real class it got mixed in at and point to that. - if parent.is_trait: - _, origin_cls = cls.attr_details(entry.name) - entry = VTableAttr(origin_cls, entry.name, entry.is_setter) - updated.append(entry) - return updated - - -def compute_vtable(cls: ClassIR) -> None: - """Compute the vtable structure for a class.""" - if cls.vtable is not None: return - - if not cls.is_generated: - cls.has_dict = any(x.inherits_python for x in cls.mro) - - for t in cls.mro[1:]: - # Make sure all ancestors are processed first - compute_vtable(t) - # Merge attributes from traits into the class - if not t.is_trait: - continue - for name, typ in t.attributes.items(): - if not cls.is_trait and not any(name in b.attributes for b in cls.base_mro): - cls.attributes[name] = typ - - cls.vtable = {} - if cls.base: - assert cls.base.vtable is not None - cls.vtable.update(cls.base.vtable) - cls.vtable_entries = specialize_parent_vtable(cls, cls.base) - - # Include the vtable from the parent classes, but handle method overrides. - entries = cls.vtable_entries - - # Traits need to have attributes in the vtable, since the - # attributes can be at different places in different classes, but - # regular classes can just directly get them. - if cls.is_trait: - # Traits also need to pull in vtable entries for non-trait - # parent classes explicitly. 
- for t in cls.mro: - for attr in t.attributes: - if attr in cls.vtable: - continue - cls.vtable[attr] = len(entries) - entries.append(VTableAttr(t, attr, is_setter=False)) - entries.append(VTableAttr(t, attr, is_setter=True)) - - all_traits = [t for t in cls.mro if t.is_trait] - - for t in [cls] + cls.traits: - for fn in itertools.chain(t.methods.values()): - # TODO: don't generate a new entry when we overload without changing the type - if fn == cls.get_method(fn.name): - cls.vtable[fn.name] = len(entries) - # If the class contains a glue method referring to itself, that is a - # shadow glue method to support interpreted subclasses. - shadow = cls.glue_methods.get((cls, fn.name)) - entries.append(VTableMethod(t, fn.name, fn, shadow)) - - # Compute vtables for all of the traits that the class implements - if not cls.is_trait: - for trait in all_traits: - compute_vtable(trait) - cls.trait_vtables[trait] = specialize_parent_vtable(cls, trait) - - -class Mapper: - """Keep track of mappings from mypy concepts to IR concepts. - - This state is shared across all modules being compiled in all - compilation groups. - """ - - def __init__(self, group_map: Dict[str, Optional[str]]) -> None: - self.group_map = group_map - self.type_to_ir = {} # type: Dict[TypeInfo, ClassIR] - self.func_to_decl = {} # type: Dict[SymbolNode, FuncDecl] - # LiteralsMap maps literal values to a static name. Each - # compilation group has its own LiteralsMap. (Since they can't - # share literals.) 
- self.literals = { - v: OrderedDict() for v in group_map.values() - } # type: Dict[Optional[str], LiteralsMap] - - def type_to_rtype(self, typ: Optional[Type]) -> RType: - if typ is None: - return object_rprimitive - - typ = get_proper_type(typ) - if isinstance(typ, Instance): - if typ.type.fullname == 'builtins.int': - return int_rprimitive - elif typ.type.fullname == 'builtins.float': - return float_rprimitive - elif typ.type.fullname == 'builtins.str': - return str_rprimitive - elif typ.type.fullname == 'builtins.bool': - return bool_rprimitive - elif typ.type.fullname == 'builtins.list': - return list_rprimitive - # Dict subclasses are at least somewhat common and we - # specifically support them, so make sure that dict operations - # get optimized on them. - elif any(cls.fullname == 'builtins.dict' for cls in typ.type.mro): - return dict_rprimitive - elif typ.type.fullname == 'builtins.set': - return set_rprimitive - elif typ.type.fullname == 'builtins.tuple': - return tuple_rprimitive # Varying-length tuple - elif typ.type in self.type_to_ir: - return RInstance(self.type_to_ir[typ.type]) - else: - return object_rprimitive - elif isinstance(typ, TupleType): - # Use our unboxed tuples for raw tuples but fall back to - # being boxed for NamedTuple. - if typ.partial_fallback.type.fullname == 'builtins.tuple': - return RTuple([self.type_to_rtype(t) for t in typ.items]) - else: - return tuple_rprimitive - elif isinstance(typ, CallableType): - return object_rprimitive - elif isinstance(typ, NoneTyp): - return none_rprimitive - elif isinstance(typ, UnionType): - return RUnion([self.type_to_rtype(item) - for item in typ.items]) - elif isinstance(typ, AnyType): - return object_rprimitive - elif isinstance(typ, TypeType): - return object_rprimitive - elif isinstance(typ, TypeVarType): - # Erase type variable to upper bound. - # TODO: Erase to union if object has value restriction? 
- return self.type_to_rtype(typ.upper_bound) - elif isinstance(typ, PartialType): - assert typ.var.type is not None - return self.type_to_rtype(typ.var.type) - elif isinstance(typ, Overloaded): - return object_rprimitive - elif isinstance(typ, TypedDictType): - return dict_rprimitive - elif isinstance(typ, LiteralType): - return self.type_to_rtype(typ.fallback) - elif isinstance(typ, (UninhabitedType, UnboundType)): - # Sure, whatever! - return object_rprimitive - - # I think we've covered everything that is supposed to - # actually show up, so anything else is a bug somewhere. - assert False, 'unexpected type %s' % type(typ) - - def get_arg_rtype(self, typ: Type, kind: int) -> RType: - if kind == ARG_STAR: - return tuple_rprimitive - elif kind == ARG_STAR2: - return dict_rprimitive - else: - return self.type_to_rtype(typ) - - def fdef_to_sig(self, fdef: FuncDef) -> FuncSignature: - if isinstance(fdef.type, CallableType): - arg_types = [self.get_arg_rtype(typ, kind) - for typ, kind in zip(fdef.type.arg_types, fdef.type.arg_kinds)] - ret = self.type_to_rtype(fdef.type.ret_type) - else: - # Handle unannotated functions - arg_types = [object_rprimitive for arg in fdef.arguments] - ret = object_rprimitive - - args = [RuntimeArg(arg_name, arg_type, arg_kind) - for arg_name, arg_kind, arg_type in zip(fdef.arg_names, fdef.arg_kinds, arg_types)] - - # We force certain dunder methods to return objects to support letting them - # return NotImplemented. It also avoids some pointless boxing and unboxing, - # since tp_richcompare needs an object anyways. - if fdef.name in ('__eq__', '__ne__', '__lt__', '__gt__', '__le__', '__ge__'): - ret = object_rprimitive - return FuncSignature(args, ret) - - def literal_static_name(self, module: str, - value: Union[int, float, complex, str, bytes]) -> str: - # Literals are shared between modules in a compilation group - # but not outside the group. 
- literals = self.literals[self.group_map.get(module)] - - # Include type to distinguish between 1 and 1.0, and so on. - key = (type(value), value) - if key not in literals: - if isinstance(value, str): - prefix = 'unicode_' - else: - prefix = type(value).__name__ + '_' - literals[key] = prefix + str(len(literals)) - return literals[key] - - -def prepare_func_def(module_name: str, class_name: Optional[str], - fdef: FuncDef, mapper: Mapper) -> FuncDecl: - kind = FUNC_STATICMETHOD if fdef.is_static else ( - FUNC_CLASSMETHOD if fdef.is_class else FUNC_NORMAL) - decl = FuncDecl(fdef.name, class_name, module_name, mapper.fdef_to_sig(fdef), kind) - mapper.func_to_decl[fdef] = decl - return decl - - -def prepare_method_def(ir: ClassIR, module_name: str, cdef: ClassDef, mapper: Mapper, - node: Union[FuncDef, Decorator]) -> None: - if isinstance(node, FuncDef): - ir.method_decls[node.name] = prepare_func_def(module_name, cdef.name, node, mapper) - elif isinstance(node, Decorator): - # TODO: do something about abstract methods here. Currently, they are handled just like - # normal methods. - decl = prepare_func_def(module_name, cdef.name, node.func, mapper) - if not node.decorators: - ir.method_decls[node.name] = decl - elif isinstance(node.decorators[0], MemberExpr) and node.decorators[0].name == 'setter': - # Make property setter name different than getter name so there are no - # name clashes when generating C code, and property lookup at the IR level - # works correctly. 
- decl.name = PROPSET_PREFIX + decl.name - decl.is_prop_setter = True - ir.method_decls[PROPSET_PREFIX + node.name] = decl - - if node.func.is_property: - assert node.func.type - decl.is_prop_getter = True - ir.property_types[node.name] = decl.sig.ret_type - - -def is_valid_multipart_property_def(prop: OverloadedFuncDef) -> bool: - # Checks to ensure supported property decorator semantics - if len(prop.items) == 2: - getter = prop.items[0] - setter = prop.items[1] - if isinstance(getter, Decorator) and isinstance(setter, Decorator): - if getter.func.is_property and len(setter.decorators) == 1: - if isinstance(setter.decorators[0], MemberExpr): - if setter.decorators[0].name == "setter": - return True - return False - - -def can_subclass_builtin(builtin_base: str) -> bool: - # BaseException and dict are special cased. - return builtin_base in ( - ('builtins.Exception', 'builtins.LookupError', 'builtins.IndexError', - 'builtins.Warning', 'builtins.UserWarning', 'builtins.ValueError', - 'builtins.object', )) - - -def prepare_class_def(path: str, module_name: str, cdef: ClassDef, - errors: Errors, mapper: Mapper) -> None: - - ir = mapper.type_to_ir[cdef.info] - info = cdef.info - - attrs = get_mypyc_attrs(cdef) - if attrs.get("allow_interpreted_subclasses") is True: - ir.allow_interpreted_subclasses = True - - # We sort the table for determinism here on Python 3.5 - for name, node in sorted(info.names.items()): - # Currently all plugin generated methods are dummies and not included. 
- if node.plugin_generated: - continue - - if isinstance(node.node, Var): - assert node.node.type, "Class member %s missing type" % name - if not node.node.is_classvar and name != '__slots__': - ir.attributes[name] = mapper.type_to_rtype(node.node.type) - elif isinstance(node.node, (FuncDef, Decorator)): - prepare_method_def(ir, module_name, cdef, mapper, node.node) - elif isinstance(node.node, OverloadedFuncDef): - # Handle case for property with both a getter and a setter - if node.node.is_property: - if is_valid_multipart_property_def(node.node): - for item in node.node.items: - prepare_method_def(ir, module_name, cdef, mapper, item) - else: - errors.error("Unsupported property decorator semantics", path, cdef.line) - - # Handle case for regular function overload - else: - assert node.node.impl - prepare_method_def(ir, module_name, cdef, mapper, node.node.impl) - - # Check for subclassing from builtin types - for cls in info.mro: - # Special case exceptions and dicts - # XXX: How do we handle *other* things?? - if cls.fullname == 'builtins.BaseException': - ir.builtin_base = 'PyBaseExceptionObject' - elif cls.fullname == 'builtins.dict': - ir.builtin_base = 'PyDictObject' - elif cls.fullname.startswith('builtins.'): - if not can_subclass_builtin(cls.fullname): - # Note that if we try to subclass a C extension class that - # isn't in builtins, bad things will happen and we won't - # catch it here! But this should catch a lot of the most - # common pitfalls. 
- errors.error("Inheriting from most builtin types is unimplemented", - path, cdef.line) - - if ir.builtin_base: - ir.attributes.clear() - - # Set up a constructor decl - init_node = cdef.info['__init__'].node - if not ir.is_trait and not ir.builtin_base and isinstance(init_node, FuncDef): - init_sig = mapper.fdef_to_sig(init_node) - - defining_ir = mapper.type_to_ir.get(init_node.info) - # If there is a nontrivial __init__ that wasn't defined in an - # extension class, we need to make the constructor take *args, - # **kwargs so it can call tp_init. - if ((defining_ir is None or not defining_ir.is_ext_class - or cdef.info['__init__'].plugin_generated) - and init_node.info.fullname != 'builtins.object'): - init_sig = FuncSignature( - [init_sig.args[0], - RuntimeArg("args", tuple_rprimitive, ARG_STAR), - RuntimeArg("kwargs", dict_rprimitive, ARG_STAR2)], - init_sig.ret_type) - - ctor_sig = FuncSignature(init_sig.args[1:], RInstance(ir)) - ir.ctor = FuncDecl(cdef.name, None, module_name, ctor_sig) - mapper.func_to_decl[cdef.info] = ir.ctor - - # Set up the parent class - bases = [mapper.type_to_ir[base.type] for base in info.bases - if base.type in mapper.type_to_ir] - if not all(c.is_trait for c in bases[1:]): - errors.error("Non-trait bases must appear first in parent list", path, cdef.line) - ir.traits = [c for c in bases if c.is_trait] - - mro = [] - base_mro = [] - for cls in info.mro: - if cls not in mapper.type_to_ir: - if cls.fullname != 'builtins.object': - ir.inherits_python = True - continue - base_ir = mapper.type_to_ir[cls] - if not base_ir.is_trait: - base_mro.append(base_ir) - mro.append(base_ir) - - if cls.defn.removed_base_type_exprs or not base_ir.is_ext_class: - ir.inherits_python = True - - base_idx = 1 if not ir.is_trait else 0 - if len(base_mro) > base_idx: - ir.base = base_mro[base_idx] - ir.mro = mro - ir.base_mro = base_mro - - for base in bases: - if base.children is not None: - base.children.append(ir) - - if is_dataclass(cdef): - 
ir.is_augmented = True - - -def prepare_non_ext_class_def(path: str, module_name: str, cdef: ClassDef, - errors: Errors, mapper: Mapper) -> None: - - ir = mapper.type_to_ir[cdef.info] - info = cdef.info - - for name, node in info.names.items(): - if isinstance(node.node, (FuncDef, Decorator)): - prepare_method_def(ir, module_name, cdef, mapper, node.node) - elif isinstance(node.node, OverloadedFuncDef): - # Handle case for property with both a getter and a setter - if node.node.is_property: - if not is_valid_multipart_property_def(node.node): - errors.error("Unsupported property decorator semantics", path, cdef.line) - for item in node.node.items: - prepare_method_def(ir, module_name, cdef, mapper, item) - # Handle case for regular function overload - else: - prepare_method_def(ir, module_name, cdef, mapper, get_func_def(node.node)) - - if any( - cls in mapper.type_to_ir and mapper.type_to_ir[cls].is_ext_class for cls in info.mro - ): - errors.error( - "Non-extension classes may not inherit from extension classes", path, cdef.line) - - # Infrastructure for special casing calls to builtin functions in a # programmatic way. 
Most special cases should be handled using the # data driven "primitive ops" system, but certain operations require diff --git a/mypyc/genopsmapper.py b/mypyc/genopsmapper.py new file mode 100644 index 000000000000..0ac86dab40bf --- /dev/null +++ b/mypyc/genopsmapper.py @@ -0,0 +1,146 @@ +from typing import Dict, Optional, Union +from collections import OrderedDict + +from mypy.nodes import FuncDef, TypeInfo, SymbolNode, ARG_STAR, ARG_STAR2 +from mypy.types import ( + Instance, Type, CallableType, LiteralType, TypedDictType, UnboundType, PartialType, + UninhabitedType, Overloaded, UnionType, TypeType, AnyType, NoneTyp, TupleType, TypeVarType, + get_proper_type +) + +from mypyc.ops import ( + RType, RUnion, RTuple, RInstance, object_rprimitive, dict_rprimitive, tuple_rprimitive, + none_rprimitive, int_rprimitive, float_rprimitive, str_rprimitive, bool_rprimitive, + list_rprimitive, set_rprimitive, FuncSignature, ClassIR, FuncDecl, RuntimeArg, LiteralsMap +) + + +class Mapper: + """Keep track of mappings from mypy concepts to IR concepts. + + This state is shared across all modules being compiled in all + compilation groups. + """ + + def __init__(self, group_map: Dict[str, Optional[str]]) -> None: + self.group_map = group_map + self.type_to_ir = {} # type: Dict[TypeInfo, ClassIR] + self.func_to_decl = {} # type: Dict[SymbolNode, FuncDecl] + # LiteralsMap maps literal values to a static name. Each + # compilation group has its own LiteralsMap. (Since they can't + # share literals.) 
+ self.literals = { + v: OrderedDict() for v in group_map.values() + } # type: Dict[Optional[str], LiteralsMap] + + def type_to_rtype(self, typ: Optional[Type]) -> RType: + if typ is None: + return object_rprimitive + + typ = get_proper_type(typ) + if isinstance(typ, Instance): + if typ.type.fullname == 'builtins.int': + return int_rprimitive + elif typ.type.fullname == 'builtins.float': + return float_rprimitive + elif typ.type.fullname == 'builtins.str': + return str_rprimitive + elif typ.type.fullname == 'builtins.bool': + return bool_rprimitive + elif typ.type.fullname == 'builtins.list': + return list_rprimitive + # Dict subclasses are at least somewhat common and we + # specifically support them, so make sure that dict operations + # get optimized on them. + elif any(cls.fullname == 'builtins.dict' for cls in typ.type.mro): + return dict_rprimitive + elif typ.type.fullname == 'builtins.set': + return set_rprimitive + elif typ.type.fullname == 'builtins.tuple': + return tuple_rprimitive # Varying-length tuple + elif typ.type in self.type_to_ir: + return RInstance(self.type_to_ir[typ.type]) + else: + return object_rprimitive + elif isinstance(typ, TupleType): + # Use our unboxed tuples for raw tuples but fall back to + # being boxed for NamedTuple. + if typ.partial_fallback.type.fullname == 'builtins.tuple': + return RTuple([self.type_to_rtype(t) for t in typ.items]) + else: + return tuple_rprimitive + elif isinstance(typ, CallableType): + return object_rprimitive + elif isinstance(typ, NoneTyp): + return none_rprimitive + elif isinstance(typ, UnionType): + return RUnion([self.type_to_rtype(item) + for item in typ.items]) + elif isinstance(typ, AnyType): + return object_rprimitive + elif isinstance(typ, TypeType): + return object_rprimitive + elif isinstance(typ, TypeVarType): + # Erase type variable to upper bound. + # TODO: Erase to union if object has value restriction? 
+ return self.type_to_rtype(typ.upper_bound) + elif isinstance(typ, PartialType): + assert typ.var.type is not None + return self.type_to_rtype(typ.var.type) + elif isinstance(typ, Overloaded): + return object_rprimitive + elif isinstance(typ, TypedDictType): + return dict_rprimitive + elif isinstance(typ, LiteralType): + return self.type_to_rtype(typ.fallback) + elif isinstance(typ, (UninhabitedType, UnboundType)): + # Sure, whatever! + return object_rprimitive + + # I think we've covered everything that is supposed to + # actually show up, so anything else is a bug somewhere. + assert False, 'unexpected type %s' % type(typ) + + def get_arg_rtype(self, typ: Type, kind: int) -> RType: + if kind == ARG_STAR: + return tuple_rprimitive + elif kind == ARG_STAR2: + return dict_rprimitive + else: + return self.type_to_rtype(typ) + + def fdef_to_sig(self, fdef: FuncDef) -> FuncSignature: + if isinstance(fdef.type, CallableType): + arg_types = [self.get_arg_rtype(typ, kind) + for typ, kind in zip(fdef.type.arg_types, fdef.type.arg_kinds)] + ret = self.type_to_rtype(fdef.type.ret_type) + else: + # Handle unannotated functions + arg_types = [object_rprimitive for arg in fdef.arguments] + ret = object_rprimitive + + args = [RuntimeArg(arg_name, arg_type, arg_kind) + for arg_name, arg_kind, arg_type in zip(fdef.arg_names, fdef.arg_kinds, arg_types)] + + # We force certain dunder methods to return objects to support letting them + # return NotImplemented. It also avoids some pointless boxing and unboxing, + # since tp_richcompare needs an object anyways. + if fdef.name in ('__eq__', '__ne__', '__lt__', '__gt__', '__le__', '__ge__'): + ret = object_rprimitive + return FuncSignature(args, ret) + + def literal_static_name(self, module: str, + value: Union[int, float, complex, str, bytes]) -> str: + # Literals are shared between modules in a compilation group + # but not outside the group. 
+ literals = self.literals[self.group_map.get(module)] + + # Include type to distinguish between 1 and 1.0, and so on. + key = (type(value), value) + if key not in literals: + if isinstance(value, str): + prefix = 'unicode_' + else: + prefix = type(value).__name__ + '_' + literals[key] = prefix + str(len(literals)) + return literals[key] diff --git a/mypyc/genopsprepare.py b/mypyc/genopsprepare.py new file mode 100644 index 000000000000..94e926fee981 --- /dev/null +++ b/mypyc/genopsprepare.py @@ -0,0 +1,283 @@ +from typing import List, Dict, Iterable, Optional, Union + +from mypy.nodes import ( + MypyFile, TypeInfo, FuncDef, ClassDef, Decorator, OverloadedFuncDef, MemberExpr, Var, + Expression, ARG_STAR, ARG_STAR2 +) +from mypy.types import Type +from mypy.build import Graph + +from mypyc.ops import ( + FuncDecl, FuncSignature, ClassIR, RInstance, RuntimeArg, tuple_rprimitive, dict_rprimitive, + DeserMaps, FUNC_NORMAL, FUNC_STATICMETHOD, FUNC_CLASSMETHOD +) +from mypyc.common import PROPSET_PREFIX +from mypyc.genopsmapper import Mapper +from mypyc.genopsutil import ( + get_func_def, is_dataclass, is_trait, is_extension_class, get_mypyc_attrs +) +from mypyc.errors import Errors +from mypyc.options import CompilerOptions +from mypyc.crash import catch_errors + + +def build_type_map(mapper: Mapper, + modules: List[MypyFile], + graph: Graph, + types: Dict[Expression, Type], + options: CompilerOptions, + errors: Errors) -> None: + # Collect all classes defined in everything we are compiling + classes = [] + for module in modules: + module_classes = [node for node in module.defs if isinstance(node, ClassDef)] + classes.extend([(module, cdef) for cdef in module_classes]) + + # Collect all class mappings so that we can bind arbitrary class name + # references even if there are import cycles. 
+ for module, cdef in classes: + class_ir = ClassIR(cdef.name, module.fullname, is_trait(cdef), + is_abstract=cdef.info.is_abstract) + class_ir.is_ext_class = is_extension_class(cdef) + # If global optimizations are disabled, turn of tracking of class children + if not options.global_opts: + class_ir.children = None + mapper.type_to_ir[cdef.info] = class_ir + + # Populate structural information in class IR for extension classes. + for module, cdef in classes: + with catch_errors(module.path, cdef.line): + if mapper.type_to_ir[cdef.info].is_ext_class: + prepare_class_def(module.path, module.fullname, cdef, errors, mapper) + else: + prepare_non_ext_class_def(module.path, module.fullname, cdef, errors, mapper) + + # Collect all the functions also. We collect from the symbol table + # so that we can easily pick out the right copy of a function that + # is conditionally defined. + for module in modules: + for func in get_module_func_defs(module): + prepare_func_def(module.fullname, None, func, mapper) + # TODO: what else? + + +def load_type_map(mapper: 'Mapper', + modules: List[MypyFile], + deser_ctx: DeserMaps) -> None: + """Populate a Mapper with deserialized IR from a list of modules.""" + for module in modules: + for name, node in module.names.items(): + if isinstance(node.node, TypeInfo): + ir = deser_ctx.classes[node.node.fullname] + mapper.type_to_ir[node.node] = ir + mapper.func_to_decl[node.node] = ir.ctor + + for module in modules: + for func in get_module_func_defs(module): + mapper.func_to_decl[func] = deser_ctx.functions[func.fullname].decl + + +def get_module_func_defs(module: MypyFile) -> Iterable[FuncDef]: + """Collect all of the (non-method) functions declared in a module.""" + for name, node in module.names.items(): + # We need to filter out functions that are imported or + # aliases. The best way to do this seems to be by + # checking that the fullname matches. 
+ if (isinstance(node.node, (FuncDef, Decorator, OverloadedFuncDef)) + and node.fullname == module.fullname + '.' + name): + yield get_func_def(node.node) + + +def prepare_func_def(module_name: str, class_name: Optional[str], + fdef: FuncDef, mapper: Mapper) -> FuncDecl: + kind = FUNC_STATICMETHOD if fdef.is_static else ( + FUNC_CLASSMETHOD if fdef.is_class else FUNC_NORMAL) + decl = FuncDecl(fdef.name, class_name, module_name, mapper.fdef_to_sig(fdef), kind) + mapper.func_to_decl[fdef] = decl + return decl + + +def prepare_method_def(ir: ClassIR, module_name: str, cdef: ClassDef, mapper: Mapper, + node: Union[FuncDef, Decorator]) -> None: + if isinstance(node, FuncDef): + ir.method_decls[node.name] = prepare_func_def(module_name, cdef.name, node, mapper) + elif isinstance(node, Decorator): + # TODO: do something about abstract methods here. Currently, they are handled just like + # normal methods. + decl = prepare_func_def(module_name, cdef.name, node.func, mapper) + if not node.decorators: + ir.method_decls[node.name] = decl + elif isinstance(node.decorators[0], MemberExpr) and node.decorators[0].name == 'setter': + # Make property setter name different than getter name so there are no + # name clashes when generating C code, and property lookup at the IR level + # works correctly. 
+ decl.name = PROPSET_PREFIX + decl.name + decl.is_prop_setter = True + ir.method_decls[PROPSET_PREFIX + node.name] = decl + + if node.func.is_property: + assert node.func.type + decl.is_prop_getter = True + ir.property_types[node.name] = decl.sig.ret_type + + +def is_valid_multipart_property_def(prop: OverloadedFuncDef) -> bool: + # Checks to ensure supported property decorator semantics + if len(prop.items) == 2: + getter = prop.items[0] + setter = prop.items[1] + if isinstance(getter, Decorator) and isinstance(setter, Decorator): + if getter.func.is_property and len(setter.decorators) == 1: + if isinstance(setter.decorators[0], MemberExpr): + if setter.decorators[0].name == "setter": + return True + return False + + +def can_subclass_builtin(builtin_base: str) -> bool: + # BaseException and dict are special cased. + return builtin_base in ( + ('builtins.Exception', 'builtins.LookupError', 'builtins.IndexError', + 'builtins.Warning', 'builtins.UserWarning', 'builtins.ValueError', + 'builtins.object', )) + + +def prepare_class_def(path: str, module_name: str, cdef: ClassDef, + errors: Errors, mapper: Mapper) -> None: + + ir = mapper.type_to_ir[cdef.info] + info = cdef.info + + attrs = get_mypyc_attrs(cdef) + if attrs.get("allow_interpreted_subclasses") is True: + ir.allow_interpreted_subclasses = True + + # We sort the table for determinism here on Python 3.5 + for name, node in sorted(info.names.items()): + # Currently all plugin generated methods are dummies and not included. 
+ if node.plugin_generated: + continue + + if isinstance(node.node, Var): + assert node.node.type, "Class member %s missing type" % name + if not node.node.is_classvar and name != '__slots__': + ir.attributes[name] = mapper.type_to_rtype(node.node.type) + elif isinstance(node.node, (FuncDef, Decorator)): + prepare_method_def(ir, module_name, cdef, mapper, node.node) + elif isinstance(node.node, OverloadedFuncDef): + # Handle case for property with both a getter and a setter + if node.node.is_property: + if is_valid_multipart_property_def(node.node): + for item in node.node.items: + prepare_method_def(ir, module_name, cdef, mapper, item) + else: + errors.error("Unsupported property decorator semantics", path, cdef.line) + + # Handle case for regular function overload + else: + assert node.node.impl + prepare_method_def(ir, module_name, cdef, mapper, node.node.impl) + + # Check for subclassing from builtin types + for cls in info.mro: + # Special case exceptions and dicts + # XXX: How do we handle *other* things?? + if cls.fullname == 'builtins.BaseException': + ir.builtin_base = 'PyBaseExceptionObject' + elif cls.fullname == 'builtins.dict': + ir.builtin_base = 'PyDictObject' + elif cls.fullname.startswith('builtins.'): + if not can_subclass_builtin(cls.fullname): + # Note that if we try to subclass a C extension class that + # isn't in builtins, bad things will happen and we won't + # catch it here! But this should catch a lot of the most + # common pitfalls. 
+ errors.error("Inheriting from most builtin types is unimplemented", + path, cdef.line) + + if ir.builtin_base: + ir.attributes.clear() + + # Set up a constructor decl + init_node = cdef.info['__init__'].node + if not ir.is_trait and not ir.builtin_base and isinstance(init_node, FuncDef): + init_sig = mapper.fdef_to_sig(init_node) + + defining_ir = mapper.type_to_ir.get(init_node.info) + # If there is a nontrivial __init__ that wasn't defined in an + # extension class, we need to make the constructor take *args, + # **kwargs so it can call tp_init. + if ((defining_ir is None or not defining_ir.is_ext_class + or cdef.info['__init__'].plugin_generated) + and init_node.info.fullname != 'builtins.object'): + init_sig = FuncSignature( + [init_sig.args[0], + RuntimeArg("args", tuple_rprimitive, ARG_STAR), + RuntimeArg("kwargs", dict_rprimitive, ARG_STAR2)], + init_sig.ret_type) + + ctor_sig = FuncSignature(init_sig.args[1:], RInstance(ir)) + ir.ctor = FuncDecl(cdef.name, None, module_name, ctor_sig) + mapper.func_to_decl[cdef.info] = ir.ctor + + # Set up the parent class + bases = [mapper.type_to_ir[base.type] for base in info.bases + if base.type in mapper.type_to_ir] + if not all(c.is_trait for c in bases[1:]): + errors.error("Non-trait bases must appear first in parent list", path, cdef.line) + ir.traits = [c for c in bases if c.is_trait] + + mro = [] + base_mro = [] + for cls in info.mro: + if cls not in mapper.type_to_ir: + if cls.fullname != 'builtins.object': + ir.inherits_python = True + continue + base_ir = mapper.type_to_ir[cls] + if not base_ir.is_trait: + base_mro.append(base_ir) + mro.append(base_ir) + + if cls.defn.removed_base_type_exprs or not base_ir.is_ext_class: + ir.inherits_python = True + + base_idx = 1 if not ir.is_trait else 0 + if len(base_mro) > base_idx: + ir.base = base_mro[base_idx] + ir.mro = mro + ir.base_mro = base_mro + + for base in bases: + if base.children is not None: + base.children.append(ir) + + if is_dataclass(cdef): + 
ir.is_augmented = True + + +def prepare_non_ext_class_def(path: str, module_name: str, cdef: ClassDef, + errors: Errors, mapper: Mapper) -> None: + + ir = mapper.type_to_ir[cdef.info] + info = cdef.info + + for name, node in info.names.items(): + if isinstance(node.node, (FuncDef, Decorator)): + prepare_method_def(ir, module_name, cdef, mapper, node.node) + elif isinstance(node.node, OverloadedFuncDef): + # Handle case for property with both a getter and a setter + if node.node.is_property: + if not is_valid_multipart_property_def(node.node): + errors.error("Unsupported property decorator semantics", path, cdef.line) + for item in node.node.items: + prepare_method_def(ir, module_name, cdef, mapper, item) + # Handle case for regular function overload + else: + prepare_method_def(ir, module_name, cdef, mapper, get_func_def(node.node)) + + if any( + cls in mapper.type_to_ir and mapper.type_to_ir[cls].is_ext_class for cls in info.mro + ): + errors.error( + "Non-extension classes may not inherit from extension classes", path, cdef.line) diff --git a/mypyc/genopsvtable.py b/mypyc/genopsvtable.py new file mode 100644 index 000000000000..8e9cd5e6d193 --- /dev/null +++ b/mypyc/genopsvtable.py @@ -0,0 +1,93 @@ +import itertools + +from mypyc.ops import ClassIR, VTableEntries, VTableMethod, VTableAttr +from mypyc.sametype import is_same_method_signature + + +def compute_vtable(cls: ClassIR) -> None: + """Compute the vtable structure for a class.""" + if cls.vtable is not None: return + + if not cls.is_generated: + cls.has_dict = any(x.inherits_python for x in cls.mro) + + for t in cls.mro[1:]: + # Make sure all ancestors are processed first + compute_vtable(t) + # Merge attributes from traits into the class + if not t.is_trait: + continue + for name, typ in t.attributes.items(): + if not cls.is_trait and not any(name in b.attributes for b in cls.base_mro): + cls.attributes[name] = typ + + cls.vtable = {} + if cls.base: + assert cls.base.vtable is not None + 
cls.vtable.update(cls.base.vtable) + cls.vtable_entries = specialize_parent_vtable(cls, cls.base) + + # Include the vtable from the parent classes, but handle method overrides. + entries = cls.vtable_entries + + # Traits need to have attributes in the vtable, since the + # attributes can be at different places in different classes, but + # regular classes can just directly get them. + if cls.is_trait: + # Traits also need to pull in vtable entries for non-trait + # parent classes explicitly. + for t in cls.mro: + for attr in t.attributes: + if attr in cls.vtable: + continue + cls.vtable[attr] = len(entries) + entries.append(VTableAttr(t, attr, is_setter=False)) + entries.append(VTableAttr(t, attr, is_setter=True)) + + all_traits = [t for t in cls.mro if t.is_trait] + + for t in [cls] + cls.traits: + for fn in itertools.chain(t.methods.values()): + # TODO: don't generate a new entry when we overload without changing the type + if fn == cls.get_method(fn.name): + cls.vtable[fn.name] = len(entries) + # If the class contains a glue method referring to itself, that is a + # shadow glue method to support interpreted subclasses. + shadow = cls.glue_methods.get((cls, fn.name)) + entries.append(VTableMethod(t, fn.name, fn, shadow)) + + # Compute vtables for all of the traits that the class implements + if not cls.is_trait: + for trait in all_traits: + compute_vtable(trait) + cls.trait_vtables[trait] = specialize_parent_vtable(cls, trait) + + +def specialize_parent_vtable(cls: ClassIR, parent: ClassIR) -> VTableEntries: + """Generate the part of a vtable corresponding to a parent class or trait""" + updated = [] + for entry in parent.vtable_entries: + if isinstance(entry, VTableMethod): + # Find the original method corresponding to this vtable entry. + # (This may not be the method in the entry, if it was overridden.) 
+ orig_parent_method = entry.cls.get_method(entry.name) + assert orig_parent_method + method_cls = cls.get_method_and_class(entry.name) + if method_cls: + child_method, defining_cls = method_cls + # TODO: emit a wrapper for __init__ that raises or something + if (is_same_method_signature(orig_parent_method.sig, child_method.sig) + or orig_parent_method.name == '__init__'): + entry = VTableMethod(entry.cls, entry.name, child_method, entry.shadow_method) + else: + entry = VTableMethod(entry.cls, entry.name, + defining_cls.glue_methods[(entry.cls, entry.name)], + entry.shadow_method) + else: + # If it is an attribute from a trait, we need to find out + # the real class it got mixed in at and point to that. + if parent.is_trait: + _, origin_cls = cls.attr_details(entry.name) + entry = VTableAttr(origin_cls, entry.name, entry.is_setter) + updated.append(entry) + return updated From e91bc22abb2e7fc4e8e5233ada33ce2348535ae2 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Mon, 17 Feb 2020 09:31:40 +0000 Subject: [PATCH 095/117] [mypyc] Refactor: get rid of "if MYPY:" checks (#8413) --- mypyc/common.py | 4 +--- mypyc/crash.py | 4 +--- mypyc/emitfunc.py | 5 +---- mypyc/genclass.py | 5 ++--- mypyc/genfunc.py | 4 ++-- mypyc/nonlocalcontrol.py | 4 ++-- 6 files changed, 9 insertions(+), 17 deletions(-) diff --git a/mypyc/common.py b/mypyc/common.py index bfc3192977d7..c3537e015f3d 100644 --- a/mypyc/common.py +++ b/mypyc/common.py @@ -1,6 +1,4 @@ -MYPY = False -if MYPY: - from typing_extensions import Final +from typing_extensions import Final PREFIX = 'CPyPy_' # type: Final # Python wrappers NATIVE_PREFIX = 'CPyDef_' # type: Final # Native functions etc. 
diff --git a/mypyc/crash.py b/mypyc/crash.py index 913252fba5d1..04948dd08dec 100644 --- a/mypyc/crash.py +++ b/mypyc/crash.py @@ -1,7 +1,5 @@ from typing import Iterator -MYPY = False -if MYPY: - from typing import NoReturn +from typing_extensions import NoReturn import sys import traceback diff --git a/mypyc/emitfunc.py b/mypyc/emitfunc.py index 336f8992fa3b..f61ec9b11980 100644 --- a/mypyc/emitfunc.py +++ b/mypyc/emitfunc.py @@ -1,5 +1,6 @@ """Code generation for native function bodies.""" +from typing_extensions import Final from mypyc.common import ( REG_PREFIX, NATIVE_PREFIX, STATIC_PREFIX, TYPE_PREFIX, MODULE_PREFIX, @@ -15,10 +16,6 @@ ) from mypyc.namegen import NameGenerator -MYPY = False -if MYPY: - from typing_extensions import Final - # Whether to insert debug asserts for all error handling, to quickly # catch errors propagating without exceptions set. DEBUG_ERRORS = False diff --git a/mypyc/genclass.py b/mypyc/genclass.py index a8d282477d03..009c1009fa0f 100644 --- a/mypyc/genclass.py +++ b/mypyc/genclass.py @@ -1,5 +1,5 @@ from typing import List, Optional, Union -from typing_extensions import overload +from typing_extensions import overload, TYPE_CHECKING from mypy.nodes import ( ClassDef, FuncDef, OverloadedFuncDef, PassStmt, AssignmentStmt, NameExpr, StrExpr, @@ -25,8 +25,7 @@ ) from mypyc.common import SELF_NAME -MYPY = False -if MYPY: +if TYPE_CHECKING: from mypyc.genops import IRBuilder diff --git a/mypyc/genfunc.py b/mypyc/genfunc.py index b9008874391f..4011a6506a1c 100644 --- a/mypyc/genfunc.py +++ b/mypyc/genfunc.py @@ -4,6 +4,7 @@ """ from typing import Optional, List, Tuple, Union +from typing_extensions import TYPE_CHECKING from mypy.nodes import ( ClassDef, FuncDef, OverloadedFuncDef, Decorator, Var, YieldFromExpr, AwaitExpr, YieldExpr, @@ -30,8 +31,7 @@ from mypyc.genopsutil import concrete_arg_kind, is_constant, add_self_to_env from mypyc.genopscontext import FuncInfo, GeneratorClass, ImplicitClass -MYPY = False -if MYPY: +if 
TYPE_CHECKING: from mypyc.genops import IRBuilder diff --git a/mypyc/nonlocalcontrol.py b/mypyc/nonlocalcontrol.py index 275f84bc0fd9..d794429b9ee6 100644 --- a/mypyc/nonlocalcontrol.py +++ b/mypyc/nonlocalcontrol.py @@ -1,5 +1,6 @@ from abc import abstractmethod from typing import Optional, Union +from typing_extensions import TYPE_CHECKING from mypyc.ops import ( Branch, BasicBlock, Unreachable, Value, Goto, LoadInt, Assign, Register, Return, @@ -7,8 +8,7 @@ ) from mypyc.ops_exc import set_stop_iteration_value, restore_exc_info_op -MYPY = False -if MYPY: +if TYPE_CHECKING: from mypyc.genops import IRBuilder From 6efbff4b7c69ee0104a11a5e3f0360921f460358 Mon Sep 17 00:00:00 2001 From: Shantanu Date: Tue, 18 Feb 2020 11:20:28 -0800 Subject: [PATCH 096/117] stubtest: workaround mypyc (#8411) --- mypy/stubtest.py | 5 +++-- setup.py | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 8d87f6e7f5a0..4c7cc815c08f 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -14,7 +14,7 @@ import warnings from functools import singledispatch from pathlib import Path -from typing import Any, Dict, Generic, Iterator, List, Optional, Tuple, TypeVar, Union +from typing import Any, Dict, Generic, Iterator, List, Optional, Tuple, TypeVar, Union, cast from typing_extensions import Type @@ -236,7 +236,8 @@ def verify_typeinfo( return to_check = set(stub.names) - to_check.update(m for m in vars(runtime) if not m.startswith("_")) + # cast to workaround mypyc complaints + to_check.update(m for m in cast(Any, vars)(runtime) if not m.startswith("_")) for entry in sorted(to_check): yield from verify( diff --git a/setup.py b/setup.py index a393c4035205..e51cff9ee76a 100644 --- a/setup.py +++ b/setup.py @@ -101,7 +101,7 @@ def run(self): # Also I think there would be problems with how we generate version.py. 
'version.py', - # Written by someone who doesn't know how to deal with mypyc + # Can be removed once we drop support for Python 3.5.2 and lower. 'stubtest.py', )) + ( # Don't want to grab this accidentally From 345134eb4eb18e748a9c78cc8f60c2d43c3ea3c1 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Tue, 18 Feb 2020 22:18:02 +0000 Subject: [PATCH 097/117] Fix erorrs in self-check related to pytest (#8418) --- mypy/test/data.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/mypy/test/data.py b/mypy/test/data.py index cf48d47c7407..5484fd99e944 100644 --- a/mypy/test/data.py +++ b/mypy/test/data.py @@ -181,6 +181,12 @@ class DataDrivenTestCase(pytest.Item): # type: ignore # inheriting from Any # forward vs backward slashes in file paths for Windows vs Linux. normalize_output = True + # Extra attributes used by some tests. + lastline = None # type: int + output_files = None # type: List[Tuple[str, str]] # Path and contents for output files + deleted_paths = None # type: Dict[int, Set[str]] # Mapping run number -> paths + triggered = None # type: List[str] # Active triggers (one line per incremental step) + def __init__(self, parent: 'DataSuiteCollector', suite: 'DataSuite', @@ -252,7 +258,7 @@ def teardown(self) -> None: def reportinfo(self) -> Tuple[str, int, str]: return self.file, self.line, self.name - def repr_failure(self, excinfo: Any) -> str: + def repr_failure(self, excinfo: Any, style: Optional[Any] = None) -> str: if excinfo.errisinstance(SystemExit): # We assume that before doing exit() (which raises SystemExit) we've printed # enough context about what happened so that a stack trace is not useful. From cdf943ac8e432105ef42b885c444a3fb9a4d6864 Mon Sep 17 00:00:00 2001 From: "Michael J. 
Sullivan" Date: Wed, 19 Feb 2020 19:36:52 -0800 Subject: [PATCH 098/117] [mypyc] Refactor: extract low-level genops to a new module (#8419) Part of https://github.com/mypyc/mypyc/issues/714 --- mypyc/genops.py | 703 ++---------------------------------------- mypyc/ir_builder.py | 729 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 746 insertions(+), 686 deletions(-) create mode 100644 mypyc/ir_builder.py diff --git a/mypyc/genops.py b/mypyc/genops.py index 3c0c2d249a40..fd19ae73e277 100644 --- a/mypyc/genops.py +++ b/mypyc/genops.py @@ -36,49 +36,44 @@ def f(x: int) -> int: NamedTupleExpr, NewTypeExpr, NonlocalDecl, OverloadedFuncDef, PrintStmt, RaiseStmt, RevealExpr, SetExpr, SliceExpr, StarExpr, SuperExpr, TryStmt, TypeAliasExpr, TypeApplication, TypeVarExpr, TypedDictExpr, UnicodeExpr, WithStmt, YieldFromExpr, YieldExpr, GDEF, ARG_POS, - ARG_NAMED, ARG_STAR, ARG_STAR2, op_methods + ARG_NAMED, ) from mypy.types import ( Type, Instance, TupleType, AnyType, TypeOfAny, UninhabitedType, get_proper_type ) from mypy.visitor import ExpressionVisitor, StatementVisitor -from mypy.checkexpr import map_actuals_to_formals from mypy.state import strict_optional_set from mypy.util import split_target from mypyc.common import ( - TEMP_ATTR_NAME, MAX_LITERAL_SHORT_INT, TOP_LEVEL_NAME, FAST_ISINSTANCE_MAX_SUBCLASSES + TEMP_ATTR_NAME, MAX_LITERAL_SHORT_INT, TOP_LEVEL_NAME, ) from mypyc.prebuildvisitor import PreBuildVisitor from mypyc.ops import ( BasicBlock, AssignmentTarget, AssignmentTargetRegister, AssignmentTargetIndex, - AssignmentTargetAttr, AssignmentTargetTuple, Environment, Op, LoadInt, RType, Value, Register, - FuncIR, Assign, Branch, Goto, Call, Box, Unbox, Cast, RTuple, Unreachable, + AssignmentTargetAttr, AssignmentTargetTuple, Environment, LoadInt, RType, Value, Register, + FuncIR, Assign, Branch, RTuple, Unreachable, TupleGet, TupleSet, ClassIR, NonExtClassInfo, RInstance, ModuleIR, ModuleIRs, GetAttr, SetAttr, - LoadStatic, InitStatic, MethodCall, 
INVALID_FUNC_DEF, int_rprimitive, float_rprimitive, + LoadStatic, InitStatic, INVALID_FUNC_DEF, int_rprimitive, bool_rprimitive, list_rprimitive, is_list_rprimitive, dict_rprimitive, set_rprimitive, str_rprimitive, none_rprimitive, is_none_rprimitive, object_rprimitive, - exc_rtuple, PrimitiveOp, ControlOp, OpDescription, RegisterOp, is_object_rprimitive, - FuncSignature, NAMESPACE_TYPE, NAMESPACE_MODULE, + exc_rtuple, PrimitiveOp, ControlOp, OpDescription, is_object_rprimitive, + FuncSignature, NAMESPACE_MODULE, RaiseStandardError, LoadErrorValue, NO_TRACEBACK_LINE_NO, FuncDecl, - FUNC_STATICMETHOD, FUNC_CLASSMETHOD, RUnion, optional_value_type, - all_concrete_classes + FUNC_STATICMETHOD, FUNC_CLASSMETHOD, ) -from mypyc.ops_primitive import binary_ops, unary_ops, func_ops, method_ops, name_ref_ops +from mypyc.ops_primitive import func_ops, name_ref_ops from mypyc.ops_list import ( list_append_op, list_extend_op, list_len_op, new_list_op, to_list, list_pop_last ) -from mypyc.ops_tuple import list_tuple_op, new_tuple_op +from mypyc.ops_tuple import list_tuple_op from mypyc.ops_dict import ( - new_dict_op, dict_get_item_op, dict_set_item_op, dict_update_in_display_op, + new_dict_op, dict_get_item_op, dict_set_item_op ) from mypyc.ops_set import new_set_op, set_add_op, set_update_op from mypyc.ops_misc import ( - none_op, none_object_op, true_op, false_op, iter_op, next_op, - py_getattr_op, py_setattr_op, py_delattr_op, - py_call_op, py_call_with_kwargs_op, py_method_call_op, - fast_isinstance_op, bool_op, new_slice_op, type_op, import_op, - get_module_dict_op, ellipsis_op, type_is_op, + true_op, false_op, iter_op, next_op, py_setattr_op, py_delattr_op, + new_slice_op, type_op, import_op, get_module_dict_op, ellipsis_op, ) from mypyc.ops_exc import ( raise_exception_op, reraise_exception_op, @@ -86,9 +81,6 @@ def f(x: int) -> int: get_exc_info_op, keep_propagating_op ) from mypyc.genops_for import ForGenerator, ForRange, ForList, ForIterable, ForEnumerate, ForZip 
-from mypyc.rt_subtype import is_runtime_subtype -from mypyc.subtype import is_subtype -from mypyc.sametype import is_same_type from mypyc.crash import catch_errors from mypyc.options import CompilerOptions from mypyc.errors import Errors @@ -102,9 +94,9 @@ def f(x: int) -> int: from mypyc.genopsmapper import Mapper from mypyc.genopsvtable import compute_vtable from mypyc.genopsprepare import build_type_map +from mypyc.ir_builder import LowLevelIRBuilder GenFunc = Callable[[], None] -DictEntry = Tuple[Optional[Value], Value] class UnsupportedException(Exception): @@ -194,7 +186,7 @@ def wrapper(f: Specializer) -> Specializer: return wrapper -class IRBuilder(ExpressionVisitor[Value], StatementVisitor[None]): +class IRBuilder(LowLevelIRBuilder, ExpressionVisitor[Value], StatementVisitor[None]): def __init__(self, current_module: str, types: Dict[Expression, Type], @@ -203,13 +195,12 @@ def __init__(self, mapper: Mapper, pbv: PreBuildVisitor, options: CompilerOptions) -> None: + super().__init__(current_module, mapper) + self.current_module = current_module self.types = types self.graph = graph - self.environment = Environment() - self.environments = [self.environment] self.ret_types = [] # type: List[RType] - self.blocks = [] # type: List[List[BasicBlock]] self.functions = [] # type: List[FuncIR] self.classes = [] # type: List[ClassIR] self.final_names = [] # type: List[Tuple[str, RType]] @@ -240,11 +231,8 @@ def __init__(self, # This list operates as a stack of constructs that modify the # behavior of nonlocal control flow constructs. self.nonlocal_control = [] # type: List[NonlocalControl] - # Stack of except handler entry blocks - self.error_handlers = [None] # type: List[Optional[BasicBlock]] self.errors = errors - self.mapper = mapper # Notionally a list of all of the modules imported by the # module being compiled, but stored as an OrderedDict so we # can also do quick lookups. 
@@ -1020,102 +1008,6 @@ def visit_op_expr(self, expr: OpExpr) -> Value: return self.shortcircuit_expr(expr) return self.binary_op(self.accept(expr.left), self.accept(expr.right), expr.op, expr.line) - def translate_eq_cmp(self, - lreg: Value, - rreg: Value, - expr_op: str, - line: int) -> Optional[Value]: - ltype = lreg.type - rtype = rreg.type - if not (isinstance(ltype, RInstance) and ltype == rtype): - return None - - class_ir = ltype.class_ir - # Check whether any subclasses of the operand redefines __eq__ - # or it might be redefined in a Python parent class or by - # dataclasses - cmp_varies_at_runtime = ( - not class_ir.is_method_final('__eq__') - or not class_ir.is_method_final('__ne__') - or class_ir.inherits_python - or class_ir.is_augmented - ) - - if cmp_varies_at_runtime: - # We might need to call left.__eq__(right) or right.__eq__(left) - # depending on which is the more specific type. - return None - - if not class_ir.has_method('__eq__'): - # There's no __eq__ defined, so just use object identity. - identity_ref_op = 'is' if expr_op == '==' else 'is not' - return self.binary_op(lreg, rreg, identity_ref_op, line) - - return self.gen_method_call( - lreg, - op_methods[expr_op], - [rreg], - ltype, - line - ) - - def matching_primitive_op(self, - candidates: List[OpDescription], - args: List[Value], - line: int, - result_type: Optional[RType] = None) -> Optional[Value]: - # Find the highest-priority primitive op that matches. 
- matching = None # type: Optional[OpDescription] - for desc in candidates: - if len(desc.arg_types) != len(args): - continue - if all(is_subtype(actual.type, formal) - for actual, formal in zip(args, desc.arg_types)): - if matching: - assert matching.priority != desc.priority, 'Ambiguous:\n1) %s\n2) %s' % ( - matching, desc) - if desc.priority > matching.priority: - matching = desc - else: - matching = desc - if matching: - target = self.primitive_op(matching, args, line) - if result_type and not is_runtime_subtype(target.type, result_type): - if is_none_rprimitive(result_type): - # Special case None return. The actual result may actually be a bool - # and so we can't just coerce it. - target = self.none() - else: - target = self.coerce(target, result_type, line) - return target - return None - - def binary_op(self, - lreg: Value, - rreg: Value, - expr_op: str, - line: int) -> Value: - # Special case == and != when we can resolve the method call statically. - value = None - if expr_op in ('==', '!='): - value = self.translate_eq_cmp(lreg, rreg, expr_op, line) - if value is not None: - return value - - ops = binary_ops.get(expr_op, []) - target = self.matching_primitive_op(ops, [lreg, rreg], line) - assert target, 'Unsupported binary operation: %s' % expr_op - return target - - def unary_op(self, - lreg: Value, - expr_op: str, - line: int) -> Value: - ops = unary_ops.get(expr_op, []) - target = self.matching_primitive_op(ops, [lreg], line) - assert target, 'Unsupported unary operation: %s' % expr_op - return target - def visit_index_expr(self, expr: IndexExpr) -> Value: base = self.accept(expr.base) @@ -1268,200 +1160,6 @@ def visit_member_expr(self, expr: MemberExpr) -> Value: obj = self.accept(expr.expr) return self.get_attr(obj, expr.name, self.node_type(expr), expr.line) - def get_attr(self, obj: Value, attr: str, result_type: RType, line: int) -> Value: - if (isinstance(obj.type, RInstance) and obj.type.class_ir.is_ext_class - and 
obj.type.class_ir.has_attr(attr)): - return self.add(GetAttr(obj, attr, line)) - elif isinstance(obj.type, RUnion): - return self.union_get_attr(obj, obj.type, attr, result_type, line) - else: - return self.py_get_attr(obj, attr, line) - - def union_get_attr(self, - obj: Value, - rtype: RUnion, - attr: str, - result_type: RType, - line: int) -> Value: - def get_item_attr(value: Value) -> Value: - return self.get_attr(value, attr, result_type, line) - - return self.decompose_union_helper(obj, rtype, result_type, get_item_attr, line) - - def decompose_union_helper(self, - obj: Value, - rtype: RUnion, - result_type: RType, - process_item: Callable[[Value], Value], - line: int) -> Value: - """Generate isinstance() + specialized operations for union items. - - Say, for Union[A, B] generate ops resembling this (pseudocode): - - if isinstance(obj, A): - result = - else: - result = - - Args: - obj: value with a union type - rtype: the union type - result_type: result of the operation - process_item: callback to generate op for a single union item (arg is coerced - to union item type) - line: line number - """ - # TODO: Optimize cases where a single operation can handle multiple union items - # (say a method is implemented in a common base class) - fast_items = [] - rest_items = [] - for item in rtype.items: - if isinstance(item, RInstance): - fast_items.append(item) - else: - # For everything but RInstance we fall back to C API - rest_items.append(item) - exit_block = BasicBlock() - result = self.alloc_temp(result_type) - for i, item in enumerate(fast_items): - more_types = i < len(fast_items) - 1 or rest_items - if more_types: - # We are not at the final item so we need one more branch - op = self.isinstance_native(obj, item.class_ir, line) - true_block, false_block = BasicBlock(), BasicBlock() - self.add_bool_branch(op, true_block, false_block) - self.activate_block(true_block) - coerced = self.coerce(obj, item, line) - temp = process_item(coerced) - temp2 = 
self.coerce(temp, result_type, line) - self.add(Assign(result, temp2)) - self.goto(exit_block) - if more_types: - self.activate_block(false_block) - if rest_items: - # For everything else we use generic operation. Use force=True to drop the - # union type. - coerced = self.coerce(obj, object_rprimitive, line, force=True) - temp = process_item(coerced) - temp2 = self.coerce(temp, result_type, line) - self.add(Assign(result, temp2)) - self.goto(exit_block) - self.activate_block(exit_block) - return result - - def isinstance_helper(self, obj: Value, class_irs: List[ClassIR], line: int) -> Value: - """Fast path for isinstance() that checks against a list of native classes.""" - if not class_irs: - return self.primitive_op(false_op, [], line) - ret = self.isinstance_native(obj, class_irs[0], line) - for class_ir in class_irs[1:]: - def other() -> Value: - return self.isinstance_native(obj, class_ir, line) - ret = self.shortcircuit_helper('or', bool_rprimitive, lambda: ret, other, line) - return ret - - def isinstance_native(self, obj: Value, class_ir: ClassIR, line: int) -> Value: - """Fast isinstance() check for a native class. - - If there three or less concrete (non-trait) classes among the class and all - its children, use even faster type comparison checks `type(obj) is typ`. - """ - concrete = all_concrete_classes(class_ir) - if concrete is None or len(concrete) > FAST_ISINSTANCE_MAX_SUBCLASSES + 1: - return self.primitive_op(fast_isinstance_op, - [obj, self.get_native_type(class_ir)], - line) - if not concrete: - # There can't be any concrete instance that matches this. 
- return self.primitive_op(false_op, [], line) - type_obj = self.get_native_type(concrete[0]) - ret = self.primitive_op(type_is_op, [obj, type_obj], line) - for c in concrete[1:]: - def other() -> Value: - return self.primitive_op(type_is_op, [obj, self.get_native_type(c)], line) - ret = self.shortcircuit_helper('or', bool_rprimitive, lambda: ret, other, line) - return ret - - def get_native_type(self, cls: ClassIR) -> Value: - fullname = '%s.%s' % (cls.module_name, cls.name) - return self.load_native_type_object(fullname) - - def py_get_attr(self, obj: Value, attr: str, line: int) -> Value: - key = self.load_static_unicode(attr) - return self.add(PrimitiveOp([obj, key], py_getattr_op, line)) - - def py_call(self, - function: Value, - arg_values: List[Value], - line: int, - arg_kinds: Optional[List[int]] = None, - arg_names: Optional[Sequence[Optional[str]]] = None) -> Value: - """Use py_call_op or py_call_with_kwargs_op for function call.""" - # If all arguments are positional, we can use py_call_op. - if (arg_kinds is None) or all(kind == ARG_POS for kind in arg_kinds): - return self.primitive_op(py_call_op, [function] + arg_values, line) - - # Otherwise fallback to py_call_with_kwargs_op. - assert arg_names is not None - - pos_arg_values = [] - kw_arg_key_value_pairs = [] # type: List[DictEntry] - star_arg_values = [] - for value, kind, name in zip(arg_values, arg_kinds, arg_names): - if kind == ARG_POS: - pos_arg_values.append(value) - elif kind == ARG_NAMED: - assert name is not None - key = self.load_static_unicode(name) - kw_arg_key_value_pairs.append((key, value)) - elif kind == ARG_STAR: - star_arg_values.append(value) - elif kind == ARG_STAR2: - # NOTE: mypy currently only supports a single ** arg, but python supports multiple. - # This code supports multiple primarily to make the logic easier to follow. 
- kw_arg_key_value_pairs.append((None, value)) - else: - assert False, ("Argument kind should not be possible:", kind) - - if len(star_arg_values) == 0: - # We can directly construct a tuple if there are no star args. - pos_args_tuple = self.primitive_op(new_tuple_op, pos_arg_values, line) - else: - # Otherwise we construct a list and call extend it with the star args, since tuples - # don't have an extend method. - pos_args_list = self.primitive_op(new_list_op, pos_arg_values, line) - for star_arg_value in star_arg_values: - self.primitive_op(list_extend_op, [pos_args_list, star_arg_value], line) - pos_args_tuple = self.primitive_op(list_tuple_op, [pos_args_list], line) - - kw_args_dict = self.make_dict(kw_arg_key_value_pairs, line) - - return self.primitive_op( - py_call_with_kwargs_op, [function, pos_args_tuple, kw_args_dict], line) - - def py_method_call(self, - obj: Value, - method_name: str, - arg_values: List[Value], - line: int, - arg_kinds: Optional[List[int]], - arg_names: Optional[Sequence[Optional[str]]]) -> Value: - if (arg_kinds is None) or all(kind == ARG_POS for kind in arg_kinds): - method_name_reg = self.load_static_unicode(method_name) - return self.primitive_op(py_method_call_op, [obj, method_name_reg] + arg_values, line) - else: - method = self.py_get_attr(obj, method_name, line) - return self.py_call(method, arg_values, line, arg_kinds=arg_kinds, arg_names=arg_names) - - def call(self, decl: FuncDecl, args: Sequence[Value], - arg_kinds: List[int], - arg_names: Sequence[Optional[str]], - line: int) -> Value: - # Normalize args to positionals. 
- args = self.native_args_to_positional( - args, arg_kinds, arg_names, decl.sig, line) - return self.add(Call(decl, args, line)) - def visit_call_expr(self, expr: CallExpr) -> Value: if isinstance(expr.analyzed, CastExpr): return self.translate_cast_expr(expr.analyzed) @@ -1628,110 +1326,11 @@ def translate_super_method_call(self, expr: CallExpr, callee: SuperExpr) -> Valu return self.call(decl, arg_values, arg_kinds, arg_names, expr.line) - def gen_method_call(self, - base: Value, - name: str, - arg_values: List[Value], - return_rtype: Optional[RType], - line: int, - arg_kinds: Optional[List[int]] = None, - arg_names: Optional[List[Optional[str]]] = None) -> Value: - # If arg_kinds contains values other than arg_pos and arg_named, then fallback to - # Python method call. - if (arg_kinds is not None - and not all(kind in (ARG_POS, ARG_NAMED) for kind in arg_kinds)): - return self.py_method_call(base, name, arg_values, base.line, arg_kinds, arg_names) - - # If the base type is one of ours, do a MethodCall - if (isinstance(base.type, RInstance) and base.type.class_ir.is_ext_class - and not base.type.class_ir.builtin_base): - if base.type.class_ir.has_method(name): - decl = base.type.class_ir.method_decl(name) - if arg_kinds is None: - assert arg_names is None, "arg_kinds not present but arg_names is" - arg_kinds = [ARG_POS for _ in arg_values] - arg_names = [None for _ in arg_values] - else: - assert arg_names is not None, "arg_kinds present but arg_names is not" - - # Normalize args to positionals. 
- assert decl.bound_sig - arg_values = self.native_args_to_positional( - arg_values, arg_kinds, arg_names, decl.bound_sig, line) - return self.add(MethodCall(base, name, arg_values, line)) - elif base.type.class_ir.has_attr(name): - function = self.add(GetAttr(base, name, line)) - return self.py_call(function, arg_values, line, - arg_kinds=arg_kinds, arg_names=arg_names) - - elif isinstance(base.type, RUnion): - return self.union_method_call(base, base.type, name, arg_values, return_rtype, line, - arg_kinds, arg_names) - - # Try to do a special-cased method call - if not arg_kinds or arg_kinds == [ARG_POS] * len(arg_values): - target = self.translate_special_method_call(base, name, arg_values, return_rtype, line) - if target: - return target - - # Fall back to Python method call - return self.py_method_call(base, name, arg_values, line, arg_kinds, arg_names) - - def union_method_call(self, - base: Value, - obj_type: RUnion, - name: str, - arg_values: List[Value], - return_rtype: Optional[RType], - line: int, - arg_kinds: Optional[List[int]], - arg_names: Optional[List[Optional[str]]]) -> Value: - # Union method call needs a return_rtype for the type of the output register. - # If we don't have one, use object_rprimitive. - return_rtype = return_rtype or object_rprimitive - - def call_union_item(value: Value) -> Value: - return self.gen_method_call(value, name, arg_values, return_rtype, line, - arg_kinds, arg_names) - - return self.decompose_union_helper(base, obj_type, return_rtype, call_union_item, line) - def translate_cast_expr(self, expr: CastExpr) -> Value: src = self.accept(expr.expr) target_type = self.type_to_rtype(expr.type) return self.coerce(src, target_type, expr.line) - def shortcircuit_helper(self, op: str, - expr_type: RType, - left: Callable[[], Value], - right: Callable[[], Value], line: int) -> Value: - # Having actual Phi nodes would be really nice here! 
- target = self.alloc_temp(expr_type) - # left_body takes the value of the left side, right_body the right - left_body, right_body, next = BasicBlock(), BasicBlock(), BasicBlock() - # true_body is taken if the left is true, false_body if it is false. - # For 'and' the value is the right side if the left is true, and for 'or' - # it is the right side if the left is false. - true_body, false_body = ( - (right_body, left_body) if op == 'and' else (left_body, right_body)) - - left_value = left() - self.add_bool_branch(left_value, true_body, false_body) - - self.activate_block(left_body) - left_coerced = self.coerce(left_value, expr_type, line) - self.add(Assign(target, left_coerced)) - self.goto(next) - - self.activate_block(right_body) - right_value = right() - right_coerced = self.coerce(right_value, expr_type, line) - self.add(Assign(target, right_coerced)) - self.goto(next) - - self.activate_block(next) - return target - def shortcircuit_expr(self, expr: OpExpr) -> Value: return self.shortcircuit_helper( expr.op, self.node_type(expr), @@ -1764,23 +1363,6 @@ def visit_conditional_expr(self, expr: ConditionalExpr) -> Value: return target - def translate_special_method_call(self, - base_reg: Value, - name: str, - args: List[Value], - result_type: Optional[RType], - line: int) -> Optional[Value]: - """Translate a method call which is handled nongenerically. - - These are special in the sense that we have code generated specifically for them. - They tend to be method calls which have equivalents in C that are more direct - than calling with the PyObject api. - - Return None if no translation found; otherwise return the target register. 
- """ - ops = method_ops.get(name, []) - return self.matching_primitive_op(ops, [base_reg] + args, line, result_type=result_type) - def visit_list_expr(self, expr: ListExpr) -> Value: return self._visit_list_display(expr.items, expr.line) @@ -1930,43 +1512,6 @@ def go(i: int, prev: Value) -> Value: return go(0, self.accept(e.operands[0])) - def add_bool_branch(self, value: Value, true: BasicBlock, false: BasicBlock) -> None: - if is_runtime_subtype(value.type, int_rprimitive): - zero = self.add(LoadInt(0)) - value = self.binary_op(value, zero, '!=', value.line) - elif is_same_type(value.type, list_rprimitive): - length = self.primitive_op(list_len_op, [value], value.line) - zero = self.add(LoadInt(0)) - value = self.binary_op(length, zero, '!=', value.line) - elif (isinstance(value.type, RInstance) and value.type.class_ir.is_ext_class - and value.type.class_ir.has_method('__bool__')): - # Directly call the __bool__ method on classes that have it. - value = self.gen_method_call(value, '__bool__', [], bool_rprimitive, value.line) - else: - value_type = optional_value_type(value.type) - if value_type is not None: - is_none = self.binary_op(value, self.none_object(), 'is not', value.line) - branch = Branch(is_none, true, false, Branch.BOOL_EXPR) - self.add(branch) - always_truthy = False - if isinstance(value_type, RInstance): - # check whether X.__bool__ is always just the default (object.__bool__) - if (not value_type.class_ir.has_method('__bool__') - and value_type.class_ir.is_method_final('__bool__')): - always_truthy = True - - if not always_truthy: - # Optional[X] where X may be falsey and requires a check - branch.true = self.new_block() - # unbox_or_cast instead of coerce because we want the - # type to change even if it is a subtype. 
- remaining = self.unbox_or_cast(value, value_type, value.line) - self.add_bool_branch(remaining, true, false) - return - elif not is_same_type(value.type, bool_rprimitive): - value = self.primitive_op(bool_op, [value], value.line) - self.add(Branch(value, true, false, Branch.BOOL_EXPR)) - def visit_nonlocal_decl(self, o: NonlocalDecl) -> None: pass @@ -2734,27 +2279,6 @@ def enter(self, fn_info: Union[FuncInfo, str] = '') -> None: self.blocks.append([]) self.new_block() - def activate_block(self, block: BasicBlock) -> None: - if self.blocks[-1]: - assert isinstance(self.blocks[-1][-1].ops[-1], ControlOp) - - block.error_handler = self.error_handlers[-1] - self.blocks[-1].append(block) - - def goto_and_activate(self, block: BasicBlock) -> None: - self.goto(block) - self.activate_block(block) - - def new_block(self) -> BasicBlock: - block = BasicBlock() - self.activate_block(block) - return block - - def goto_new_block(self) -> BasicBlock: - block = BasicBlock() - self.goto_and_activate(block) - return block - def leave(self) -> Tuple[List[BasicBlock], Environment, RType, FuncInfo]: blocks = self.blocks.pop() env = self.environments.pop() @@ -2766,36 +2290,6 @@ def leave(self) -> Tuple[List[BasicBlock], Environment, RType, FuncInfo]: self.fn_info = self.fn_infos[-1] return blocks, env, ret_type, fn_info - def add(self, op: Op) -> Value: - if self.blocks[-1][-1].ops: - assert not isinstance(self.blocks[-1][-1].ops[-1], ControlOp), ( - "Can't add to finished block") - - self.blocks[-1][-1].ops.append(op) - if isinstance(op, RegisterOp): - self.environment.add_op(op) - return op - - def goto(self, target: BasicBlock) -> None: - if not self.blocks[-1][-1].ops or not isinstance(self.blocks[-1][-1].ops[-1], ControlOp): - self.add(Goto(target)) - - def primitive_op(self, desc: OpDescription, args: List[Value], line: int) -> Value: - assert desc.result_type is not None - coerced = [] - for i, arg in enumerate(args): - formal_type = self.op_arg_type(desc, i) - arg = 
self.coerce(arg, formal_type, line) - coerced.append(arg) - target = self.add(PrimitiveOp(coerced, desc, line)) - return target - - def op_arg_type(self, desc: OpDescription, n: int) -> RType: - if n >= len(desc.arg_types): - assert desc.is_var_arg - return desc.arg_types[-1] - return desc.arg_types[n] - @overload def accept(self, node: Expression) -> Value: ... @@ -2822,9 +2316,6 @@ def accept(self, node: Union[Statement, Expression]) -> Optional[Value]: pass return None - def alloc_temp(self, type: RType) -> Register: - return self.environment.add_temp(type) - def type_to_rtype(self, typ: Optional[Type]) -> RType: return self.mapper.type_to_rtype(typ) @@ -2837,59 +2328,9 @@ def node_type(self, node: Expression) -> RType: mypy_type = self.types[node] return self.type_to_rtype(mypy_type) - def box(self, src: Value) -> Value: - if src.type.is_unboxed: - return self.add(Box(src)) - else: - return src - - def unbox_or_cast(self, src: Value, target_type: RType, line: int) -> Value: - if target_type.is_unboxed: - return self.add(Unbox(src, target_type, line)) - else: - return self.add(Cast(src, target_type, line)) - def box_expr(self, expr: Expression) -> Value: return self.box(self.accept(expr)) - def make_dict(self, key_value_pairs: Sequence[DictEntry], line: int) -> Value: - result = None # type: Union[Value, None] - initial_items = [] # type: List[Value] - for key, value in key_value_pairs: - if key is not None: - # key:value - if result is None: - initial_items.extend((key, value)) - continue - - self.translate_special_method_call( - result, - '__setitem__', - [key, value], - result_type=None, - line=line) - else: - # **value - if result is None: - result = self.primitive_op(new_dict_op, initial_items, line) - - self.primitive_op( - dict_update_in_display_op, - [result, value], - line=line - ) - - if result is None: - result = self.primitive_op(new_dict_op, initial_items, line) - - return result - - def none(self) -> Value: - return self.add(PrimitiveOp([], 
none_op, line=-1)) - - def none_object(self) -> Value: - return self.add(PrimitiveOp([], none_object_op, line=-1)) - def add_var_to_env_class(self, var: SymbolNode, rtype: RType, @@ -2938,121 +2379,11 @@ def load_global_str(self, name: str, line: int) -> Value: def load_globals_dict(self) -> Value: return self.add(LoadStatic(dict_rprimitive, 'globals', self.module_name)) - def literal_static_name(self, value: Union[int, float, complex, str, bytes]) -> str: - return self.mapper.literal_static_name(self.current_module, value) - - def load_static_int(self, value: int) -> Value: - """Loads a static integer Python 'int' object into a register.""" - static_symbol = self.literal_static_name(value) - return self.add(LoadStatic(int_rprimitive, static_symbol, ann=value)) - - def load_static_float(self, value: float) -> Value: - """Loads a static float value into a register.""" - static_symbol = self.literal_static_name(value) - return self.add(LoadStatic(float_rprimitive, static_symbol, ann=value)) - - def load_static_bytes(self, value: bytes) -> Value: - """Loads a static bytes value into a register.""" - static_symbol = self.literal_static_name(value) - return self.add(LoadStatic(object_rprimitive, static_symbol, ann=value)) - - def load_static_complex(self, value: complex) -> Value: - """Loads a static complex value into a register.""" - static_symbol = self.literal_static_name(value) - return self.add(LoadStatic(object_rprimitive, static_symbol, ann=value)) - - def load_static_unicode(self, value: str) -> Value: - """Loads a static unicode value into a register. - - This is useful for more than just unicode literals; for example, method calls - also require a PyObject * form for the name of the method. 
- """ - static_symbol = self.literal_static_name(value) - return self.add(LoadStatic(str_rprimitive, static_symbol, ann=value)) - - def load_module(self, name: str) -> Value: - return self.add(LoadStatic(object_rprimitive, name, namespace=NAMESPACE_MODULE)) - def load_module_attr_by_fullname(self, fullname: str, line: int) -> Value: module, _, name = fullname.rpartition('.') left = self.load_module(module) return self.py_get_attr(left, name, line) - def load_native_type_object(self, fullname: str) -> Value: - module, name = fullname.rsplit('.', 1) - return self.add(LoadStatic(object_rprimitive, name, module, NAMESPACE_TYPE)) - - def coerce(self, src: Value, target_type: RType, line: int, force: bool = False) -> Value: - """Generate a coercion/cast from one type to other (only if needed). - - For example, int -> object boxes the source int; int -> int emits nothing; - object -> int unboxes the object. All conversions preserve object value. - - If force is true, always generate an op (even if it is just an assignment) so - that the result will have exactly target_type as the type. - - Returns the register with the converted value (may be same as src). - """ - if src.type.is_unboxed and not target_type.is_unboxed: - return self.box(src) - if ((src.type.is_unboxed and target_type.is_unboxed) - and not is_runtime_subtype(src.type, target_type)): - # To go from one unboxed type to another, we go through a boxed - # in-between value, for simplicity. 
- tmp = self.box(src) - return self.unbox_or_cast(tmp, target_type, line) - if ((not src.type.is_unboxed and target_type.is_unboxed) - or not is_subtype(src.type, target_type)): - return self.unbox_or_cast(src, target_type, line) - elif force: - tmp = self.alloc_temp(target_type) - self.add(Assign(tmp, src)) - return tmp - return src - - def native_args_to_positional(self, - args: Sequence[Value], - arg_kinds: List[int], - arg_names: Sequence[Optional[str]], - sig: FuncSignature, - line: int) -> List[Value]: - """Prepare arguments for a native call. - - Given args/kinds/names and a target signature for a native call, map - keyword arguments to their appropriate place in the argument list, - fill in error values for unspecified default arguments, - package arguments that will go into *args/**kwargs into a tuple/dict, - and coerce arguments to the appropriate type. - """ - - sig_arg_kinds = [arg.kind for arg in sig.args] - sig_arg_names = [arg.name for arg in sig.args] - formal_to_actual = map_actuals_to_formals(arg_kinds, - arg_names, - sig_arg_kinds, - sig_arg_names, - lambda n: AnyType(TypeOfAny.special_form)) - - # Flatten out the arguments, loading error values for default - # arguments, constructing tuples/dicts for star args, and - # coercing everything to the expected type. 
- output_args = [] - for lst, arg in zip(formal_to_actual, sig.args): - output_arg = None - if arg.kind == ARG_STAR: - output_arg = self.primitive_op(new_tuple_op, [args[i] for i in lst], line) - elif arg.kind == ARG_STAR2: - dict_entries = [(self.load_static_unicode(cast(str, arg_names[i])), args[i]) - for i in lst] - output_arg = self.make_dict(dict_entries, line) - elif not lst: - output_arg = self.add(LoadErrorValue(arg.type, is_borrowed=True)) - else: - output_arg = args[lst[0]] - output_args.append(self.coerce(output_arg, arg.type, line)) - - return output_args - # Lacks a good type because there wasn't a reasonable type in 3.5 :( def catch_errors(self, line: int) -> Any: return catch_errors(self.module_path, line) diff --git a/mypyc/ir_builder.py b/mypyc/ir_builder.py new file mode 100644 index 000000000000..aa4b273bd6aa --- /dev/null +++ b/mypyc/ir_builder.py @@ -0,0 +1,729 @@ +"""A "low-level" IR builder class. + +LowLevelIRBuilder provides core abstractions we use for constructing +IR as well as a number of higher-level ones (accessing attributes, +calling functions and methods, and coercing between types, for +example). The core principle of the low-level IR builder is that all +of its facilities operate solely on the IR level and not the AST +level---it has *no knowledge* of mypy types or expressions. + +Currently LowLevelIRBuilder does not have a clean API and the +higher-level IR builder in genops uses LowLevelIRBuilder by inheriting +from it. A next step is to fix this. 
+""" + +from typing import ( + Callable, List, Tuple, Optional, Union, Sequence, cast +) + +from mypy.nodes import ARG_POS, ARG_NAMED, ARG_STAR, ARG_STAR2, op_methods +from mypy.types import AnyType, TypeOfAny +from mypy.checkexpr import map_actuals_to_formals + +from mypyc.ops import ( + BasicBlock, Environment, Op, LoadInt, RType, Value, Register, + Assign, Branch, Goto, Call, Box, Unbox, Cast, ClassIR, RInstance, GetAttr, + LoadStatic, MethodCall, int_rprimitive, float_rprimitive, bool_rprimitive, list_rprimitive, + str_rprimitive, is_none_rprimitive, object_rprimitive, + PrimitiveOp, ControlOp, OpDescription, RegisterOp, + FuncSignature, NAMESPACE_TYPE, NAMESPACE_MODULE, + LoadErrorValue, FuncDecl, RUnion, optional_value_type, all_concrete_classes +) +from mypyc.common import ( + FAST_ISINSTANCE_MAX_SUBCLASSES +) +from mypyc.ops_primitive import binary_ops, unary_ops, method_ops +from mypyc.ops_list import ( + list_extend_op, list_len_op, new_list_op +) +from mypyc.ops_tuple import list_tuple_op, new_tuple_op +from mypyc.ops_dict import ( + new_dict_op, dict_update_in_display_op, +) +from mypyc.ops_misc import ( + none_op, none_object_op, false_op, + py_getattr_op, py_call_op, py_call_with_kwargs_op, py_method_call_op, + fast_isinstance_op, bool_op, type_is_op, +) +from mypyc.rt_subtype import is_runtime_subtype +from mypyc.subtype import is_subtype +from mypyc.sametype import is_same_type +from mypyc.genopsmapper import Mapper + + +DictEntry = Tuple[Optional[Value], Value] + + +class LowLevelIRBuilder: + def __init__( + self, + current_module: str, + mapper: Mapper, + ) -> None: + self.current_module = current_module + self.mapper = mapper + self.environment = Environment() + self.environments = [self.environment] + self.blocks = [] # type: List[List[BasicBlock]] + # Stack of except handler entry blocks + self.error_handlers = [None] # type: List[Optional[BasicBlock]] + + def add(self, op: Op) -> Value: + if self.blocks[-1][-1].ops: + assert not 
isinstance(self.blocks[-1][-1].ops[-1], ControlOp), ( + "Can't add to finished block") + + self.blocks[-1][-1].ops.append(op) + if isinstance(op, RegisterOp): + self.environment.add_op(op) + return op + + def goto(self, target: BasicBlock) -> None: + if not self.blocks[-1][-1].ops or not isinstance(self.blocks[-1][-1].ops[-1], ControlOp): + self.add(Goto(target)) + + def activate_block(self, block: BasicBlock) -> None: + if self.blocks[-1]: + assert isinstance(self.blocks[-1][-1].ops[-1], ControlOp) + + block.error_handler = self.error_handlers[-1] + self.blocks[-1].append(block) + + def goto_and_activate(self, block: BasicBlock) -> None: + self.goto(block) + self.activate_block(block) + + def new_block(self) -> BasicBlock: + block = BasicBlock() + self.activate_block(block) + return block + + def goto_new_block(self) -> BasicBlock: + block = BasicBlock() + self.goto_and_activate(block) + return block + + ## + + def get_native_type(self, cls: ClassIR) -> Value: + fullname = '%s.%s' % (cls.module_name, cls.name) + return self.load_native_type_object(fullname) + + def primitive_op(self, desc: OpDescription, args: List[Value], line: int) -> Value: + assert desc.result_type is not None + coerced = [] + for i, arg in enumerate(args): + formal_type = self.op_arg_type(desc, i) + arg = self.coerce(arg, formal_type, line) + coerced.append(arg) + target = self.add(PrimitiveOp(coerced, desc, line)) + return target + + def alloc_temp(self, type: RType) -> Register: + return self.environment.add_temp(type) + + def op_arg_type(self, desc: OpDescription, n: int) -> RType: + if n >= len(desc.arg_types): + assert desc.is_var_arg + return desc.arg_types[-1] + return desc.arg_types[n] + + def box(self, src: Value) -> Value: + if src.type.is_unboxed: + return self.add(Box(src)) + else: + return src + + def unbox_or_cast(self, src: Value, target_type: RType, line: int) -> Value: + if target_type.is_unboxed: + return self.add(Unbox(src, target_type, line)) + else: + return 
self.add(Cast(src, target_type, line)) + + def coerce(self, src: Value, target_type: RType, line: int, force: bool = False) -> Value: + """Generate a coercion/cast from one type to other (only if needed). + + For example, int -> object boxes the source int; int -> int emits nothing; + object -> int unboxes the object. All conversions preserve object value. + + If force is true, always generate an op (even if it is just an assignment) so + that the result will have exactly target_type as the type. + + Returns the register with the converted value (may be same as src). + """ + if src.type.is_unboxed and not target_type.is_unboxed: + return self.box(src) + if ((src.type.is_unboxed and target_type.is_unboxed) + and not is_runtime_subtype(src.type, target_type)): + # To go from one unboxed type to another, we go through a boxed + # in-between value, for simplicity. + tmp = self.box(src) + return self.unbox_or_cast(tmp, target_type, line) + if ((not src.type.is_unboxed and target_type.is_unboxed) + or not is_subtype(src.type, target_type)): + return self.unbox_or_cast(src, target_type, line) + elif force: + tmp = self.alloc_temp(target_type) + self.add(Assign(tmp, src)) + return tmp + return src + + def none(self) -> Value: + return self.add(PrimitiveOp([], none_op, line=-1)) + + def none_object(self) -> Value: + return self.add(PrimitiveOp([], none_object_op, line=-1)) + + def get_attr(self, obj: Value, attr: str, result_type: RType, line: int) -> Value: + if (isinstance(obj.type, RInstance) and obj.type.class_ir.is_ext_class + and obj.type.class_ir.has_attr(attr)): + return self.add(GetAttr(obj, attr, line)) + elif isinstance(obj.type, RUnion): + return self.union_get_attr(obj, obj.type, attr, result_type, line) + else: + return self.py_get_attr(obj, attr, line) + + def union_get_attr(self, + obj: Value, + rtype: RUnion, + attr: str, + result_type: RType, + line: int) -> Value: + def get_item_attr(value: Value) -> Value: + return self.get_attr(value, attr, result_type, 
line) + + return self.decompose_union_helper(obj, rtype, result_type, get_item_attr, line) + + def decompose_union_helper(self, + obj: Value, + rtype: RUnion, + result_type: RType, + process_item: Callable[[Value], Value], + line: int) -> Value: + """Generate isinstance() + specialized operations for union items. + + Say, for Union[A, B] generate ops resembling this (pseudocode): + + if isinstance(obj, A): + result = + else: + result = + + Args: + obj: value with a union type + rtype: the union type + result_type: result of the operation + process_item: callback to generate op for a single union item (arg is coerced + to union item type) + line: line number + """ + # TODO: Optimize cases where a single operation can handle multiple union items + # (say a method is implemented in a common base class) + fast_items = [] + rest_items = [] + for item in rtype.items: + if isinstance(item, RInstance): + fast_items.append(item) + else: + # For everything but RInstance we fall back to C API + rest_items.append(item) + exit_block = BasicBlock() + result = self.alloc_temp(result_type) + for i, item in enumerate(fast_items): + more_types = i < len(fast_items) - 1 or rest_items + if more_types: + # We are not at the final item so we need one more branch + op = self.isinstance_native(obj, item.class_ir, line) + true_block, false_block = BasicBlock(), BasicBlock() + self.add_bool_branch(op, true_block, false_block) + self.activate_block(true_block) + coerced = self.coerce(obj, item, line) + temp = process_item(coerced) + temp2 = self.coerce(temp, result_type, line) + self.add(Assign(result, temp2)) + self.goto(exit_block) + if more_types: + self.activate_block(false_block) + if rest_items: + # For everything else we use generic operation. Use force=True to drop the + # union type. 
+ coerced = self.coerce(obj, object_rprimitive, line, force=True) + temp = process_item(coerced) + temp2 = self.coerce(temp, result_type, line) + self.add(Assign(result, temp2)) + self.goto(exit_block) + self.activate_block(exit_block) + return result + + def isinstance_helper(self, obj: Value, class_irs: List[ClassIR], line: int) -> Value: + """Fast path for isinstance() that checks against a list of native classes.""" + if not class_irs: + return self.primitive_op(false_op, [], line) + ret = self.isinstance_native(obj, class_irs[0], line) + for class_ir in class_irs[1:]: + def other() -> Value: + return self.isinstance_native(obj, class_ir, line) + ret = self.shortcircuit_helper('or', bool_rprimitive, lambda: ret, other, line) + return ret + + def isinstance_native(self, obj: Value, class_ir: ClassIR, line: int) -> Value: + """Fast isinstance() check for a native class. + + If there three or less concrete (non-trait) classes among the class and all + its children, use even faster type comparison checks `type(obj) is typ`. + """ + concrete = all_concrete_classes(class_ir) + if concrete is None or len(concrete) > FAST_ISINSTANCE_MAX_SUBCLASSES + 1: + return self.primitive_op(fast_isinstance_op, + [obj, self.get_native_type(class_ir)], + line) + if not concrete: + # There can't be any concrete instance that matches this. 
+ return self.primitive_op(false_op, [], line) + type_obj = self.get_native_type(concrete[0]) + ret = self.primitive_op(type_is_op, [obj, type_obj], line) + for c in concrete[1:]: + def other() -> Value: + return self.primitive_op(type_is_op, [obj, self.get_native_type(c)], line) + ret = self.shortcircuit_helper('or', bool_rprimitive, lambda: ret, other, line) + return ret + + def py_get_attr(self, obj: Value, attr: str, line: int) -> Value: + key = self.load_static_unicode(attr) + return self.add(PrimitiveOp([obj, key], py_getattr_op, line)) + + def py_call(self, + function: Value, + arg_values: List[Value], + line: int, + arg_kinds: Optional[List[int]] = None, + arg_names: Optional[Sequence[Optional[str]]] = None) -> Value: + """Use py_call_op or py_call_with_kwargs_op for function call.""" + # If all arguments are positional, we can use py_call_op. + if (arg_kinds is None) or all(kind == ARG_POS for kind in arg_kinds): + return self.primitive_op(py_call_op, [function] + arg_values, line) + + # Otherwise fallback to py_call_with_kwargs_op. + assert arg_names is not None + + pos_arg_values = [] + kw_arg_key_value_pairs = [] # type: List[DictEntry] + star_arg_values = [] + for value, kind, name in zip(arg_values, arg_kinds, arg_names): + if kind == ARG_POS: + pos_arg_values.append(value) + elif kind == ARG_NAMED: + assert name is not None + key = self.load_static_unicode(name) + kw_arg_key_value_pairs.append((key, value)) + elif kind == ARG_STAR: + star_arg_values.append(value) + elif kind == ARG_STAR2: + # NOTE: mypy currently only supports a single ** arg, but python supports multiple. + # This code supports multiple primarily to make the logic easier to follow. + kw_arg_key_value_pairs.append((None, value)) + else: + assert False, ("Argument kind should not be possible:", kind) + + if len(star_arg_values) == 0: + # We can directly construct a tuple if there are no star args. 
+ pos_args_tuple = self.primitive_op(new_tuple_op, pos_arg_values, line) + else: + # Otherwise we construct a list and call extend it with the star args, since tuples + # don't have an extend method. + pos_args_list = self.primitive_op(new_list_op, pos_arg_values, line) + for star_arg_value in star_arg_values: + self.primitive_op(list_extend_op, [pos_args_list, star_arg_value], line) + pos_args_tuple = self.primitive_op(list_tuple_op, [pos_args_list], line) + + kw_args_dict = self.make_dict(kw_arg_key_value_pairs, line) + + return self.primitive_op( + py_call_with_kwargs_op, [function, pos_args_tuple, kw_args_dict], line) + + def py_method_call(self, + obj: Value, + method_name: str, + arg_values: List[Value], + line: int, + arg_kinds: Optional[List[int]], + arg_names: Optional[Sequence[Optional[str]]]) -> Value: + if (arg_kinds is None) or all(kind == ARG_POS for kind in arg_kinds): + method_name_reg = self.load_static_unicode(method_name) + return self.primitive_op(py_method_call_op, [obj, method_name_reg] + arg_values, line) + else: + method = self.py_get_attr(obj, method_name, line) + return self.py_call(method, arg_values, line, arg_kinds=arg_kinds, arg_names=arg_names) + + def call(self, decl: FuncDecl, args: Sequence[Value], + arg_kinds: List[int], + arg_names: Sequence[Optional[str]], + line: int) -> Value: + # Normalize args to positionals. + args = self.native_args_to_positional( + args, arg_kinds, arg_names, decl.sig, line) + return self.add(Call(decl, args, line)) + + def native_args_to_positional(self, + args: Sequence[Value], + arg_kinds: List[int], + arg_names: Sequence[Optional[str]], + sig: FuncSignature, + line: int) -> List[Value]: + """Prepare arguments for a native call. 
+ + Given args/kinds/names and a target signature for a native call, map + keyword arguments to their appropriate place in the argument list, + fill in error values for unspecified default arguments, + package arguments that will go into *args/**kwargs into a tuple/dict, + and coerce arguments to the appropriate type. + """ + + sig_arg_kinds = [arg.kind for arg in sig.args] + sig_arg_names = [arg.name for arg in sig.args] + formal_to_actual = map_actuals_to_formals(arg_kinds, + arg_names, + sig_arg_kinds, + sig_arg_names, + lambda n: AnyType(TypeOfAny.special_form)) + + # Flatten out the arguments, loading error values for default + # arguments, constructing tuples/dicts for star args, and + # coercing everything to the expected type. + output_args = [] + for lst, arg in zip(formal_to_actual, sig.args): + output_arg = None + if arg.kind == ARG_STAR: + output_arg = self.primitive_op(new_tuple_op, [args[i] for i in lst], line) + elif arg.kind == ARG_STAR2: + dict_entries = [(self.load_static_unicode(cast(str, arg_names[i])), args[i]) + for i in lst] + output_arg = self.make_dict(dict_entries, line) + elif not lst: + output_arg = self.add(LoadErrorValue(arg.type, is_borrowed=True)) + else: + output_arg = args[lst[0]] + output_args.append(self.coerce(output_arg, arg.type, line)) + + return output_args + + def make_dict(self, key_value_pairs: Sequence[DictEntry], line: int) -> Value: + result = None # type: Union[Value, None] + initial_items = [] # type: List[Value] + for key, value in key_value_pairs: + if key is not None: + # key:value + if result is None: + initial_items.extend((key, value)) + continue + + self.translate_special_method_call( + result, + '__setitem__', + [key, value], + result_type=None, + line=line) + else: + # **value + if result is None: + result = self.primitive_op(new_dict_op, initial_items, line) + + self.primitive_op( + dict_update_in_display_op, + [result, value], + line=line + ) + + if result is None: + result = self.primitive_op(new_dict_op, 
initial_items, line) + + return result + + # Loading stuff + def literal_static_name(self, value: Union[int, float, complex, str, bytes]) -> str: + return self.mapper.literal_static_name(self.current_module, value) + + def load_static_int(self, value: int) -> Value: + """Loads a static integer Python 'int' object into a register.""" + static_symbol = self.literal_static_name(value) + return self.add(LoadStatic(int_rprimitive, static_symbol, ann=value)) + + def load_static_float(self, value: float) -> Value: + """Loads a static float value into a register.""" + static_symbol = self.literal_static_name(value) + return self.add(LoadStatic(float_rprimitive, static_symbol, ann=value)) + + def load_static_bytes(self, value: bytes) -> Value: + """Loads a static bytes value into a register.""" + static_symbol = self.literal_static_name(value) + return self.add(LoadStatic(object_rprimitive, static_symbol, ann=value)) + + def load_static_complex(self, value: complex) -> Value: + """Loads a static complex value into a register.""" + static_symbol = self.literal_static_name(value) + return self.add(LoadStatic(object_rprimitive, static_symbol, ann=value)) + + def load_static_unicode(self, value: str) -> Value: + """Loads a static unicode value into a register. + + This is useful for more than just unicode literals; for example, method calls + also require a PyObject * form for the name of the method. 
+ """ + static_symbol = self.literal_static_name(value) + return self.add(LoadStatic(str_rprimitive, static_symbol, ann=value)) + + def load_module(self, name: str) -> Value: + return self.add(LoadStatic(object_rprimitive, name, namespace=NAMESPACE_MODULE)) + + def load_native_type_object(self, fullname: str) -> Value: + module, name = fullname.rsplit('.', 1) + return self.add(LoadStatic(object_rprimitive, name, module, NAMESPACE_TYPE)) + + def matching_primitive_op(self, + candidates: List[OpDescription], + args: List[Value], + line: int, + result_type: Optional[RType] = None) -> Optional[Value]: + # Find the highest-priority primitive op that matches. + matching = None # type: Optional[OpDescription] + for desc in candidates: + if len(desc.arg_types) != len(args): + continue + if all(is_subtype(actual.type, formal) + for actual, formal in zip(args, desc.arg_types)): + if matching: + assert matching.priority != desc.priority, 'Ambiguous:\n1) %s\n2) %s' % ( + matching, desc) + if desc.priority > matching.priority: + matching = desc + else: + matching = desc + if matching: + target = self.primitive_op(matching, args, line) + if result_type and not is_runtime_subtype(target.type, result_type): + if is_none_rprimitive(result_type): + # Special case None return. The actual result may actually be a bool + # and so we can't just coerce it. + target = self.none() + else: + target = self.coerce(target, result_type, line) + return target + return None + + def binary_op(self, + lreg: Value, + rreg: Value, + expr_op: str, + line: int) -> Value: + # Special case == and != when we can resolve the method call statically. 
+ value = None + if expr_op in ('==', '!='): + value = self.translate_eq_cmp(lreg, rreg, expr_op, line) + if value is not None: + return value + + ops = binary_ops.get(expr_op, []) + target = self.matching_primitive_op(ops, [lreg, rreg], line) + assert target, 'Unsupported binary operation: %s' % expr_op + return target + + def unary_op(self, + lreg: Value, + expr_op: str, + line: int) -> Value: + ops = unary_ops.get(expr_op, []) + target = self.matching_primitive_op(ops, [lreg], line) + assert target, 'Unsupported unary operation: %s' % expr_op + return target + + def shortcircuit_helper(self, op: str, + expr_type: RType, + left: Callable[[], Value], + right: Callable[[], Value], line: int) -> Value: + # Having actual Phi nodes would be really nice here! + target = self.alloc_temp(expr_type) + # left_body takes the value of the left side, right_body the right + left_body, right_body, next = BasicBlock(), BasicBlock(), BasicBlock() + # true_body is taken if the left is true, false_body if it is false. + # For 'and' the value is the right side if the left is true, and for 'or' + # it is the right side if the left is false. 
+ true_body, false_body = ( + (right_body, left_body) if op == 'and' else (left_body, right_body)) + + left_value = left() + self.add_bool_branch(left_value, true_body, false_body) + + self.activate_block(left_body) + left_coerced = self.coerce(left_value, expr_type, line) + self.add(Assign(target, left_coerced)) + self.goto(next) + + self.activate_block(right_body) + right_value = right() + right_coerced = self.coerce(right_value, expr_type, line) + self.add(Assign(target, right_coerced)) + self.goto(next) + + self.activate_block(next) + return target + + def add_bool_branch(self, value: Value, true: BasicBlock, false: BasicBlock) -> None: + if is_runtime_subtype(value.type, int_rprimitive): + zero = self.add(LoadInt(0)) + value = self.binary_op(value, zero, '!=', value.line) + elif is_same_type(value.type, list_rprimitive): + length = self.primitive_op(list_len_op, [value], value.line) + zero = self.add(LoadInt(0)) + value = self.binary_op(length, zero, '!=', value.line) + elif (isinstance(value.type, RInstance) and value.type.class_ir.is_ext_class + and value.type.class_ir.has_method('__bool__')): + # Directly call the __bool__ method on classes that have it. + value = self.gen_method_call(value, '__bool__', [], bool_rprimitive, value.line) + else: + value_type = optional_value_type(value.type) + if value_type is not None: + is_none = self.binary_op(value, self.none_object(), 'is not', value.line) + branch = Branch(is_none, true, false, Branch.BOOL_EXPR) + self.add(branch) + always_truthy = False + if isinstance(value_type, RInstance): + # check whether X.__bool__ is always just the default (object.__bool__) + if (not value_type.class_ir.has_method('__bool__') + and value_type.class_ir.is_method_final('__bool__')): + always_truthy = True + + if not always_truthy: + # Optional[X] where X may be falsey and requires a check + branch.true = self.new_block() + # unbox_or_cast instead of coerce because we want the + # type to change even if it is a subtype. 
+ remaining = self.unbox_or_cast(value, value_type, value.line) + self.add_bool_branch(remaining, true, false) + return + elif not is_same_type(value.type, bool_rprimitive): + value = self.primitive_op(bool_op, [value], value.line) + self.add(Branch(value, true, false, Branch.BOOL_EXPR)) + + def translate_special_method_call(self, + base_reg: Value, + name: str, + args: List[Value], + result_type: Optional[RType], + line: int) -> Optional[Value]: + """Translate a method call which is handled nongenerically. + + These are special in the sense that we have code generated specifically for them. + They tend to be method calls which have equivalents in C that are more direct + than calling with the PyObject api. + + Return None if no translation found; otherwise return the target register. + """ + ops = method_ops.get(name, []) + return self.matching_primitive_op(ops, [base_reg] + args, line, result_type=result_type) + + def translate_eq_cmp(self, + lreg: Value, + rreg: Value, + expr_op: str, + line: int) -> Optional[Value]: + ltype = lreg.type + rtype = rreg.type + if not (isinstance(ltype, RInstance) and ltype == rtype): + return None + + class_ir = ltype.class_ir + # Check whether any subclasses of the operand redefines __eq__ + # or it might be redefined in a Python parent class or by + # dataclasses + cmp_varies_at_runtime = ( + not class_ir.is_method_final('__eq__') + or not class_ir.is_method_final('__ne__') + or class_ir.inherits_python + or class_ir.is_augmented + ) + + if cmp_varies_at_runtime: + # We might need to call left.__eq__(right) or right.__eq__(left) + # depending on which is the more specific type. + return None + + if not class_ir.has_method('__eq__'): + # There's no __eq__ defined, so just use object identity. 
+ identity_ref_op = 'is' if expr_op == '==' else 'is not' + return self.binary_op(lreg, rreg, identity_ref_op, line) + + return self.gen_method_call( + lreg, + op_methods[expr_op], + [rreg], + ltype, + line + ) + + def gen_method_call(self, + base: Value, + name: str, + arg_values: List[Value], + return_rtype: Optional[RType], + line: int, + arg_kinds: Optional[List[int]] = None, + arg_names: Optional[List[Optional[str]]] = None) -> Value: + # If arg_kinds contains values other than arg_pos and arg_named, then fallback to + # Python method call. + if (arg_kinds is not None + and not all(kind in (ARG_POS, ARG_NAMED) for kind in arg_kinds)): + return self.py_method_call(base, name, arg_values, base.line, arg_kinds, arg_names) + + # If the base type is one of ours, do a MethodCall + if (isinstance(base.type, RInstance) and base.type.class_ir.is_ext_class + and not base.type.class_ir.builtin_base): + if base.type.class_ir.has_method(name): + decl = base.type.class_ir.method_decl(name) + if arg_kinds is None: + assert arg_names is None, "arg_kinds not present but arg_names is" + arg_kinds = [ARG_POS for _ in arg_values] + arg_names = [None for _ in arg_values] + else: + assert arg_names is not None, "arg_kinds present but arg_names is not" + + # Normalize args to positionals. 
+ assert decl.bound_sig + arg_values = self.native_args_to_positional( + arg_values, arg_kinds, arg_names, decl.bound_sig, line) + return self.add(MethodCall(base, name, arg_values, line)) + elif base.type.class_ir.has_attr(name): + function = self.add(GetAttr(base, name, line)) + return self.py_call(function, arg_values, line, + arg_kinds=arg_kinds, arg_names=arg_names) + + elif isinstance(base.type, RUnion): + return self.union_method_call(base, base.type, name, arg_values, return_rtype, line, + arg_kinds, arg_names) + + # Try to do a special-cased method call + if not arg_kinds or arg_kinds == [ARG_POS] * len(arg_values): + target = self.translate_special_method_call(base, name, arg_values, return_rtype, line) + if target: + return target + + # Fall back to Python method call + return self.py_method_call(base, name, arg_values, line, arg_kinds, arg_names) + + def union_method_call(self, + base: Value, + obj_type: RUnion, + name: str, + arg_values: List[Value], + return_rtype: Optional[RType], + line: int, + arg_kinds: Optional[List[int]], + arg_names: Optional[List[Optional[str]]]) -> Value: + # Union method call needs a return_rtype for the type of the output register. + # If we don't have one, use object_rprimitive. + return_rtype = return_rtype or object_rprimitive + + def call_union_item(value: Value) -> Value: + return self.gen_method_call(value, name, arg_values, return_rtype, line, + arg_kinds, arg_names) + + return self.decompose_union_helper(base, obj_type, return_rtype, call_union_item, line) From 09676c94ca885929322e0d5191df04d472d4df10 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Fri, 21 Feb 2020 17:31:36 +0000 Subject: [PATCH 099/117] Don't crash if module shadows special library module such as "typing" (#8405) Special cases a small set of modules so that they must be defined in typeshed, as otherwise it's likely that mypy can crash, as it assumes that various things are defined in very specific ways in these modules. Fixes #1876. 
--- mypy/build.py | 22 +++++++++++++++++++++- mypy/test/testpythoneval.py | 2 ++ test-data/unit/pythoneval.test | 9 +++++++++ 3 files changed, 32 insertions(+), 1 deletion(-) diff --git a/mypy/build.py b/mypy/build.py index 8d6636048e51..890bc06c4b84 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -36,7 +36,7 @@ from mypy.errors import Errors, CompileError, ErrorInfo, report_internal_error from mypy.util import ( DecodeError, decode_python_encoding, is_sub_path, get_mypy_comments, module_prefix, - read_py_file, hash_digest, + read_py_file, hash_digest, is_typeshed_file ) if TYPE_CHECKING: from mypy.report import Reports # Avoid unconditional slow import @@ -66,6 +66,18 @@ # that it's easy to enable this when running tests. DEBUG_FINE_GRAINED = False # type: Final +# These modules are special and should always come from typeshed. +CORE_BUILTIN_MODULES = { + 'builtins', + 'typing', + 'types', + 'typing_extensions', + 'mypy_extensions', + '_importlib_modulespec', + 'sys', + 'abc', +} + Graph = Dict[str, 'State'] @@ -2390,6 +2402,14 @@ def find_module_and_diagnose(manager: BuildManager, if is_sub_path(path, dir): # Silence errors in site-package dirs and typeshed follow_imports = 'silent' + if (id in CORE_BUILTIN_MODULES + and not is_typeshed_file(path) + and not options.use_builtins_fixtures + and not options.custom_typeshed_dir): + raise CompileError([ + 'mypy: "%s" shadows library module "%s"' % (path, id), + 'note: A user-defined top-level module with name "%s" is not supported' % id + ]) return (path, follow_imports) else: # Could not find a module. 
Typically the reason is a diff --git a/mypy/test/testpythoneval.py b/mypy/test/testpythoneval.py index 298269b9a71b..7586a3854eea 100644 --- a/mypy/test/testpythoneval.py +++ b/mypy/test/testpythoneval.py @@ -88,6 +88,8 @@ def test_python_evaluation(testcase: DataDrivenTestCase, cache_dir: str) -> None if line.startswith(test_temp_dir + os.sep): output.append(line[len(test_temp_dir + os.sep):].rstrip("\r\n")) else: + # Normalize paths so that the output is the same on Windows and Linux/macOS. + line = line.replace(test_temp_dir + os.sep, test_temp_dir + '/') output.append(line.rstrip("\r\n")) if returncode == 0: # Execute the program. diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index c07449a6f24b..e29692d24f88 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1501,3 +1501,12 @@ from typing import List async def foo() -> None: f = [] # type: List[Future[None]] await wait(f) + +[case testShadowTypingModule] +1 + '' +[file typing.py] +x = 0 +1 + '' +[out] +mypy: "tmp/typing.py" shadows library module "typing" +note: A user-defined top-level module with name "typing" is not supported From 310f9e3486054912e431904936c0516a0baaf3d5 Mon Sep 17 00:00:00 2001 From: Shantanu Date: Fri, 21 Feb 2020 12:01:25 -0800 Subject: [PATCH 100/117] mypy: fix typeshed CI (#8424) Resolves #8423 --- mypy/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/util.py b/mypy/util.py index f8d9368804ba..dd59f287c9ed 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -705,4 +705,4 @@ def format_error(self, n_errors: int, n_files: int, n_sources: int, def is_typeshed_file(file: str) -> bool: # gross, but no other clear way to tell - return 'typeshed' in os.path.normpath(file).split(os.sep) + return 'typeshed' in os.path.abspath(file).split(os.sep) From cb0ceb02848494f3dab2d976035f0d56682a7045 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ville=20Skytt=C3=A4?= Date: Fri, 21 Feb 2020 23:37:32 +0200 Subject: [PATCH 
101/117] Allow strict in config, explicitly disallow inline (#8192) --- mypy/config_parser.py | 30 ++++++++++++++++++------- mypy/main.py | 12 ++++++---- mypy/test/testfinegrained.py | 2 +- test-data/unit/check-flags.test | 16 +++++++++++++ test-data/unit/check-inline-config.test | 5 +++++ 5 files changed, 52 insertions(+), 13 deletions(-) diff --git a/mypy/config_parser.py b/mypy/config_parser.py index 6a94757f58ed..14dfedbd12a7 100644 --- a/mypy/config_parser.py +++ b/mypy/config_parser.py @@ -6,7 +6,7 @@ import re import sys -from typing import Any, Dict, List, Mapping, Optional, Tuple, TextIO +from typing import Any, Callable, Dict, List, Mapping, Optional, Tuple, TextIO from typing_extensions import Final from mypy import defaults @@ -88,7 +88,8 @@ def split_and_match_files(paths: str) -> List[str]: } # type: Final -def parse_config_file(options: Options, filename: Optional[str], +def parse_config_file(options: Options, set_strict_flags: Callable[[], None], + filename: Optional[str], stdout: Optional[TextIO] = None, stderr: Optional[TextIO] = None) -> None: """Parse a config file into an Options object. 
@@ -127,7 +128,7 @@ def parse_config_file(options: Options, filename: Optional[str], else: section = parser['mypy'] prefix = '%s: [%s]: ' % (file_read, 'mypy') - updates, report_dirs = parse_section(prefix, options, section, stderr) + updates, report_dirs = parse_section(prefix, options, set_strict_flags, section, stderr) for k, v in updates.items(): setattr(options, k, v) options.report_dirs.update(report_dirs) @@ -135,7 +136,8 @@ def parse_config_file(options: Options, filename: Optional[str], for name, section in parser.items(): if name.startswith('mypy-'): prefix = '%s: [%s]: ' % (file_read, name) - updates, report_dirs = parse_section(prefix, options, section, stderr) + updates, report_dirs = parse_section( + prefix, options, set_strict_flags, section, stderr) if report_dirs: print("%sPer-module sections should not specify reports (%s)" % (prefix, ', '.join(s + '_report' for s in sorted(report_dirs))), @@ -163,6 +165,7 @@ def parse_config_file(options: Options, filename: Optional[str], def parse_section(prefix: str, template: Options, + set_strict_flags: Callable[[], None], section: Mapping[str, str], stderr: TextIO = sys.stderr ) -> Tuple[Dict[str, object], Dict[str, str]]: @@ -205,9 +208,7 @@ def parse_section(prefix: str, template: Options, options_key = key[3:] invert = True elif key == 'strict': - print("%sStrict mode is not supported in configuration files: specify " - "individual flags instead (see 'mypy -h' for the list of flags enabled " - "in strict mode)" % prefix, file=stderr) + set_strict_flags() else: print("%sUnrecognized option: %s = %s" % (prefix, key, section[key]), file=stderr) @@ -330,10 +331,23 @@ def parse_mypy_comments( errors.extend((lineno, x) for x in parse_errors) stderr = StringIO() - new_sections, reports = parse_section('', template, parser['dummy'], stderr=stderr) + strict_found = False + + def set_strict_flags() -> None: + nonlocal strict_found + strict_found = True + + new_sections, reports = parse_section( + '', template, 
set_strict_flags, parser['dummy'], stderr=stderr) errors.extend((lineno, x) for x in stderr.getvalue().strip().split('\n') if x) if reports: errors.append((lineno, "Reports not supported in inline configuration")) + if strict_found: + errors.append((lineno, + "Setting 'strict' not supported in inline configuration: specify it in " + "a configuration file instead, or set individual inline flags " + "(see 'mypy -h' for the list of flags enabled in strict mode)")) + sections.update(new_sections) return sections, errors diff --git a/mypy/main.py b/mypy/main.py index 4b8d9c5f7b0d..c08aab020dff 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -806,15 +806,19 @@ def add_invertible_flag(flag: str, if config_file and not os.path.exists(config_file): parser.error("Cannot find config file '%s'" % config_file) - # Parse config file first, so command line can override. options = Options() - parse_config_file(options, config_file, stdout, stderr) + + def set_strict_flags() -> None: + for dest, value in strict_flag_assignments: + setattr(options, dest, value) + + # Parse config file first, so command line can override. + parse_config_file(options, set_strict_flags, config_file, stdout, stderr) # Set strict flags before parsing (if strict mode enabled), so other command # line options can override. 
if getattr(dummy, 'special-opts:strict'): # noqa - for dest, value in strict_flag_assignments: - setattr(options, dest, value) + set_strict_flags() # Override cache_dir if provided in the environment environ_cache_dir = os.getenv('MYPY_CACHE_DIR', '') diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py index 8939e5ff9fa2..9c50d96712ab 100644 --- a/mypy/test/testfinegrained.py +++ b/mypy/test/testfinegrained.py @@ -193,7 +193,7 @@ def get_options(self, for name, _ in testcase.files: if 'mypy.ini' in name: - parse_config_file(options, name) + parse_config_file(options, lambda: None, name) break return options diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index cf6d810d7357..a2c36c0ca0cb 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -1149,6 +1149,22 @@ def f(c: A) -> None: # E: Missing type parameters for generic type "A" pass [out] +[case testStrictInConfigAnyGeneric] +# flags: --config-file tmp/mypy.ini +from typing import TypeVar, Generic + +T = TypeVar('T') + +class A(Generic[T]): + pass + +def f(c: A) -> None: # E: Missing type parameters for generic type "A" + pass +[file mypy.ini] +\[mypy] +strict = True +[out] + [case testStrictAndStrictEquality] # flags: --strict x = 0 diff --git a/test-data/unit/check-inline-config.test b/test-data/unit/check-inline-config.test index 4cf82b03e671..9bcff53cb523 100644 --- a/test-data/unit/check-inline-config.test +++ b/test-data/unit/check-inline-config.test @@ -157,3 +157,8 @@ main:4: error: Unterminated quote in configuration comment # mypy: skip-file [out] main:1: error: Unrecognized option: skip_file = True + +[case testInlineStrict] +# mypy: strict +[out] +main:1: error: Setting 'strict' not supported in inline configuration: specify it in a configuration file instead, or set individual inline flags (see 'mypy -h' for the list of flags enabled in strict mode) From 8b3b1d8427bdc6f379aed86f6484832790e794a9 Mon Sep 17 00:00:00 2001 
From: Jan Verbeek <55185397+janverb@users.noreply.github.com> Date: Fri, 21 Feb 2020 22:38:44 +0100 Subject: [PATCH 102/117] Analyze descriptor methods as if they're methods, not functions (#8365) When __get__ and __set__ were implicitly called they were analyzed without taking the type of the descriptor into account. That meant that they were only visible to plugins as function calls with unclear names. --- mypy/checker.py | 14 ++++++++-- mypy/checkmember.py | 11 +++++++- test-data/unit/check-custom-plugin.test | 23 +++++++++++++++++ test-data/unit/plugins/descriptor.py | 34 +++++++++++++++++++++++++ 4 files changed, 79 insertions(+), 3 deletions(-) create mode 100644 test-data/unit/plugins/descriptor.py diff --git a/mypy/checker.py b/mypy/checker.py index db5f0fb126dc..f4f466bb6ba8 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -2995,6 +2995,14 @@ def check_member_assignment(self, instance_type: Type, attribute_type: Type, typ = map_instance_to_supertype(attribute_type, dunder_set.info) dunder_set_type = expand_type_by_instance(bound_method, typ) + callable_name = self.expr_checker.method_fullname(attribute_type, "__set__") + dunder_set_type = self.expr_checker.transform_callee_type( + callable_name, dunder_set_type, + [TempNode(instance_type, context=context), rvalue], + [nodes.ARG_POS, nodes.ARG_POS], + context, object_type=attribute_type, + ) + # Here we just infer the type, the result should be type-checked like a normal assignment. # For this we use the rvalue as type context. 
self.msg.disable_errors() @@ -3002,7 +3010,8 @@ def check_member_assignment(self, instance_type: Type, attribute_type: Type, dunder_set_type, [TempNode(instance_type, context=context), rvalue], [nodes.ARG_POS, nodes.ARG_POS], - context) + context, object_type=attribute_type, + callable_name=callable_name) self.msg.enable_errors() # And now we type check the call second time, to show errors related @@ -3012,7 +3021,8 @@ def check_member_assignment(self, instance_type: Type, attribute_type: Type, [TempNode(instance_type, context=context), TempNode(AnyType(TypeOfAny.special_form), context=context)], [nodes.ARG_POS, nodes.ARG_POS], - context) + context, object_type=attribute_type, + callable_name=callable_name) # should be handled by get_method above assert isinstance(inferred_dunder_set_type, CallableType) # type: ignore diff --git a/mypy/checkmember.py b/mypy/checkmember.py index a80db832bece..c9a5a2c86d97 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -486,11 +486,20 @@ def analyze_descriptor_access(instance_type: Type, else: owner_type = instance_type + callable_name = chk.expr_checker.method_fullname(descriptor_type, "__get__") + dunder_get_type = chk.expr_checker.transform_callee_type( + callable_name, dunder_get_type, + [TempNode(instance_type, context=context), + TempNode(TypeType.make_normalized(owner_type), context=context)], + [ARG_POS, ARG_POS], context, object_type=descriptor_type, + ) + _, inferred_dunder_get_type = chk.expr_checker.check_call( dunder_get_type, [TempNode(instance_type, context=context), TempNode(TypeType.make_normalized(owner_type), context=context)], - [ARG_POS, ARG_POS], context) + [ARG_POS, ARG_POS], context, object_type=descriptor_type, + callable_name=callable_name) inferred_dunder_get_type = get_proper_type(inferred_dunder_get_type) if isinstance(inferred_dunder_get_type, AnyType): diff --git a/test-data/unit/check-custom-plugin.test b/test-data/unit/check-custom-plugin.test index 77225b7df9ba..6e7f6a066a95 100644 --- 
a/test-data/unit/check-custom-plugin.test +++ b/test-data/unit/check-custom-plugin.test @@ -698,3 +698,26 @@ class A: pass [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/customize_mro.py + +[case testDescriptorMethods] +# flags: --config-file tmp/mypy.ini + +class Desc: + def __get__(self, obj, cls): + pass + + def __set__(self, obj, val): + pass + +class Cls: + attr = Desc() + +reveal_type(Cls().attr) # N: Revealed type is 'builtins.int' +reveal_type(Cls.attr) # N: Revealed type is 'builtins.str' + +Cls().attr = 3 +Cls().attr = "foo" # E: Incompatible types in assignment (expression has type "str", variable has type "int") + +[file mypy.ini] +\[mypy] +plugins=/test-data/unit/plugins/descriptor.py diff --git a/test-data/unit/plugins/descriptor.py b/test-data/unit/plugins/descriptor.py new file mode 100644 index 000000000000..afbadcdfb671 --- /dev/null +++ b/test-data/unit/plugins/descriptor.py @@ -0,0 +1,34 @@ +from mypy.plugin import Plugin +from mypy.types import NoneType, CallableType + + +class DescriptorPlugin(Plugin): + def get_method_hook(self, fullname): + if fullname == "__main__.Desc.__get__": + return get_hook + return None + + def get_method_signature_hook(self, fullname): + if fullname == "__main__.Desc.__set__": + return set_hook + return None + + +def get_hook(ctx): + if isinstance(ctx.arg_types[0][0], NoneType): + return ctx.api.named_type("builtins.str") + return ctx.api.named_type("builtins.int") + + +def set_hook(ctx): + return CallableType( + [ctx.api.named_type("__main__.Cls"), ctx.api.named_type("builtins.int")], + ctx.default_signature.arg_kinds, + ctx.default_signature.arg_names, + ctx.default_signature.ret_type, + ctx.default_signature.fallback, + ) + + +def plugin(version): + return DescriptorPlugin From 88e8f033ddfd38d74212897d7a127e9e8568b7c0 Mon Sep 17 00:00:00 2001 From: RAHUL RAJA Date: Fri, 21 Feb 2020 13:40:17 -0800 Subject: [PATCH 103/117] Add a section for incompatible overrides (#8377) This is according to changes 
suggested in https://github.com/python/mypy/issues/7994 --- docs/source/common_issues.rst | 37 ++++++++++++++++++++++++++++++++++- 1 file changed, 36 insertions(+), 1 deletion(-) diff --git a/docs/source/common_issues.rst b/docs/source/common_issues.rst index 6891b3262547..fb56eff84959 100644 --- a/docs/source/common_issues.rst +++ b/docs/source/common_issues.rst @@ -750,4 +750,39 @@ Mypy has both type aliases and variables with types like ``Type[...]`` and it is tp = B # This is OK def fun1(x: Alias) -> None: ... # This is OK - def fun2(x: tp) -> None: ... # error: Variable "__main__.tp" is not valid as a type \ No newline at end of file + def fun2(x: tp) -> None: ... # error: Variable "__main__.tp" is not valid as a type + +Incompatible overrides +------------------------------ + +It's unsafe to override a method with a more specific argument type, as it violates +the `Liskov substitution principle `_. For return types, it's unsafe to override a method with a more general return type. + +Here is an example to demonstrate this: + +.. code-block:: python + + from typing import Sequence, List, Iterable + + class A: + def test(self, t: Sequence[int]) -> Sequence[str]: + pass + + # Specific argument type doesn't work + class OverwriteArgumentSpecific(A): + def test(self, t: List[int]) -> Sequence[str]: + pass + + # Specific return type works + class OverwriteReturnSpecific(A): + def test(self, t: Sequence[int]) -> List[str]: + pass + + # Generic return type doesn't work + class OverwriteReturnGeneric(A): + def test(self, t: Sequence[int]) -> Iterable[str]: + pass + +mypy won't report an error for ``OverwriteReturnSpecific`` but it does for ``OverwriteReturnGeneric`` and ``OverwriteArgumentSpecific``. + +We can use ``# type: ignore[override]`` to silence the error (add it to the line that generates the error) if type safety is not needed. From 4f06ac9962d70b02c21c8692a5d3bf80ae1e1ab7 Mon Sep 17 00:00:00 2001 From: "Michael J.
Sullivan" Date: Sat, 22 Feb 2020 00:31:00 -0800 Subject: [PATCH 104/117] Make IRBuilder not inherit from LowLevelIRBuilder. (#8426) Instead we make IRBuilder do all of its building through a LowLevelIRBuilder. A new LowLevelIRBuilder is created for each function that is compiled (which lets LowLevelIRBuilder lose its stack of block lists and environments and only has one of each). A collection of the most commonly used methods in LowLevelIRBuilder are given passthrough methods in IRBuilder for convenience. Work on mypyc/mypyc#714. --- mypyc/genfunc.py | 23 ++--- mypyc/genops.py | 184 ++++++++++++++++++++++++++------------- mypyc/genops_for.py | 4 +- mypyc/genopscontext.py | 2 +- mypyc/ir_builder.py | 53 +++++------ mypyc/nonlocalcontrol.py | 6 +- mypyc/ops.py | 4 + 7 files changed, 169 insertions(+), 107 deletions(-) diff --git a/mypyc/genfunc.py b/mypyc/genfunc.py index 4011a6506a1c..c55e76c31428 100644 --- a/mypyc/genfunc.py +++ b/mypyc/genfunc.py @@ -39,7 +39,6 @@ class BuildFuncIR: def __init__(self, builder: 'IRBuilder') -> None: self.builder = builder self.module_name = builder.module_name - self.environments = builder.environments self.functions = builder.functions self.mapper = builder.mapper @@ -117,7 +116,7 @@ def visit_yield_expr(self, expr: YieldExpr) -> Value: if expr.expr: retval = self.builder.accept(expr.expr) else: - retval = self.builder.none() + retval = self.builder.builder.none() return self.emit_yield(retval, expr.line) def visit_yield_from_expr(self, o: YieldFromExpr) -> Value: @@ -504,14 +503,16 @@ def setup_generator_class(self) -> ClassIR: def create_switch_for_generator_class(self) -> None: self.add(Goto(self.fn_info.generator_class.switch_block)) - self.fn_info.generator_class.blocks.append(self.builder.new_block()) + block = BasicBlock() + self.fn_info.generator_class.continuation_blocks.append(block) + self.builder.activate_block(block) def populate_switch_for_generator_class(self) -> None: cls = self.fn_info.generator_class line = 
self.fn_info.fitem.line self.builder.activate_block(cls.switch_block) - for label, true_block in enumerate(cls.blocks): + for label, true_block in enumerate(cls.continuation_blocks): false_block = BasicBlock() comparison = self.builder.binary_op( cls.next_label_reg, self.add(LoadInt(label)), '==', line @@ -767,8 +768,8 @@ def emit_yield(self, val: Value, line: int) -> Value: # set the next label so that the next time '__next__' is called on the generator object, # the function continues at the new block. next_block = BasicBlock() - next_label = len(cls.blocks) - cls.blocks.append(next_block) + next_label = len(cls.continuation_blocks) + cls.continuation_blocks.append(next_block) self.assign(cls.next_label_target, self.add(LoadInt(next_label)), line) self.add(Return(retval)) self.builder.activate_block(next_block) @@ -944,10 +945,10 @@ def f(self, x: object) -> int: ... arg_kinds = [concrete_arg_kind(arg.kind) for arg in rt_args] if do_pycall: - retval = self.builder.py_method_call( + retval = self.builder.builder.py_method_call( args[0], target.name, args[1:], line, arg_kinds[1:], arg_names[1:]) else: - retval = self.builder.call(target.decl, args, arg_kinds, arg_names, line) + retval = self.builder.builder.call(target.decl, args, arg_kinds, arg_names, line) retval = self.builder.coerce(retval, sig.ret_type, line) self.add(Return(retval)) @@ -1216,13 +1217,13 @@ def load_outer_env(self, base: Value, outer_env: Environment) -> Value: return env def load_outer_envs(self, base: ImplicitClass) -> None: - index = len(self.environments) - 2 + index = len(self.builder.builders) - 2 # Load the first outer environment. This one is special because it gets saved in the # FuncInfo instance's prev_env_reg field. 
if index > 1: # outer_env = self.fn_infos[index].environment - outer_env = self.environments[index] + outer_env = self.builder.builders[index].environment if isinstance(base, GeneratorClass): base.prev_env_reg = self.load_outer_env(base.curr_env_reg, outer_env) else: @@ -1233,7 +1234,7 @@ def load_outer_envs(self, base: ImplicitClass) -> None: # Load the remaining outer environments into registers. while index > 1: # outer_env = self.fn_infos[index].environment - outer_env = self.environments[index] + outer_env = self.builder.builders[index].environment env_reg = self.load_outer_env(env_reg, outer_env) index -= 1 diff --git a/mypyc/genops.py b/mypyc/genops.py index fd19ae73e277..7fd9327eff7e 100644 --- a/mypyc/genops.py +++ b/mypyc/genops.py @@ -45,19 +45,17 @@ def f(x: int) -> int: from mypy.state import strict_optional_set from mypy.util import split_target -from mypyc.common import ( - TEMP_ATTR_NAME, MAX_LITERAL_SHORT_INT, TOP_LEVEL_NAME, -) +from mypyc.common import TEMP_ATTR_NAME, TOP_LEVEL_NAME from mypyc.prebuildvisitor import PreBuildVisitor from mypyc.ops import ( BasicBlock, AssignmentTarget, AssignmentTargetRegister, AssignmentTargetIndex, - AssignmentTargetAttr, AssignmentTargetTuple, Environment, LoadInt, RType, Value, Register, + AssignmentTargetAttr, AssignmentTargetTuple, Environment, LoadInt, RType, Value, Register, Op, FuncIR, Assign, Branch, RTuple, Unreachable, TupleGet, TupleSet, ClassIR, NonExtClassInfo, RInstance, ModuleIR, ModuleIRs, GetAttr, SetAttr, LoadStatic, InitStatic, INVALID_FUNC_DEF, int_rprimitive, bool_rprimitive, list_rprimitive, is_list_rprimitive, dict_rprimitive, set_rprimitive, str_rprimitive, none_rprimitive, is_none_rprimitive, object_rprimitive, - exc_rtuple, PrimitiveOp, ControlOp, OpDescription, is_object_rprimitive, + exc_rtuple, PrimitiveOp, OpDescription, is_object_rprimitive, FuncSignature, NAMESPACE_MODULE, RaiseStandardError, LoadErrorValue, NO_TRACEBACK_LINE_NO, FuncDecl, FUNC_STATICMETHOD, FUNC_CLASSMETHOD, @@ 
-186,7 +184,7 @@ def wrapper(f: Specializer) -> Specializer: return wrapper -class IRBuilder(LowLevelIRBuilder, ExpressionVisitor[Value], StatementVisitor[None]): +class IRBuilder(ExpressionVisitor[Value], StatementVisitor[None]): def __init__(self, current_module: str, types: Dict[Expression, Type], @@ -195,9 +193,11 @@ def __init__(self, mapper: Mapper, pbv: PreBuildVisitor, options: CompilerOptions) -> None: - super().__init__(current_module, mapper) + self.builder = LowLevelIRBuilder(current_module, mapper) + self.builders = [self.builder] self.current_module = current_module + self.mapper = mapper self.types = types self.graph = graph self.ret_types = [] # type: List[RType] @@ -238,6 +238,78 @@ def __init__(self, # can also do quick lookups. self.imports = OrderedDict() # type: OrderedDict[str, None] + # Pass through methods for the most common low-level builder ops, for convenience. + def add(self, op: Op) -> Value: + return self.builder.add(op) + + def goto(self, target: BasicBlock) -> None: + self.builder.goto(target) + + def activate_block(self, block: BasicBlock) -> None: + self.builder.activate_block(block) + + def goto_and_activate(self, block: BasicBlock) -> None: + self.builder.goto_and_activate(block) + + def alloc_temp(self, type: RType) -> Register: + return self.builder.alloc_temp(type) + + def py_get_attr(self, obj: Value, attr: str, line: int) -> Value: + return self.builder.py_get_attr(obj, attr, line) + + def load_static_unicode(self, value: str) -> Value: + return self.builder.load_static_unicode(value) + + def primitive_op(self, desc: OpDescription, args: List[Value], line: int) -> Value: + return self.builder.primitive_op(desc, args, line) + + def unary_op(self, lreg: Value, expr_op: str, line: int) -> Value: + return self.builder.unary_op(lreg, expr_op, line) + + def binary_op(self, lreg: Value, rreg: Value, expr_op: str, line: int) -> Value: + return self.builder.binary_op(lreg, rreg, expr_op, line) + + def coerce(self, src: Value, 
target_type: RType, line: int, force: bool = False) -> Value: + return self.builder.coerce(src, target_type, line, force) + + def none_object(self) -> Value: + return self.builder.none_object() + + def py_call(self, + function: Value, + arg_values: List[Value], + line: int, + arg_kinds: Optional[List[int]] = None, + arg_names: Optional[Sequence[Optional[str]]] = None) -> Value: + return self.builder.py_call(function, arg_values, line, arg_kinds, arg_names) + + def add_bool_branch(self, value: Value, true: BasicBlock, false: BasicBlock) -> None: + self.builder.add_bool_branch(value, true, false) + + def load_native_type_object(self, fullname: str) -> Value: + return self.builder.load_native_type_object(fullname) + + def gen_method_call(self, + base: Value, + name: str, + arg_values: List[Value], + result_type: Optional[RType], + line: int, + arg_kinds: Optional[List[int]] = None, + arg_names: Optional[List[Optional[str]]] = None) -> Value: + return self.builder.gen_method_call( + base, name, arg_values, result_type, line, arg_kinds, arg_names + ) + + def load_module(self, name: str) -> Value: + return self.builder.load_module(name) + + @property + def environment(self) -> Environment: + return self.builder.environment + + ## + def visit_mypy_file(self, mypyfile: MypyFile) -> None: if mypyfile.fullname in ('typing', 'abc'): # These module are special; their contents are currently all @@ -323,7 +395,7 @@ def visit_import(self, node: Import) -> None: mod_dict = self.primitive_op(get_module_dict_op, [], node.line) obj = self.primitive_op(dict_get_item_op, [mod_dict, self.load_static_unicode(base)], node.line) - self.translate_special_method_call( + self.gen_method_call( globals, '__setitem__', [self.load_static_unicode(name), obj], result_type=None, line=node.line) @@ -356,7 +428,7 @@ def visit_import_from(self, node: ImportFrom) -> None: as_name = maybe_as_name or name obj = self.py_get_attr(module, name, node.line) - self.translate_special_method_call( + 
self.gen_method_call( globals, '__setitem__', [self.load_static_unicode(as_name), obj], result_type=None, line=node.line) @@ -388,14 +460,14 @@ def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: BuildFuncIR(self).visit_overloaded_func_def(o) def add_implicit_return(self) -> None: - block = self.blocks[-1][-1] - if not block.ops or not isinstance(block.ops[-1], ControlOp): - retval = self.coerce(self.none(), self.ret_types[-1], -1) + block = self.builder.blocks[-1] + if not block.terminated: + retval = self.coerce(self.builder.none(), self.ret_types[-1], -1) self.nonlocal_control[-1].gen_return(self, retval, self.fn_info.fitem.line) def add_implicit_unreachable(self) -> None: - block = self.blocks[-1][-1] - if not block.ops or not isinstance(block.ops[-1], ControlOp): + block = self.builder.blocks[-1] + if not block.terminated: self.add(Unreachable()) def visit_block(self, block: Block) -> None: @@ -422,7 +494,7 @@ def visit_return_stmt(self, stmt: ReturnStmt) -> None: if stmt.expr: retval = self.accept(stmt.expr) else: - retval = self.none() + retval = self.builder.none() retval = self.coerce(retval, self.ret_types[-1], stmt.line) self.nonlocal_control[-1].gen_return(self, retval, stmt.line) @@ -486,15 +558,13 @@ def load_final_literal_value(self, val: Union[int, str, bytes, float, bool], elif isinstance(val, int): # TODO: take care of negative integer initializers # (probably easier to fix this in mypy itself). 
- if val > MAX_LITERAL_SHORT_INT: - return self.load_static_int(val) - return self.add(LoadInt(val)) + return self.builder.load_static_int(val) elif isinstance(val, float): - return self.load_static_float(val) + return self.builder.load_static_float(val) elif isinstance(val, str): - return self.load_static_unicode(val) + return self.builder.load_static_unicode(val) elif isinstance(val, bytes): - return self.load_static_bytes(val) + return self.builder.load_static_bytes(val) else: assert False, "Unsupported final literal value" @@ -625,7 +695,7 @@ def assign(self, target: Union[Register, AssignmentTarget], self.add(SetAttr(target.obj, target.attr, rvalue_reg, line)) else: key = self.load_static_unicode(target.attr) - boxed_reg = self.box(rvalue_reg) + boxed_reg = self.builder.box(rvalue_reg) self.add(PrimitiveOp([target.obj, key, boxed_reg], py_setattr_op, line)) elif isinstance(target, AssignmentTargetIndex): target_reg2 = self.gen_method_call( @@ -846,7 +916,8 @@ def for_loop_helper(self, index: Lvalue, expr: Expression, for_gen = self.make_for_loop_generator(index, expr, body_block, normal_loop_exit, line) self.push_loop_stack(step_block, exit_block) - condition_block = self.goto_new_block() + condition_block = BasicBlock() + self.goto_and_activate(condition_block) # Add loop condition check. 
for_gen.gen_condition() @@ -1019,19 +1090,17 @@ def visit_index_expr(self, expr: IndexExpr) -> Value: base, '__getitem__', [index_reg], self.node_type(expr), expr.line) def visit_int_expr(self, expr: IntExpr) -> Value: - if expr.value > MAX_LITERAL_SHORT_INT: - return self.load_static_int(expr.value) - return self.add(LoadInt(expr.value)) + return self.builder.load_static_int(expr.value) def visit_float_expr(self, expr: FloatExpr) -> Value: - return self.load_static_float(expr.value) + return self.builder.load_static_float(expr.value) def visit_complex_expr(self, expr: ComplexExpr) -> Value: - return self.load_static_complex(expr.value) + return self.builder.load_static_complex(expr.value) def visit_bytes_expr(self, expr: BytesExpr) -> Value: value = bytes(expr.value, 'utf8').decode('unicode-escape').encode('raw-unicode-escape') - return self.load_static_bytes(value) + return self.builder.load_static_bytes(value) def is_native_module(self, module: str) -> bool: """Is the given module one compiled by mypyc?""" @@ -1158,7 +1227,7 @@ def visit_member_expr(self, expr: MemberExpr) -> Value: return self.load_module(expr.node.fullname) obj = self.accept(expr.expr) - return self.get_attr(obj, expr.name, self.node_type(expr), expr.line) + return self.builder.get_attr(obj, expr.name, self.node_type(expr), expr.line) def visit_call_expr(self, expr: CallExpr) -> Value: if isinstance(expr.analyzed, CastExpr): @@ -1208,7 +1277,9 @@ def call_refexpr_with_args( # Handle data-driven special-cased primitive call ops. 
if callee.fullname is not None and expr.arg_kinds == [ARG_POS] * len(arg_values): ops = func_ops.get(callee.fullname, []) - target = self.matching_primitive_op(ops, arg_values, expr.line, self.node_type(expr)) + target = self.builder.matching_primitive_op( + ops, arg_values, expr.line, self.node_type(expr) + ) if target: return target @@ -1222,7 +1293,7 @@ def call_refexpr_with_args( and callee_node in self.mapper.func_to_decl and all(kind in (ARG_POS, ARG_NAMED) for kind in expr.arg_kinds)): decl = self.mapper.func_to_decl[callee_node] - return self.call(decl, arg_values, expr.arg_kinds, expr.arg_names, expr.line) + return self.builder.call(decl, arg_values, expr.arg_kinds, expr.arg_names, expr.line) # Fall back to a Python call function = self.accept(callee) @@ -1257,7 +1328,7 @@ def translate_method_call(self, expr: CallExpr, callee: MemberExpr) -> Value: args += [self.accept(arg) for arg in expr.args] if ir.is_ext_class: - return self.call(decl, args, arg_kinds, arg_names, expr.line) + return self.builder.call(decl, args, arg_kinds, arg_names, expr.line) else: obj = self.accept(callee.expr) return self.gen_method_call(obj, @@ -1324,7 +1395,7 @@ def translate_super_method_call(self, expr: CallExpr, callee: SuperExpr) -> Valu arg_kinds.insert(0, ARG_POS) arg_names.insert(0, None) - return self.call(decl, arg_values, arg_kinds, arg_names, expr.line) + return self.builder.call(decl, arg_values, arg_kinds, arg_names, expr.line) def translate_cast_expr(self, expr: CastExpr) -> Value: src = self.accept(expr.expr) @@ -1332,7 +1403,7 @@ def translate_cast_expr(self, expr: CastExpr) -> Value: return self.coerce(src, target_type, expr.line) def shortcircuit_expr(self, expr: OpExpr) -> Value: - return self.shortcircuit_helper( + return self.builder.shortcircuit_helper( expr.op, self.node_type(expr), lambda: self.accept(expr.left), lambda: self.accept(expr.right), @@ -1437,7 +1508,7 @@ def visit_dict_expr(self, expr: DictExpr) -> Value: value = self.accept(value_expr) 
key_value_pairs.append((key, value)) - return self.make_dict(key_value_pairs, expr.line) + return self.builder.make_dict(key_value_pairs, expr.line) def visit_set_expr(self, expr: SetExpr) -> Value: return self._visit_display( @@ -1503,7 +1574,7 @@ def go(i: int, prev: Value) -> Value: e.operators[i], prev, self.accept(e.operands[i + 1]), e.line) next = self.accept(e.operands[i + 1]) - return self.shortcircuit_helper( + return self.builder.shortcircuit_helper( 'and', expr_type, lambda: self.visit_basic_comparison( e.operators[i], prev, next, e.line), @@ -1554,11 +1625,11 @@ def visit_try_except(self, else_block = BasicBlock() if else_body else exit_block # Compile the try block with an error handler - self.error_handlers.append(except_entry) + self.builder.push_error_handler(except_entry) self.goto_and_activate(BasicBlock()) body() self.goto(else_block) - self.error_handlers.pop() + self.builder.pop_error_handler() # The error handler catches the error and then checks it # against the except clauses. We compile the error handler @@ -1566,7 +1637,7 @@ def visit_try_except(self, # the *old* exc_info if an exception occurs. # The exception chaining will be done automatically when the # exception is raised, based on the exception in exc_info. 
- self.error_handlers.append(double_except_block) + self.builder.push_error_handler(double_except_block) self.activate_block(except_entry) old_exc = self.maybe_spill(self.primitive_op(error_catch_op, [], line)) # Compile the except blocks with the nonlocal control flow overridden to clear exc_info @@ -1595,7 +1666,7 @@ def visit_try_except(self, self.add(Unreachable()) self.nonlocal_control.pop() - self.error_handlers.pop() + self.builder.pop_error_handler() # Cleanup for if we leave except through normal control flow: # restore the saved exc_info information and continue propagating @@ -1637,14 +1708,14 @@ def try_finally_try(self, err_handler: BasicBlock, return_entry: BasicBlock, main_entry: BasicBlock, try_body: GenFunc) -> Optional[Register]: # Compile the try block with an error handler control = TryFinallyNonlocalControl(return_entry) - self.error_handlers.append(err_handler) + self.builder.push_error_handler(err_handler) self.nonlocal_control.append(control) self.goto_and_activate(BasicBlock()) try_body() self.goto(main_entry) self.nonlocal_control.pop() - self.error_handlers.pop() + self.builder.pop_error_handler() return control.ret_reg @@ -1679,7 +1750,7 @@ def try_finally_body( 'FinallyNonlocalControl']: cleanup_block = BasicBlock() # Compile the finally block with the nonlocal control flow overridden to restore exc_info - self.error_handlers.append(cleanup_block) + self.builder.push_error_handler(cleanup_block) finally_control = FinallyNonlocalControl( self.nonlocal_control[-1], ret_reg, old_exc) self.nonlocal_control.append(finally_control) @@ -1704,7 +1775,7 @@ def try_finally_resolve_control(self, cleanup_block: BasicBlock, self.activate_block(reraise) self.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) self.add(Unreachable()) - self.error_handlers.pop() + self.builder.pop_error_handler() # If there was a return, keep returning if ret_reg: @@ -1968,7 +2039,7 @@ def visit_del_stmt(self, o: DelStmt) -> None: def visit_del_item(self, 
target: AssignmentTarget, line: int) -> None: if isinstance(target, AssignmentTargetIndex): - self.translate_special_method_call( + self.gen_method_call( target.base, '__delitem__', [target.index], @@ -2175,7 +2246,7 @@ def translate_isinstance(self, expr: CallExpr, callee: RefExpr) -> Optional[Valu and isinstance(expr.args[1], (RefExpr, TupleExpr))): irs = self.flatten_classes(expr.args[1]) if irs is not None: - return self.isinstance_helper(self.accept(expr.args[0]), irs, expr.line) + return self.builder.isinstance_helper(self.accept(expr.args[0]), irs, expr.line) return None def flatten_classes(self, arg: Union[RefExpr, TupleExpr]) -> Optional[List[ClassIR]]: @@ -2266,29 +2337,25 @@ def visit_star_expr(self, o: StarExpr) -> Value: def enter(self, fn_info: Union[FuncInfo, str] = '') -> None: if isinstance(fn_info, str): fn_info = FuncInfo(name=fn_info) - self.environment = Environment(fn_info.name) - self.environments.append(self.environment) + self.builder = LowLevelIRBuilder(self.current_module, self.mapper) + self.builders.append(self.builder) self.fn_info = fn_info self.fn_infos.append(self.fn_info) self.ret_types.append(none_rprimitive) - self.error_handlers.append(None) if fn_info.is_generator: self.nonlocal_control.append(GeneratorNonlocalControl()) else: self.nonlocal_control.append(BaseNonlocalControl()) - self.blocks.append([]) - self.new_block() + self.activate_block(BasicBlock()) def leave(self) -> Tuple[List[BasicBlock], Environment, RType, FuncInfo]: - blocks = self.blocks.pop() - env = self.environments.pop() + builder = self.builders.pop() ret_type = self.ret_types.pop() fn_info = self.fn_infos.pop() - self.error_handlers.pop() self.nonlocal_control.pop() - self.environment = self.environments[-1] + self.builder = self.builders[-1] self.fn_info = self.fn_infos[-1] - return blocks, env, ret_type, fn_info + return builder.blocks, builder.environment, ret_type, fn_info @overload def accept(self, node: Expression) -> Value: ... 
@@ -2328,9 +2395,6 @@ def node_type(self, node: Expression) -> RType: mypy_type = self.types[node] return self.type_to_rtype(mypy_type) - def box_expr(self, expr: Expression) -> Value: - return self.box(self.accept(expr)) - def add_var_to_env_class(self, var: SymbolNode, rtype: RType, diff --git a/mypyc/genops_for.py b/mypyc/genops_for.py index 03d4094b960c..48cfb1aa2afd 100644 --- a/mypyc/genops_for.py +++ b/mypyc/genops_for.py @@ -106,7 +106,7 @@ def begin_body(self) -> None: line = self.line # We unbox here so that iterating with tuple unpacking generates a tuple based # unpack instead of an iterator based one. - next_reg = builder.unbox_or_cast(self.next_reg, self.target_type, line) + next_reg = builder.coerce(self.next_reg, self.target_type, line) builder.assign(builder.get_assignment_target(self.index), next_reg, line) def gen_step(self) -> None: @@ -178,7 +178,7 @@ def begin_body(self) -> None: # iterating with tuple unpacking generates a tuple based # unpack instead of an iterator based one. builder.assign(builder.get_assignment_target(self.index), - builder.unbox_or_cast(value_box, self.target_type, line), line) + builder.coerce(value_box, self.target_type, line), line) def gen_step(self) -> None: # Step to the next item. diff --git a/mypyc/genopscontext.py b/mypyc/genopscontext.py index 7b54f16854b5..f7eccbebf234 100644 --- a/mypyc/genopscontext.py +++ b/mypyc/genopscontext.py @@ -146,7 +146,7 @@ def __init__(self, ir: ClassIR) -> None: # The switch block is used to decide which instruction to go using the value held in the # next-label register. self.switch_block = BasicBlock() - self.blocks = [] # type: List[BasicBlock] + self.continuation_blocks = [] # type: List[BasicBlock] @property def next_label_reg(self) -> Value: diff --git a/mypyc/ir_builder.py b/mypyc/ir_builder.py index aa4b273bd6aa..fdf81f3df29b 100644 --- a/mypyc/ir_builder.py +++ b/mypyc/ir_builder.py @@ -6,10 +6,6 @@ example). 
The core principle of the low-level IR builder is that all of its facilities operate solely on the IR level and not the AST level---it has *no knowledge* of mypy types or expressions. - -Currently LowLevelIRBuilder does not have a clean API and the -higher-level IR builder in genops uses LowLevelIRBuilder by inheriting -from it. A next step is to fix this. """ from typing import ( @@ -25,12 +21,12 @@ Assign, Branch, Goto, Call, Box, Unbox, Cast, ClassIR, RInstance, GetAttr, LoadStatic, MethodCall, int_rprimitive, float_rprimitive, bool_rprimitive, list_rprimitive, str_rprimitive, is_none_rprimitive, object_rprimitive, - PrimitiveOp, ControlOp, OpDescription, RegisterOp, + PrimitiveOp, OpDescription, RegisterOp, FuncSignature, NAMESPACE_TYPE, NAMESPACE_MODULE, LoadErrorValue, FuncDecl, RUnion, optional_value_type, all_concrete_classes ) from mypyc.common import ( - FAST_ISINSTANCE_MAX_SUBCLASSES + FAST_ISINSTANCE_MAX_SUBCLASSES, MAX_LITERAL_SHORT_INT, ) from mypyc.ops_primitive import binary_ops, unary_ops, method_ops from mypyc.ops_list import ( @@ -63,45 +59,38 @@ def __init__( self.current_module = current_module self.mapper = mapper self.environment = Environment() - self.environments = [self.environment] - self.blocks = [] # type: List[List[BasicBlock]] + self.blocks = [] # type: List[BasicBlock] # Stack of except handler entry blocks self.error_handlers = [None] # type: List[Optional[BasicBlock]] def add(self, op: Op) -> Value: - if self.blocks[-1][-1].ops: - assert not isinstance(self.blocks[-1][-1].ops[-1], ControlOp), ( - "Can't add to finished block") + assert not self.blocks[-1].terminated, "Can't add to finished block" - self.blocks[-1][-1].ops.append(op) + self.blocks[-1].ops.append(op) if isinstance(op, RegisterOp): self.environment.add_op(op) return op def goto(self, target: BasicBlock) -> None: - if not self.blocks[-1][-1].ops or not isinstance(self.blocks[-1][-1].ops[-1], ControlOp): + if not self.blocks[-1].terminated: self.add(Goto(target)) def 
activate_block(self, block: BasicBlock) -> None: - if self.blocks[-1]: - assert isinstance(self.blocks[-1][-1].ops[-1], ControlOp) + if self.blocks: + assert self.blocks[-1].terminated block.error_handler = self.error_handlers[-1] - self.blocks[-1].append(block) + self.blocks.append(block) def goto_and_activate(self, block: BasicBlock) -> None: self.goto(block) self.activate_block(block) - def new_block(self) -> BasicBlock: - block = BasicBlock() - self.activate_block(block) - return block + def push_error_handler(self, handler: Optional[BasicBlock]) -> None: + self.error_handlers.append(handler) - def goto_new_block(self) -> BasicBlock: - block = BasicBlock() - self.goto_and_activate(block) - return block + def pop_error_handler(self) -> Optional[BasicBlock]: + return self.error_handlers.pop() ## @@ -445,8 +434,11 @@ def literal_static_name(self, value: Union[int, float, complex, str, bytes]) -> def load_static_int(self, value: int) -> Value: """Loads a static integer Python 'int' object into a register.""" - static_symbol = self.literal_static_name(value) - return self.add(LoadStatic(int_rprimitive, static_symbol, ann=value)) + if abs(value) > MAX_LITERAL_SHORT_INT: + static_symbol = self.literal_static_name(value) + return self.add(LoadStatic(int_rprimitive, static_symbol, ann=value)) + else: + return self.add(LoadInt(value)) def load_static_float(self, value: float) -> Value: """Loads a static float value into a register.""" @@ -594,7 +586,8 @@ def add_bool_branch(self, value: Value, true: BasicBlock, false: BasicBlock) -> if not always_truthy: # Optional[X] where X may be falsey and requires a check - branch.true = self.new_block() + branch.true = BasicBlock() + self.activate_block(branch.true) # unbox_or_cast instead of coerce because we want the # type to change even if it is a subtype. 
remaining = self.unbox_or_cast(value, value_type, value.line) @@ -664,7 +657,7 @@ def gen_method_call(self, base: Value, name: str, arg_values: List[Value], - return_rtype: Optional[RType], + result_type: Optional[RType], line: int, arg_kinds: Optional[List[int]] = None, arg_names: Optional[List[Optional[str]]] = None) -> Value: @@ -697,12 +690,12 @@ def gen_method_call(self, arg_kinds=arg_kinds, arg_names=arg_names) elif isinstance(base.type, RUnion): - return self.union_method_call(base, base.type, name, arg_values, return_rtype, line, + return self.union_method_call(base, base.type, name, arg_values, result_type, line, arg_kinds, arg_names) # Try to do a special-cased method call if not arg_kinds or arg_kinds == [ARG_POS] * len(arg_values): - target = self.translate_special_method_call(base, name, arg_values, return_rtype, line) + target = self.translate_special_method_call(base, name, arg_values, result_type, line) if target: return target diff --git a/mypyc/nonlocalcontrol.py b/mypyc/nonlocalcontrol.py index d794429b9ee6..ba44d038feed 100644 --- a/mypyc/nonlocalcontrol.py +++ b/mypyc/nonlocalcontrol.py @@ -72,8 +72,8 @@ def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None: # Raise a StopIteration containing a field for the value that should be returned. Before # doing so, create a new block without an error handler set so that the implicitly thrown # StopIteration isn't caught by except blocks inside of the generator function. - builder.error_handlers.append(None) - builder.goto_new_block() + builder.builder.push_error_handler(None) + builder.goto_and_activate(BasicBlock()) # Skip creating a traceback frame when we raise here, because # we don't care about the traceback frame and it is kind of # expensive since raising StopIteration is an extremely common case. @@ -82,7 +82,7 @@ def gen_return(self, builder: 'IRBuilder', value: Value, line: int) -> None: # value is a tuple (???). 
builder.primitive_op(set_stop_iteration_value, [value], NO_TRACEBACK_LINE_NO) builder.add(Unreachable()) - builder.error_handlers.pop() + builder.builder.pop_error_handler() class CleanupNonlocalControl(NonlocalControl): diff --git a/mypyc/ops.py b/mypyc/ops.py index 96b2fff506af..75e4dd54b19b 100644 --- a/mypyc/ops.py +++ b/mypyc/ops.py @@ -628,6 +628,10 @@ def __init__(self, label: int = -1) -> None: self.ops = [] # type: List[Op] self.error_handler = None # type: Optional[BasicBlock] + @property + def terminated(self) -> bool: + return bool(self.ops) and isinstance(self.ops[-1], ControlOp) + # Never generates an exception ERR_NEVER = 0 # type: Final From ac6fc49f7de86f04b4e15453fcc580fd479a074d Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sat, 22 Feb 2020 13:16:42 +0000 Subject: [PATCH 105/117] [mypyc] Refactor: extract IR generation for expressions from genops (#8428) This follows the pattern established in recent refactoring PRs. Also extract specializer code since it's used both by mypyc.genops and mypyc.genexpr. Work on mypyc/mypyc#714. --- mypyc/genexpr.py | 507 ++++++++++++++++++++++++++++++++++++++ mypyc/genops.py | 579 +++++++------------------------------------- mypyc/specialize.py | 47 ++++ 3 files changed, 638 insertions(+), 495 deletions(-) create mode 100644 mypyc/genexpr.py create mode 100644 mypyc/specialize.py diff --git a/mypyc/genexpr.py b/mypyc/genexpr.py new file mode 100644 index 000000000000..3f827556c9ac --- /dev/null +++ b/mypyc/genexpr.py @@ -0,0 +1,507 @@ +"""Transform mypy expression ASTs to mypyc IR (Intermediate Representation). + +The top-level AST transformation logic is implemented in mypyc.genops. 
+""" + +from typing import List, Optional, Union +from typing_extensions import TYPE_CHECKING + +from mypy.nodes import ( + Expression, NameExpr, MemberExpr, SuperExpr, CallExpr, UnaryExpr, OpExpr, IndexExpr, + ConditionalExpr, ComparisonExpr, IntExpr, FloatExpr, ComplexExpr, StrExpr, + BytesExpr, EllipsisExpr, ListExpr, TupleExpr, DictExpr, SetExpr, ListComprehension, + SetComprehension, DictionaryComprehension, SliceExpr, GeneratorExpr, CastExpr, StarExpr, + Var, RefExpr, MypyFile, TypeInfo, TypeApplication, LDEF, ARG_POS +) + +from mypyc.ops import ( + Value, TupleGet, TupleSet, PrimitiveOp, BasicBlock, RTuple, OpDescription, Assign, + object_rprimitive, is_none_rprimitive, FUNC_CLASSMETHOD, FUNC_STATICMETHOD +) +from mypyc.ops_primitive import name_ref_ops +from mypyc.ops_misc import new_slice_op, iter_op, ellipsis_op, type_op +from mypyc.ops_list import new_list_op, list_append_op, list_extend_op +from mypyc.ops_tuple import list_tuple_op +from mypyc.ops_dict import new_dict_op, dict_set_item_op +from mypyc.ops_set import new_set_op, set_add_op, set_update_op +from mypyc.specialize import specializers + +if TYPE_CHECKING: + from mypyc.genops import IRBuilder + + +class BuildExpressionIR: + def __init__(self, builder: 'IRBuilder') -> None: + self.builder = builder + + # Name and attribute references + + def visit_name_expr(self, expr: NameExpr) -> Value: + assert expr.node, "RefExpr not resolved" + fullname = expr.node.fullname + if fullname in name_ref_ops: + # Use special access op for this particular name. 
+ desc = name_ref_ops[fullname] + assert desc.result_type is not None + return self.builder.add(PrimitiveOp([], desc, expr.line)) + + if isinstance(expr.node, Var) and expr.node.is_final: + value = self.builder.emit_load_final( + expr.node, + fullname, + expr.name, + self.builder.is_native_ref_expr(expr), + self.builder.types[expr], + expr.line, + ) + if value is not None: + return value + + if isinstance(expr.node, MypyFile) and expr.node.fullname in self.builder.imports: + return self.builder.load_module(expr.node.fullname) + + # If the expression is locally defined, then read the result from the corresponding + # assignment target and return it. Otherwise if the expression is a global, load it from + # the globals dictionary. + # Except for imports, that currently always happens in the global namespace. + if expr.kind == LDEF and not (isinstance(expr.node, Var) + and expr.node.is_suppressed_import): + # Try to detect and error when we hit the irritating mypy bug + # where a local variable is cast to None. (#5423) + if (isinstance(expr.node, Var) and is_none_rprimitive(self.builder.node_type(expr)) + and expr.node.is_inferred): + self.builder.error( + "Local variable '{}' has inferred type None; add an annotation".format( + expr.node.name), + expr.node.line) + + # TODO: Behavior currently only defined for Var and FuncDef node types. + return self.builder.read(self.builder.get_assignment_target(expr), expr.line) + + return self.builder.load_global(expr) + + def visit_member_expr(self, expr: MemberExpr) -> Value: + # First check if this is maybe a final attribute. 
+ final = self.builder.get_final_ref(expr) + if final is not None: + fullname, final_var, native = final + value = self.builder.emit_load_final(final_var, fullname, final_var.name, native, + self.builder.types[expr], expr.line) + if value is not None: + return value + + if isinstance(expr.node, MypyFile) and expr.node.fullname in self.builder.imports: + return self.builder.load_module(expr.node.fullname) + + obj = self.builder.accept(expr.expr) + return self.builder.builder.get_attr( + obj, expr.name, self.builder.node_type(expr), expr.line + ) + + def visit_super_expr(self, o: SuperExpr) -> Value: + # self.warning('can not optimize super() expression', o.line) + sup_val = self.builder.load_module_attr_by_fullname('builtins.super', o.line) + if o.call.args: + args = [self.builder.accept(arg) for arg in o.call.args] + else: + assert o.info is not None + typ = self.builder.load_native_type_object(o.info.fullname) + ir = self.builder.mapper.type_to_ir[o.info] + iter_env = iter(self.builder.environment.indexes) + vself = next(iter_env) # grab first argument + if self.builder.fn_info.is_generator: + # grab sixth argument (see comment in translate_super_method_call) + self_targ = list(self.builder.environment.symtable.values())[6] + vself = self.builder.read(self_targ, self.builder.fn_info.fitem.line) + elif not ir.is_ext_class: + vself = next(iter_env) # second argument is self if non_extension class + args = [typ, vself] + res = self.builder.py_call(sup_val, args, o.line) + return self.builder.py_get_attr(res, o.name, o.line) + + # Calls + + def visit_call_expr(self, expr: CallExpr) -> Value: + if isinstance(expr.analyzed, CastExpr): + return self.translate_cast_expr(expr.analyzed) + + callee = expr.callee + if isinstance(callee, IndexExpr) and isinstance(callee.analyzed, TypeApplication): + callee = callee.analyzed.expr # Unwrap type application + + if isinstance(callee, MemberExpr): + return self.translate_method_call(expr, callee) + elif isinstance(callee, 
SuperExpr): + return self.translate_super_method_call(expr, callee) + else: + return self.translate_call(expr, callee) + + def translate_call(self, expr: CallExpr, callee: Expression) -> Value: + # The common case of calls is refexprs + if isinstance(callee, RefExpr): + return self.translate_refexpr_call(expr, callee) + + function = self.builder.accept(callee) + args = [self.builder.accept(arg) for arg in expr.args] + return self.builder.py_call(function, args, expr.line, + arg_kinds=expr.arg_kinds, arg_names=expr.arg_names) + + def translate_refexpr_call(self, expr: CallExpr, callee: RefExpr) -> Value: + """Translate a non-method call.""" + + # TODO: Allow special cases to have default args or named args. Currently they don't since + # they check that everything in arg_kinds is ARG_POS. + + # If there is a specializer for this function, try calling it. + if callee.fullname and (callee.fullname, None) in specializers: + val = specializers[callee.fullname, None](self.builder, expr, callee) + if val is not None: + return val + + # Gen the argument values + arg_values = [self.builder.accept(arg) for arg in expr.args] + + return self.builder.call_refexpr_with_args(expr, callee, arg_values) + + def translate_method_call(self, expr: CallExpr, callee: MemberExpr) -> Value: + """Generate IR for an arbitrary call of form e.m(...). + + This can also deal with calls to module-level functions. 
+ """ + if self.builder.is_native_ref_expr(callee): + # Call to module-level native function or such + return self.translate_call(expr, callee) + elif ( + isinstance(callee.expr, RefExpr) + and isinstance(callee.expr.node, TypeInfo) + and callee.expr.node in self.builder.mapper.type_to_ir + and self.builder.mapper.type_to_ir[callee.expr.node].has_method(callee.name) + ): + # Call a method via the *class* + assert isinstance(callee.expr.node, TypeInfo) + ir = self.builder.mapper.type_to_ir[callee.expr.node] + decl = ir.method_decl(callee.name) + args = [] + arg_kinds, arg_names = expr.arg_kinds[:], expr.arg_names[:] + # Add the class argument for class methods in extension classes + if decl.kind == FUNC_CLASSMETHOD and ir.is_ext_class: + args.append(self.builder.load_native_type_object(callee.expr.node.fullname)) + arg_kinds.insert(0, ARG_POS) + arg_names.insert(0, None) + args += [self.builder.accept(arg) for arg in expr.args] + + if ir.is_ext_class: + return self.builder.builder.call(decl, args, arg_kinds, arg_names, expr.line) + else: + obj = self.builder.accept(callee.expr) + return self.builder.gen_method_call(obj, + callee.name, + args, + self.builder.node_type(expr), + expr.line, + expr.arg_kinds, + expr.arg_names) + + elif self.builder.is_module_member_expr(callee): + # Fall back to a PyCall for non-native module calls + function = self.builder.accept(callee) + args = [self.builder.accept(arg) for arg in expr.args] + return self.builder.py_call(function, args, expr.line, + arg_kinds=expr.arg_kinds, arg_names=expr.arg_names) + else: + receiver_typ = self.builder.node_type(callee.expr) + + # If there is a specializer for this method name/type, try calling it. 
+ if (callee.name, receiver_typ) in specializers: + val = specializers[callee.name, receiver_typ](self.builder, expr, callee) + if val is not None: + return val + + obj = self.builder.accept(callee.expr) + args = [self.builder.accept(arg) for arg in expr.args] + return self.builder.gen_method_call(obj, + callee.name, + args, + self.builder.node_type(expr), + expr.line, + expr.arg_kinds, + expr.arg_names) + + def translate_super_method_call(self, expr: CallExpr, callee: SuperExpr) -> Value: + if callee.info is None or callee.call.args: + return self.translate_call(expr, callee) + ir = self.builder.mapper.type_to_ir[callee.info] + # Search for the method in the mro, skipping ourselves. + for base in ir.mro[1:]: + if callee.name in base.method_decls: + break + else: + return self.translate_call(expr, callee) + + decl = base.method_decl(callee.name) + arg_values = [self.builder.accept(arg) for arg in expr.args] + arg_kinds, arg_names = expr.arg_kinds[:], expr.arg_names[:] + + if decl.kind != FUNC_STATICMETHOD: + vself = next(iter(self.builder.environment.indexes)) # grab first argument + if decl.kind == FUNC_CLASSMETHOD: + vself = self.builder.primitive_op(type_op, [vself], expr.line) + elif self.builder.fn_info.is_generator: + # For generator classes, the self target is the 6th value + # in the symbol table (which is an ordered dict). This is sort + # of ugly, but we can't search by name since the 'self' parameter + # could be named anything, and it doesn't get added to the + # environment indexes. 
+ self_targ = list(self.builder.environment.symtable.values())[6] + vself = self.builder.read(self_targ, self.builder.fn_info.fitem.line) + arg_values.insert(0, vself) + arg_kinds.insert(0, ARG_POS) + arg_names.insert(0, None) + + return self.builder.builder.call(decl, arg_values, arg_kinds, arg_names, expr.line) + + def translate_cast_expr(self, expr: CastExpr) -> Value: + src = self.builder.accept(expr.expr) + target_type = self.builder.type_to_rtype(expr.type) + return self.builder.coerce(src, target_type, expr.line) + + # Operators + + def visit_unary_expr(self, expr: UnaryExpr) -> Value: + return self.builder.unary_op(self.builder.accept(expr.expr), expr.op, expr.line) + + def visit_op_expr(self, expr: OpExpr) -> Value: + if expr.op in ('and', 'or'): + return self.builder.shortcircuit_expr(expr) + return self.builder.binary_op( + self.builder.accept(expr.left), self.builder.accept(expr.right), expr.op, expr.line + ) + + def visit_index_expr(self, expr: IndexExpr) -> Value: + base = self.builder.accept(expr.base) + + if isinstance(base.type, RTuple) and isinstance(expr.index, IntExpr): + return self.builder.add(TupleGet(base, expr.index.value, expr.line)) + + index_reg = self.builder.accept(expr.index) + return self.builder.gen_method_call( + base, '__getitem__', [index_reg], self.builder.node_type(expr), expr.line) + + def visit_conditional_expr(self, expr: ConditionalExpr) -> Value: + if_body, else_body, next = BasicBlock(), BasicBlock(), BasicBlock() + + self.builder.process_conditional(expr.cond, if_body, else_body) + expr_type = self.builder.node_type(expr) + # Having actual Phi nodes would be really nice here! 
+ target = self.builder.alloc_temp(expr_type) + + self.builder.activate_block(if_body) + true_value = self.builder.accept(expr.if_expr) + true_value = self.builder.coerce(true_value, expr_type, expr.line) + self.builder.add(Assign(target, true_value)) + self.builder.goto(next) + + self.builder.activate_block(else_body) + false_value = self.builder.accept(expr.else_expr) + false_value = self.builder.coerce(false_value, expr_type, expr.line) + self.builder.add(Assign(target, false_value)) + self.builder.goto(next) + + self.builder.activate_block(next) + + return target + + def visit_comparison_expr(self, e: ComparisonExpr) -> Value: + # TODO: Don't produce an expression when used in conditional context + + # All of the trickiness here is due to support for chained conditionals + # (`e1 < e2 > e3`, etc). `e1 < e2 > e3` is approximately equivalent to + # `e1 < e2 and e2 > e3` except that `e2` is only evaluated once. + expr_type = self.builder.node_type(e) + + # go(i, prev) generates code for `ei opi e{i+1} op{i+1} ... en`, + # assuming that prev contains the value of `ei`. 
+ def go(i: int, prev: Value) -> Value: + if i == len(e.operators) - 1: + return self.visit_basic_comparison( + e.operators[i], prev, self.builder.accept(e.operands[i + 1]), e.line) + + next = self.builder.accept(e.operands[i + 1]) + return self.builder.builder.shortcircuit_helper( + 'and', expr_type, + lambda: self.visit_basic_comparison( + e.operators[i], prev, next, e.line), + lambda: go(i + 1, next), + e.line) + + return go(0, self.builder.accept(e.operands[0])) + + def visit_basic_comparison(self, op: str, left: Value, right: Value, line: int) -> Value: + negate = False + if op == 'is not': + op, negate = 'is', True + elif op == 'not in': + op, negate = 'in', True + + target = self.builder.binary_op(left, right, op, line) + + if negate: + target = self.builder.unary_op(target, 'not', line) + return target + + # Literals + + def visit_int_expr(self, expr: IntExpr) -> Value: + return self.builder.builder.load_static_int(expr.value) + + def visit_float_expr(self, expr: FloatExpr) -> Value: + return self.builder.builder.load_static_float(expr.value) + + def visit_complex_expr(self, expr: ComplexExpr) -> Value: + return self.builder.builder.load_static_complex(expr.value) + + def visit_str_expr(self, expr: StrExpr) -> Value: + return self.builder.load_static_unicode(expr.value) + + def visit_bytes_expr(self, expr: BytesExpr) -> Value: + value = bytes(expr.value, 'utf8').decode('unicode-escape').encode('raw-unicode-escape') + return self.builder.builder.load_static_bytes(value) + + def visit_ellipsis(self, o: EllipsisExpr) -> Value: + return self.builder.primitive_op(ellipsis_op, [], o.line) + + # Display expressions + + def visit_list_expr(self, expr: ListExpr) -> Value: + return self._visit_list_display(expr.items, expr.line) + + def _visit_list_display(self, items: List[Expression], line: int) -> Value: + return self._visit_display( + items, + new_list_op, + list_append_op, + list_extend_op, + line + ) + + def visit_tuple_expr(self, expr: TupleExpr) -> Value: + 
if any(isinstance(item, StarExpr) for item in expr.items): + # create a tuple of unknown length + return self._visit_tuple_display(expr) + + # create a tuple of fixed length (RTuple) + tuple_type = self.builder.node_type(expr) + # When handling NamedTuple et. al we might not have proper type info, + # so make some up if we need it. + types = (tuple_type.types if isinstance(tuple_type, RTuple) + else [object_rprimitive] * len(expr.items)) + + items = [] + for item_expr, item_type in zip(expr.items, types): + reg = self.builder.accept(item_expr) + items.append(self.builder.coerce(reg, item_type, item_expr.line)) + return self.builder.add(TupleSet(items, expr.line)) + + def _visit_tuple_display(self, expr: TupleExpr) -> Value: + """Create a list, then turn it into a tuple.""" + val_as_list = self._visit_list_display(expr.items, expr.line) + return self.builder.primitive_op(list_tuple_op, [val_as_list], expr.line) + + def visit_dict_expr(self, expr: DictExpr) -> Value: + """First accepts all keys and values, then makes a dict out of them.""" + key_value_pairs = [] + for key_expr, value_expr in expr.items: + key = self.builder.accept(key_expr) if key_expr is not None else None + value = self.builder.accept(value_expr) + key_value_pairs.append((key, value)) + + return self.builder.builder.make_dict(key_value_pairs, expr.line) + + def visit_set_expr(self, expr: SetExpr) -> Value: + return self._visit_display( + expr.items, + new_set_op, + set_add_op, + set_update_op, + expr.line + ) + + def _visit_display(self, + items: List[Expression], + constructor_op: OpDescription, + append_op: OpDescription, + extend_op: OpDescription, + line: int + ) -> Value: + accepted_items = [] + for item in items: + if isinstance(item, StarExpr): + accepted_items.append((True, self.builder.accept(item.expr))) + else: + accepted_items.append((False, self.builder.accept(item))) + + result = None # type: Union[Value, None] + initial_items = [] + for starred, value in accepted_items: + if result 
is None and not starred and constructor_op.is_var_arg: + initial_items.append(value) + continue + + if result is None: + result = self.builder.primitive_op(constructor_op, initial_items, line) + + self.builder.primitive_op(extend_op if starred else append_op, [result, value], line) + + if result is None: + result = self.builder.primitive_op(constructor_op, initial_items, line) + + return result + + # Comprehensions + + def visit_list_comprehension(self, o: ListComprehension) -> Value: + return self.builder.translate_list_comprehension(o.generator) + + def visit_set_comprehension(self, o: SetComprehension) -> Value: + gen = o.generator + set_ops = self.builder.primitive_op(new_set_op, [], o.line) + loop_params = list(zip(gen.indices, gen.sequences, gen.condlists)) + + def gen_inner_stmts() -> None: + e = self.builder.accept(gen.left_expr) + self.builder.primitive_op(set_add_op, [set_ops, e], o.line) + + self.builder.comprehension_helper(loop_params, gen_inner_stmts, o.line) + return set_ops + + def visit_dictionary_comprehension(self, o: DictionaryComprehension) -> Value: + d = self.builder.primitive_op(new_dict_op, [], o.line) + loop_params = list(zip(o.indices, o.sequences, o.condlists)) + + def gen_inner_stmts() -> None: + k = self.builder.accept(o.key) + v = self.builder.accept(o.value) + self.builder.primitive_op(dict_set_item_op, [d, k, v], o.line) + + self.builder.comprehension_helper(loop_params, gen_inner_stmts, o.line) + return d + + # Misc + + def visit_slice_expr(self, expr: SliceExpr) -> Value: + def get_arg(arg: Optional[Expression]) -> Value: + if arg is None: + return self.builder.none_object() + else: + return self.builder.accept(arg) + + args = [get_arg(expr.begin_index), + get_arg(expr.end_index), + get_arg(expr.stride)] + return self.builder.primitive_op(new_slice_op, args, expr.line) + + def visit_generator_expr(self, o: GeneratorExpr) -> Value: + self.builder.warning('Treating generator comprehension as list', o.line) + return 
self.builder.primitive_op( + iter_op, [self.builder.translate_list_comprehension(o)], o.line + ) diff --git a/mypyc/genops.py b/mypyc/genops.py index 7fd9327eff7e..25d59a6fc469 100644 --- a/mypyc/genops.py +++ b/mypyc/genops.py @@ -51,27 +51,20 @@ def f(x: int) -> int: BasicBlock, AssignmentTarget, AssignmentTargetRegister, AssignmentTargetIndex, AssignmentTargetAttr, AssignmentTargetTuple, Environment, LoadInt, RType, Value, Register, Op, FuncIR, Assign, Branch, RTuple, Unreachable, - TupleGet, TupleSet, ClassIR, NonExtClassInfo, RInstance, ModuleIR, ModuleIRs, GetAttr, SetAttr, + TupleGet, ClassIR, NonExtClassInfo, RInstance, ModuleIR, ModuleIRs, GetAttr, SetAttr, LoadStatic, InitStatic, INVALID_FUNC_DEF, int_rprimitive, bool_rprimitive, list_rprimitive, is_list_rprimitive, dict_rprimitive, set_rprimitive, str_rprimitive, none_rprimitive, is_none_rprimitive, object_rprimitive, exc_rtuple, PrimitiveOp, OpDescription, is_object_rprimitive, FuncSignature, NAMESPACE_MODULE, RaiseStandardError, LoadErrorValue, NO_TRACEBACK_LINE_NO, FuncDecl, - FUNC_STATICMETHOD, FUNC_CLASSMETHOD, ) -from mypyc.ops_primitive import func_ops, name_ref_ops -from mypyc.ops_list import ( - list_append_op, list_extend_op, list_len_op, new_list_op, to_list, list_pop_last -) -from mypyc.ops_tuple import list_tuple_op -from mypyc.ops_dict import ( - new_dict_op, dict_get_item_op, dict_set_item_op -) -from mypyc.ops_set import new_set_op, set_add_op, set_update_op +from mypyc.ops_primitive import func_ops +from mypyc.ops_list import list_append_op, list_len_op, new_list_op, to_list, list_pop_last +from mypyc.ops_dict import dict_get_item_op, dict_set_item_op from mypyc.ops_misc import ( true_op, false_op, iter_op, next_op, py_setattr_op, py_delattr_op, - new_slice_op, type_op, import_op, get_module_dict_op, ellipsis_op, + type_op, import_op, get_module_dict_op ) from mypyc.ops_exc import ( raise_exception_op, reraise_exception_op, @@ -88,11 +81,13 @@ def f(x: int) -> int: ) from mypyc.genclass 
import BuildClassIR from mypyc.genfunc import BuildFuncIR +from mypyc.genexpr import BuildExpressionIR from mypyc.genopscontext import FuncInfo, ImplicitClass from mypyc.genopsmapper import Mapper from mypyc.genopsvtable import compute_vtable from mypyc.genopsprepare import build_type_map from mypyc.ir_builder import LowLevelIRBuilder +from mypyc.specialize import specialize_function GenFunc = Callable[[], None] @@ -150,40 +145,6 @@ def build_ir(modules: List[MypyFile], return result -# Infrastructure for special casing calls to builtin functions in a -# programmatic way. Most special cases should be handled using the -# data driven "primitive ops" system, but certain operations require -# special handling that has access to the AST/IR directly and can make -# decisions/optimizations based on it. -# -# For example, we use specializers to statically emit the length of a -# fixed length tuple and to emit optimized code for any/all calls with -# generator comprehensions as the argument. -# -# Specalizers are attempted before compiling the arguments to the -# function. Specializers can return None to indicate that they failed -# and the call should be compiled normally. Otherwise they should emit -# code for the call and return a value containing the result. -# -# Specializers take three arguments: the IRBuilder, the CallExpr being -# compiled, and the RefExpr that is the left hand side of the call. -# -# Specializers can operate on methods as well, and are keyed on the -# name and RType in that case. 
-Specializer = Callable[['IRBuilder', CallExpr, RefExpr], Optional[Value]] - -specializers = {} # type: Dict[Tuple[str, Optional[RType]], Specializer] - - -def specialize_function( - name: str, typ: Optional[RType] = None) -> Callable[[Specializer], Specializer]: - """Decorator to register a function as being a specializer.""" - def wrapper(f: Specializer) -> Specializer: - specializers[name, typ] = f - return f - return wrapper - - class IRBuilder(ExpressionVisitor[Value], StatementVisitor[None]): def __init__(self, current_module: str, @@ -239,6 +200,7 @@ def __init__(self, self.imports = OrderedDict() # type: OrderedDict[str, None] # Pass through methods for the most common low-level builder ops, for convenience. + def add(self, op: Op) -> Value: return self.builder.add(op) @@ -1071,37 +1033,6 @@ def visit_break_stmt(self, node: BreakStmt) -> None: def visit_continue_stmt(self, node: ContinueStmt) -> None: self.nonlocal_control[-1].gen_continue(self, node.line) - def visit_unary_expr(self, expr: UnaryExpr) -> Value: - return self.unary_op(self.accept(expr.expr), expr.op, expr.line) - - def visit_op_expr(self, expr: OpExpr) -> Value: - if expr.op in ('and', 'or'): - return self.shortcircuit_expr(expr) - return self.binary_op(self.accept(expr.left), self.accept(expr.right), expr.op, expr.line) - - def visit_index_expr(self, expr: IndexExpr) -> Value: - base = self.accept(expr.base) - - if isinstance(base.type, RTuple) and isinstance(expr.index, IntExpr): - return self.add(TupleGet(base, expr.index.value, expr.line)) - - index_reg = self.accept(expr.index) - return self.gen_method_call( - base, '__getitem__', [index_reg], self.node_type(expr), expr.line) - - def visit_int_expr(self, expr: IntExpr) -> Value: - return self.builder.load_static_int(expr.value) - - def visit_float_expr(self, expr: FloatExpr) -> Value: - return self.builder.load_static_float(expr.value) - - def visit_complex_expr(self, expr: ComplexExpr) -> Value: - return 
self.builder.load_static_complex(expr.value) - - def visit_bytes_expr(self, expr: BytesExpr) -> Value: - value = bytes(expr.value, 'utf8').decode('unicode-escape').encode('raw-unicode-escape') - return self.builder.load_static_bytes(value) - def is_native_module(self, module: str) -> bool: """Is the given module one compiled by mypyc?""" return module in self.mapper.group_map @@ -1171,106 +1102,9 @@ def emit_load_final(self, final_var: Var, fullname: str, else: return None - def visit_name_expr(self, expr: NameExpr) -> Value: - assert expr.node, "RefExpr not resolved" - fullname = expr.node.fullname - if fullname in name_ref_ops: - # Use special access op for this particular name. - desc = name_ref_ops[fullname] - assert desc.result_type is not None - return self.add(PrimitiveOp([], desc, expr.line)) - - if isinstance(expr.node, Var) and expr.node.is_final: - value = self.emit_load_final(expr.node, fullname, expr.name, - self.is_native_ref_expr(expr), self.types[expr], - expr.line) - if value is not None: - return value - - if isinstance(expr.node, MypyFile) and expr.node.fullname in self.imports: - return self.load_module(expr.node.fullname) - - # If the expression is locally defined, then read the result from the corresponding - # assignment target and return it. Otherwise if the expression is a global, load it from - # the globals dictionary. - # Except for imports, that currently always happens in the global namespace. - if expr.kind == LDEF and not (isinstance(expr.node, Var) - and expr.node.is_suppressed_import): - # Try to detect and error when we hit the irritating mypy bug - # where a local variable is cast to None. (#5423) - if (isinstance(expr.node, Var) and is_none_rprimitive(self.node_type(expr)) - and expr.node.is_inferred): - self.error( - "Local variable '{}' has inferred type None; add an annotation".format( - expr.node.name), - expr.node.line) - - # TODO: Behavior currently only defined for Var and FuncDef node types. 
- return self.read(self.get_assignment_target(expr), expr.line) - - return self.load_global(expr) - def is_module_member_expr(self, expr: MemberExpr) -> bool: return isinstance(expr.expr, RefExpr) and isinstance(expr.expr.node, MypyFile) - def visit_member_expr(self, expr: MemberExpr) -> Value: - # First check if this is maybe a final attribute. - final = self.get_final_ref(expr) - if final is not None: - fullname, final_var, native = final - value = self.emit_load_final(final_var, fullname, final_var.name, native, - self.types[expr], expr.line) - if value is not None: - return value - - if isinstance(expr.node, MypyFile) and expr.node.fullname in self.imports: - return self.load_module(expr.node.fullname) - - obj = self.accept(expr.expr) - return self.builder.get_attr(obj, expr.name, self.node_type(expr), expr.line) - - def visit_call_expr(self, expr: CallExpr) -> Value: - if isinstance(expr.analyzed, CastExpr): - return self.translate_cast_expr(expr.analyzed) - - callee = expr.callee - if isinstance(callee, IndexExpr) and isinstance(callee.analyzed, TypeApplication): - callee = callee.analyzed.expr # Unwrap type application - - if isinstance(callee, MemberExpr): - return self.translate_method_call(expr, callee) - elif isinstance(callee, SuperExpr): - return self.translate_super_method_call(expr, callee) - else: - return self.translate_call(expr, callee) - - def translate_call(self, expr: CallExpr, callee: Expression) -> Value: - # The common case of calls is refexprs - if isinstance(callee, RefExpr): - return self.translate_refexpr_call(expr, callee) - - function = self.accept(callee) - args = [self.accept(arg) for arg in expr.args] - return self.py_call(function, args, expr.line, - arg_kinds=expr.arg_kinds, arg_names=expr.arg_names) - - def translate_refexpr_call(self, expr: CallExpr, callee: RefExpr) -> Value: - """Translate a non-method call.""" - - # TODO: Allow special cases to have default args or named args. 
Currently they don't since - # they check that everything in arg_kinds is ARG_POS. - - # If there is a specializer for this function, try calling it. - if callee.fullname and (callee.fullname, None) in specializers: - val = specializers[callee.fullname, None](self, expr, callee) - if val is not None: - return val - - # Gen the argument values - arg_values = [self.accept(arg) for arg in expr.args] - - return self.call_refexpr_with_args(expr, callee, arg_values) - def call_refexpr_with_args( self, expr: CallExpr, callee: RefExpr, arg_values: List[Value]) -> Value: @@ -1300,108 +1134,6 @@ def call_refexpr_with_args( return self.py_call(function, arg_values, expr.line, arg_kinds=expr.arg_kinds, arg_names=expr.arg_names) - def translate_method_call(self, expr: CallExpr, callee: MemberExpr) -> Value: - """Generate IR for an arbitrary call of form e.m(...). - - This can also deal with calls to module-level functions. - """ - if self.is_native_ref_expr(callee): - # Call to module-level native function or such - return self.translate_call(expr, callee) - elif ( - isinstance(callee.expr, RefExpr) - and isinstance(callee.expr.node, TypeInfo) - and callee.expr.node in self.mapper.type_to_ir - and self.mapper.type_to_ir[callee.expr.node].has_method(callee.name) - ): - # Call a method via the *class* - assert isinstance(callee.expr.node, TypeInfo) - ir = self.mapper.type_to_ir[callee.expr.node] - decl = ir.method_decl(callee.name) - args = [] - arg_kinds, arg_names = expr.arg_kinds[:], expr.arg_names[:] - # Add the class argument for class methods in extension classes - if decl.kind == FUNC_CLASSMETHOD and ir.is_ext_class: - args.append(self.load_native_type_object(callee.expr.node.fullname)) - arg_kinds.insert(0, ARG_POS) - arg_names.insert(0, None) - args += [self.accept(arg) for arg in expr.args] - - if ir.is_ext_class: - return self.builder.call(decl, args, arg_kinds, arg_names, expr.line) - else: - obj = self.accept(callee.expr) - return self.gen_method_call(obj, - 
callee.name, - args, - self.node_type(expr), - expr.line, - expr.arg_kinds, - expr.arg_names) - - elif self.is_module_member_expr(callee): - # Fall back to a PyCall for non-native module calls - function = self.accept(callee) - args = [self.accept(arg) for arg in expr.args] - return self.py_call(function, args, expr.line, - arg_kinds=expr.arg_kinds, arg_names=expr.arg_names) - else: - receiver_typ = self.node_type(callee.expr) - - # If there is a specializer for this method name/type, try calling it. - if (callee.name, receiver_typ) in specializers: - val = specializers[callee.name, receiver_typ](self, expr, callee) - if val is not None: - return val - - obj = self.accept(callee.expr) - args = [self.accept(arg) for arg in expr.args] - return self.gen_method_call(obj, - callee.name, - args, - self.node_type(expr), - expr.line, - expr.arg_kinds, - expr.arg_names) - - def translate_super_method_call(self, expr: CallExpr, callee: SuperExpr) -> Value: - if callee.info is None or callee.call.args: - return self.translate_call(expr, callee) - ir = self.mapper.type_to_ir[callee.info] - # Search for the method in the mro, skipping ourselves. - for base in ir.mro[1:]: - if callee.name in base.method_decls: - break - else: - return self.translate_call(expr, callee) - - decl = base.method_decl(callee.name) - arg_values = [self.accept(arg) for arg in expr.args] - arg_kinds, arg_names = expr.arg_kinds[:], expr.arg_names[:] - - if decl.kind != FUNC_STATICMETHOD: - vself = next(iter(self.environment.indexes)) # grab first argument - if decl.kind == FUNC_CLASSMETHOD: - vself = self.primitive_op(type_op, [vself], expr.line) - elif self.fn_info.is_generator: - # For generator classes, the self target is the 6th value - # in the symbol table (which is an ordered dict). This is sort - # of ugly, but we can't search by name since the 'self' parameter - # could be named anything, and it doesn't get added to the - # environment indexes. 
- self_targ = list(self.environment.symtable.values())[6] - vself = self.read(self_targ, self.fn_info.fitem.line) - arg_values.insert(0, vself) - arg_kinds.insert(0, ARG_POS) - arg_names.insert(0, None) - - return self.builder.call(decl, arg_values, arg_kinds, arg_names, expr.line) - - def translate_cast_expr(self, expr: CastExpr) -> Value: - src = self.accept(expr.expr) - target_type = self.type_to_rtype(expr.type) - return self.coerce(src, target_type, expr.line) - def shortcircuit_expr(self, expr: OpExpr) -> Value: return self.builder.shortcircuit_helper( expr.op, self.node_type(expr), @@ -1410,118 +1142,6 @@ def shortcircuit_expr(self, expr: OpExpr) -> Value: expr.line ) - def visit_conditional_expr(self, expr: ConditionalExpr) -> Value: - if_body, else_body, next = BasicBlock(), BasicBlock(), BasicBlock() - - self.process_conditional(expr.cond, if_body, else_body) - expr_type = self.node_type(expr) - # Having actual Phi nodes would be really nice here! - target = self.alloc_temp(expr_type) - - self.activate_block(if_body) - true_value = self.accept(expr.if_expr) - true_value = self.coerce(true_value, expr_type, expr.line) - self.add(Assign(target, true_value)) - self.goto(next) - - self.activate_block(else_body) - false_value = self.accept(expr.else_expr) - false_value = self.coerce(false_value, expr_type, expr.line) - self.add(Assign(target, false_value)) - self.goto(next) - - self.activate_block(next) - - return target - - def visit_list_expr(self, expr: ListExpr) -> Value: - return self._visit_list_display(expr.items, expr.line) - - def _visit_list_display(self, items: List[Expression], line: int) -> Value: - return self._visit_display( - items, - new_list_op, - list_append_op, - list_extend_op, - line - ) - - def _visit_display(self, - items: List[Expression], - constructor_op: OpDescription, - append_op: OpDescription, - extend_op: OpDescription, - line: int - ) -> Value: - accepted_items = [] - for item in items: - if isinstance(item, StarExpr): - 
accepted_items.append((True, self.accept(item.expr))) - else: - accepted_items.append((False, self.accept(item))) - - result = None # type: Union[Value, None] - initial_items = [] - for starred, value in accepted_items: - if result is None and not starred and constructor_op.is_var_arg: - initial_items.append(value) - continue - - if result is None: - result = self.primitive_op(constructor_op, initial_items, line) - - self.primitive_op(extend_op if starred else append_op, [result, value], line) - - if result is None: - result = self.primitive_op(constructor_op, initial_items, line) - - return result - - def visit_tuple_expr(self, expr: TupleExpr) -> Value: - if any(isinstance(item, StarExpr) for item in expr.items): - # create a tuple of unknown length - return self._visit_tuple_display(expr) - - # create a tuple of fixed length (RTuple) - tuple_type = self.node_type(expr) - # When handling NamedTuple et. al we might not have proper type info, - # so make some up if we need it. - types = (tuple_type.types if isinstance(tuple_type, RTuple) - else [object_rprimitive] * len(expr.items)) - - items = [] - for item_expr, item_type in zip(expr.items, types): - reg = self.accept(item_expr) - items.append(self.coerce(reg, item_type, item_expr.line)) - return self.add(TupleSet(items, expr.line)) - - def _visit_tuple_display(self, expr: TupleExpr) -> Value: - """Create a list, then turn it into a tuple.""" - val_as_list = self._visit_list_display(expr.items, expr.line) - return self.primitive_op(list_tuple_op, [val_as_list], expr.line) - - def visit_dict_expr(self, expr: DictExpr) -> Value: - """First accepts all keys and values, then makes a dict out of them.""" - key_value_pairs = [] - for key_expr, value_expr in expr.items: - key = self.accept(key_expr) if key_expr is not None else None - value = self.accept(value_expr) - key_value_pairs.append((key, value)) - - return self.builder.make_dict(key_value_pairs, expr.line) - - def visit_set_expr(self, expr: SetExpr) -> Value: - 
return self._visit_display( - expr.items, - new_set_op, - set_add_op, - set_update_op, - expr.line - ) - - def visit_str_expr(self, expr: StrExpr) -> Value: - return self.load_static_unicode(expr.value) - # Conditional expressions def process_conditional(self, e: Expression, true: BasicBlock, false: BasicBlock) -> None: @@ -1545,59 +1165,9 @@ def process_conditional(self, e: Expression, true: BasicBlock, false: BasicBlock reg = self.accept(e) self.add_bool_branch(reg, true, false) - def visit_basic_comparison(self, op: str, left: Value, right: Value, line: int) -> Value: - negate = False - if op == 'is not': - op, negate = 'is', True - elif op == 'not in': - op, negate = 'in', True - - target = self.binary_op(left, right, op, line) - - if negate: - target = self.unary_op(target, 'not', line) - return target - - def visit_comparison_expr(self, e: ComparisonExpr) -> Value: - # TODO: Don't produce an expression when used in conditional context - - # All of the trickiness here is due to support for chained conditionals - # (`e1 < e2 > e3`, etc). `e1 < e2 > e3` is approximately equivalent to - # `e1 < e2 and e2 > e3` except that `e2` is only evaluated once. - expr_type = self.node_type(e) - - # go(i, prev) generates code for `ei opi e{i+1} op{i+1} ... en`, - # assuming that prev contains the value of `ei`. 
- def go(i: int, prev: Value) -> Value: - if i == len(e.operators) - 1: - return self.visit_basic_comparison( - e.operators[i], prev, self.accept(e.operands[i + 1]), e.line) - - next = self.accept(e.operands[i + 1]) - return self.builder.shortcircuit_helper( - 'and', expr_type, - lambda: self.visit_basic_comparison( - e.operators[i], prev, next, e.line), - lambda: go(i + 1, next), - e.line) - - return go(0, self.accept(e.operands[0])) - def visit_nonlocal_decl(self, o: NonlocalDecl) -> None: pass - def visit_slice_expr(self, expr: SliceExpr) -> Value: - def get_arg(arg: Optional[Expression]) -> Value: - if arg is None: - return self.none_object() - else: - return self.accept(arg) - - args = [get_arg(expr.begin_index), - get_arg(expr.end_index), - get_arg(expr.stride)] - return self.primitive_op(new_slice_op, args, expr.line) - def visit_raise_stmt(self, s: RaiseStmt) -> None: if s.expr is None: self.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) @@ -1901,9 +1471,6 @@ def generate(i: int) -> None: generate(0) - def visit_lambda_expr(self, expr: LambdaExpr) -> Value: - return BuildFuncIR(self).visit_lambda_expr(expr) - def visit_pass_stmt(self, o: PassStmt) -> None: pass @@ -1945,37 +1512,6 @@ def gen_inner_stmts() -> None: self.comprehension_helper(loop_params, gen_inner_stmts, gen.line) return list_ops - def visit_list_comprehension(self, o: ListComprehension) -> Value: - return self.translate_list_comprehension(o.generator) - - def visit_set_comprehension(self, o: SetComprehension) -> Value: - gen = o.generator - set_ops = self.primitive_op(new_set_op, [], o.line) - loop_params = list(zip(gen.indices, gen.sequences, gen.condlists)) - - def gen_inner_stmts() -> None: - e = self.accept(gen.left_expr) - self.primitive_op(set_add_op, [set_ops, e], o.line) - - self.comprehension_helper(loop_params, gen_inner_stmts, o.line) - return set_ops - - def visit_dictionary_comprehension(self, o: DictionaryComprehension) -> Value: - d = 
self.primitive_op(new_dict_op, [], o.line) - loop_params = list(zip(o.indices, o.sequences, o.condlists)) - - def gen_inner_stmts() -> None: - k = self.accept(o.key) - v = self.accept(o.value) - self.primitive_op(dict_set_item_op, [d, k, v], o.line) - - self.comprehension_helper(loop_params, gen_inner_stmts, o.line) - return d - - def visit_generator_expr(self, o: GeneratorExpr) -> Value: - self.warning('Treating generator comprehension as list', o.line) - return self.primitive_op(iter_op, [self.translate_list_comprehension(o)], o.line) - def comprehension_helper(self, loop_params: List[Tuple[Lvalue, Expression, List[Expression]]], gen_inner_stmts: Callable[[], None], @@ -2058,26 +1594,82 @@ def visit_del_item(self, target: AssignmentTarget, line: int) -> None: for subtarget in target.items: self.visit_del_item(subtarget, line) - def visit_super_expr(self, o: SuperExpr) -> Value: - # self.warning('can not optimize super() expression', o.line) - sup_val = self.load_module_attr_by_fullname('builtins.super', o.line) - if o.call.args: - args = [self.accept(arg) for arg in o.call.args] - else: - assert o.info is not None - typ = self.load_native_type_object(o.info.fullname) - ir = self.mapper.type_to_ir[o.info] - iter_env = iter(self.environment.indexes) - vself = next(iter_env) # grab first argument - if self.fn_info.is_generator: - # grab sixth argument (see comment in translate_super_method_call) - self_targ = list(self.environment.symtable.values())[6] - vself = self.read(self_targ, self.fn_info.fitem.line) - elif not ir.is_ext_class: - vself = next(iter_env) # second argument is self if non_extension class - args = [typ, vself] - res = self.py_call(sup_val, args, o.line) - return self.py_get_attr(res, o.name, o.line) + # Expressions + + def visit_name_expr(self, expr: NameExpr) -> Value: + return BuildExpressionIR(self).visit_name_expr(expr) + + def visit_member_expr(self, expr: MemberExpr) -> Value: + return BuildExpressionIR(self).visit_member_expr(expr) + + def 
visit_super_expr(self, expr: SuperExpr) -> Value: + return BuildExpressionIR(self).visit_super_expr(expr) + + def visit_call_expr(self, expr: CallExpr) -> Value: + return BuildExpressionIR(self).visit_call_expr(expr) + + def visit_unary_expr(self, expr: UnaryExpr) -> Value: + return BuildExpressionIR(self).visit_unary_expr(expr) + + def visit_op_expr(self, expr: OpExpr) -> Value: + return BuildExpressionIR(self).visit_op_expr(expr) + + def visit_index_expr(self, expr: IndexExpr) -> Value: + return BuildExpressionIR(self).visit_index_expr(expr) + + def visit_conditional_expr(self, expr: ConditionalExpr) -> Value: + return BuildExpressionIR(self).visit_conditional_expr(expr) + + def visit_comparison_expr(self, expr: ComparisonExpr) -> Value: + return BuildExpressionIR(self).visit_comparison_expr(expr) + + def visit_int_expr(self, expr: IntExpr) -> Value: + return BuildExpressionIR(self).visit_int_expr(expr) + + def visit_float_expr(self, expr: FloatExpr) -> Value: + return BuildExpressionIR(self).visit_float_expr(expr) + + def visit_complex_expr(self, expr: ComplexExpr) -> Value: + return BuildExpressionIR(self).visit_complex_expr(expr) + + def visit_str_expr(self, expr: StrExpr) -> Value: + return BuildExpressionIR(self).visit_str_expr(expr) + + def visit_bytes_expr(self, expr: BytesExpr) -> Value: + return BuildExpressionIR(self).visit_bytes_expr(expr) + + def visit_ellipsis(self, expr: EllipsisExpr) -> Value: + return BuildExpressionIR(self).visit_ellipsis(expr) + + def visit_list_expr(self, expr: ListExpr) -> Value: + return BuildExpressionIR(self).visit_list_expr(expr) + + def visit_tuple_expr(self, expr: TupleExpr) -> Value: + return BuildExpressionIR(self).visit_tuple_expr(expr) + + def visit_dict_expr(self, expr: DictExpr) -> Value: + return BuildExpressionIR(self).visit_dict_expr(expr) + + def visit_set_expr(self, expr: SetExpr) -> Value: + return BuildExpressionIR(self).visit_set_expr(expr) + + def visit_list_comprehension(self, expr: ListComprehension) -> 
Value: + return BuildExpressionIR(self).visit_list_comprehension(expr) + + def visit_set_comprehension(self, expr: SetComprehension) -> Value: + return BuildExpressionIR(self).visit_set_comprehension(expr) + + def visit_dictionary_comprehension(self, expr: DictionaryComprehension) -> Value: + return BuildExpressionIR(self).visit_dictionary_comprehension(expr) + + def visit_slice_expr(self, expr: SliceExpr) -> Value: + return BuildExpressionIR(self).visit_slice_expr(expr) + + def visit_generator_expr(self, expr: GeneratorExpr) -> Value: + return BuildExpressionIR(self).visit_generator_expr(expr) + + def visit_lambda_expr(self, expr: LambdaExpr) -> Value: + return BuildFuncIR(self).visit_lambda_expr(expr) def visit_yield_expr(self, expr: YieldExpr) -> Value: return BuildFuncIR(self).visit_yield_expr(expr) @@ -2085,9 +1677,6 @@ def visit_yield_expr(self, expr: YieldExpr) -> Value: def visit_yield_from_expr(self, o: YieldFromExpr) -> Value: return BuildFuncIR(self).visit_yield_from_expr(o) - def visit_ellipsis(self, o: EllipsisExpr) -> Value: - return self.primitive_op(ellipsis_op, [], o.line) - # Builtin function special cases @specialize_function('builtins.globals') diff --git a/mypyc/specialize.py b/mypyc/specialize.py new file mode 100644 index 000000000000..d2b796439d46 --- /dev/null +++ b/mypyc/specialize.py @@ -0,0 +1,47 @@ +"""General infrastructure for special casing calls to builtin functions. + +Most special cases should be handled using the data driven "primitive +ops" system, but certain operations require special handling that has +access to the AST/IR directly and can make decisions/optimizations +based on it. + +For example, we use specializers to statically emit the length of a +fixed length tuple and to emit optimized code for any()/all() calls with +generator comprehensions as the argument. + +See comment below for more documentation. 
+""" + +from typing import Callable, Optional, Dict, Tuple +from typing_extensions import TYPE_CHECKING + +from mypy.nodes import CallExpr, RefExpr + +from mypyc.ops import Value, RType + +if TYPE_CHECKING: + from mypyc.genops import IRBuilder # noqa + + +# Specializers are attempted before compiling the arguments to the +# function. Specializers can return None to indicate that they failed +# and the call should be compiled normally. Otherwise they should emit +# code for the call and return a Value containing the result. +# +# Specializers take three arguments: the IRBuilder, the CallExpr being +# compiled, and the RefExpr that is the left hand side of the call. +# +# Specializers can operate on methods as well, and are keyed on the +# name and RType in that case. +Specializer = Callable[['IRBuilder', CallExpr, RefExpr], Optional[Value]] + +specializers = {} # type: Dict[Tuple[str, Optional[RType]], Specializer] + + +def specialize_function( + name: str, typ: Optional[RType] = None) -> Callable[[Specializer], Specializer]: + """Decorator to register a function as being a specializer.""" + def wrapper(f: Specializer) -> Specializer: + specializers[name, typ] = f + return f + return wrapper From 7e152faa0d95fdc900975e92362ca2d96dabe058 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sat, 22 Feb 2020 16:46:39 +0000 Subject: [PATCH 106/117] [mypyc] Refactor: extract IR generation for statements from genops (#8429) This makes `mypyc.genfunc` depend on `mypyc.genstatement` which is a little unfortunate. One way to (partially) fix this would be by extracting try statement related code from `mypyc.genstatement`. It doesn't seem urgent, though. Work on mypyc/mypyc#714. 
--- mypyc/genfunc.py | 5 +- mypyc/genops.py | 549 +++++------------------------------------- mypyc/genstatement.py | 526 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 591 insertions(+), 489 deletions(-) create mode 100644 mypyc/genstatement.py diff --git a/mypyc/genfunc.py b/mypyc/genfunc.py index c55e76c31428..bfb629ebfbef 100644 --- a/mypyc/genfunc.py +++ b/mypyc/genfunc.py @@ -30,6 +30,7 @@ from mypyc.sametype import is_same_method_signature from mypyc.genopsutil import concrete_arg_kind, is_constant, add_self_to_env from mypyc.genopscontext import FuncInfo, GeneratorClass, ImplicitClass +from mypyc.genstatement import BuildStatementIR if TYPE_CHECKING: from mypyc.genops import IRBuilder @@ -854,7 +855,9 @@ def else_body() -> None: self.builder.nonlocal_control[-1].gen_break(self.builder, o.line) self.builder.push_loop_stack(loop_block, done_block) - self.builder.visit_try_except(try_body, [(None, None, except_body)], else_body, o.line) + BuildStatementIR(self.builder).visit_try_except( + try_body, [(None, None, except_body)], else_body, o.line + ) self.builder.pop_loop_stack() self.builder.goto_and_activate(done_block) diff --git a/mypyc/genops.py b/mypyc/genops.py index 25d59a6fc469..99e9d71d1faa 100644 --- a/mypyc/genops.py +++ b/mypyc/genops.py @@ -55,32 +55,25 @@ def f(x: int) -> int: LoadStatic, InitStatic, INVALID_FUNC_DEF, int_rprimitive, bool_rprimitive, list_rprimitive, is_list_rprimitive, dict_rprimitive, set_rprimitive, str_rprimitive, none_rprimitive, is_none_rprimitive, object_rprimitive, - exc_rtuple, PrimitiveOp, OpDescription, is_object_rprimitive, - FuncSignature, NAMESPACE_MODULE, - RaiseStandardError, LoadErrorValue, NO_TRACEBACK_LINE_NO, FuncDecl, + PrimitiveOp, OpDescription, is_object_rprimitive, + FuncSignature, NAMESPACE_MODULE, RaiseStandardError, FuncDecl ) from mypyc.ops_primitive import func_ops from mypyc.ops_list import list_append_op, list_len_op, new_list_op, to_list, list_pop_last from mypyc.ops_dict import 
dict_get_item_op, dict_set_item_op from mypyc.ops_misc import ( - true_op, false_op, iter_op, next_op, py_setattr_op, py_delattr_op, - type_op, import_op, get_module_dict_op -) -from mypyc.ops_exc import ( - raise_exception_op, reraise_exception_op, - error_catch_op, restore_exc_info_op, exc_matches_op, get_exc_value_op, - get_exc_info_op, keep_propagating_op + true_op, false_op, iter_op, next_op, py_setattr_op, import_op, get_module_dict_op ) from mypyc.genops_for import ForGenerator, ForRange, ForList, ForIterable, ForEnumerate, ForZip from mypyc.crash import catch_errors from mypyc.options import CompilerOptions from mypyc.errors import Errors from mypyc.nonlocalcontrol import ( - NonlocalControl, BaseNonlocalControl, LoopNonlocalControl, ExceptNonlocalControl, - FinallyNonlocalControl, TryFinallyNonlocalControl, GeneratorNonlocalControl + NonlocalControl, BaseNonlocalControl, LoopNonlocalControl, GeneratorNonlocalControl ) from mypyc.genclass import BuildClassIR from mypyc.genfunc import BuildFuncIR +from mypyc.genstatement import BuildStatementIR from mypyc.genexpr import BuildExpressionIR from mypyc.genopscontext import FuncInfo, ImplicitClass from mypyc.genopsmapper import Mapper @@ -432,34 +425,6 @@ def add_implicit_unreachable(self) -> None: if not block.terminated: self.add(Unreachable()) - def visit_block(self, block: Block) -> None: - if not block.is_unreachable: - for stmt in block.body: - self.accept(stmt) - # Raise a RuntimeError if we hit a non-empty unreachable block. - # Don't complain about empty unreachable blocks, since mypy inserts - # those after `if MYPY`. - elif block.body: - self.add(RaiseStandardError(RaiseStandardError.RUNTIME_ERROR, - 'Reached allegedly unreachable code!', - block.line)) - self.add(Unreachable()) - - def visit_expression_stmt(self, stmt: ExpressionStmt) -> None: - if isinstance(stmt.expr, StrExpr): - # Docstring. Ignore - return - # ExpressionStmts do not need to be coerced like other Expressions. 
- stmt.expr.accept(self) - - def visit_return_stmt(self, stmt: ReturnStmt) -> None: - if stmt.expr: - retval = self.accept(stmt.expr) - else: - retval = self.builder.none() - retval = self.coerce(retval, self.ret_types[-1], stmt.line) - self.nonlocal_control[-1].gen_return(self, retval, stmt.line) - def disallow_class_assignments(self, lvalues: List[Lvalue], line: int) -> None: # Some best-effort attempts to disallow assigning to class # variables that aren't marked ClassVar, since we blatantly @@ -530,38 +495,6 @@ def load_final_literal_value(self, val: Union[int, str, bytes, float, bool], else: assert False, "Unsupported final literal value" - def visit_assignment_stmt(self, stmt: AssignmentStmt) -> None: - assert len(stmt.lvalues) >= 1 - self.disallow_class_assignments(stmt.lvalues, stmt.line) - lvalue = stmt.lvalues[0] - if stmt.type and isinstance(stmt.rvalue, TempNode): - # This is actually a variable annotation without initializer. Don't generate - # an assignment but we need to call get_assignment_target since it adds a - # name binding as a side effect. 
- self.get_assignment_target(lvalue, stmt.line) - return - - line = stmt.rvalue.line - rvalue_reg = self.accept(stmt.rvalue) - if self.non_function_scope() and stmt.is_final_def: - self.init_final_static(lvalue, rvalue_reg) - for lvalue in stmt.lvalues: - target = self.get_assignment_target(lvalue) - self.assign(target, rvalue_reg, line) - - def visit_operator_assignment_stmt(self, stmt: OperatorAssignmentStmt) -> None: - """Operator assignment statement such as x += 1""" - self.disallow_class_assignments([stmt.lvalue], stmt.line) - target = self.get_assignment_target(stmt.lvalue) - target_value = self.read(target, stmt.line) - rreg = self.accept(stmt.rvalue) - # the Python parser strips the '=' from operator assignment statements, so re-add it - op = stmt.op + '=' - res = self.binary_op(target_value, rreg, op, stmt.line) - # usually operator assignments are done in-place - # but when target doesn't support that we need to manually assign - self.assign(target, res, res.line) - def get_assignment_target(self, lvalue: Lvalue, line: int = -1) -> AssignmentTarget: if isinstance(lvalue, NameExpr): @@ -753,23 +686,6 @@ def process_iterator_tuple_assignment(self, self.activate_block(ok_block) - def visit_if_stmt(self, stmt: IfStmt) -> None: - if_body, next = BasicBlock(), BasicBlock() - else_body = BasicBlock() if stmt.else_body else next - - # If statements are normalized - assert len(stmt.expr) == 1 - - self.process_conditional(stmt.expr[0], if_body, else_body) - self.activate_block(if_body) - self.accept(stmt.body[0]) - self.goto(next) - if stmt.else_body: - self.activate_block(else_body) - self.accept(stmt.else_body) - self.goto(next) - self.activate_block(next) - def push_loop_stack(self, continue_block: BasicBlock, break_block: BasicBlock) -> None: self.nonlocal_control.append( LoopNonlocalControl(self.nonlocal_control[-1], continue_block, break_block)) @@ -777,41 +693,6 @@ def push_loop_stack(self, continue_block: BasicBlock, break_block: BasicBlock) - def 
pop_loop_stack(self) -> None: self.nonlocal_control.pop() - def visit_while_stmt(self, s: WhileStmt) -> None: - body, next, top, else_block = BasicBlock(), BasicBlock(), BasicBlock(), BasicBlock() - normal_loop_exit = else_block if s.else_body is not None else next - - self.push_loop_stack(top, next) - - # Split block so that we get a handle to the top of the loop. - self.goto_and_activate(top) - self.process_conditional(s.expr, body, normal_loop_exit) - - self.activate_block(body) - self.accept(s.body) - # Add branch to the top at the end of the body. - self.goto(top) - - self.pop_loop_stack() - - if s.else_body is not None: - self.activate_block(else_block) - self.accept(s.else_body) - self.goto(next) - - self.activate_block(next) - - def visit_for_stmt(self, s: ForStmt) -> None: - def body() -> None: - self.accept(s.body) - - def else_block() -> None: - assert s.else_body is not None - self.accept(s.else_body) - - self.for_loop_helper(s.index, s.expr, body, - else_block if s.else_body else None, s.line) - def spill(self, value: Value) -> AssignmentTarget: """Moves a given Value instance into the generator class' environment class.""" name = '{}{}'.format(TEMP_ATTR_NAME, self.temp_counter) @@ -1027,12 +908,6 @@ def _analyze_iterable_item_type(self, expr: Expression) -> Type: # Non-tuple iterable. 
return echk.check_method_call_by_name('__next__', iterator, [], [], expr)[0] - def visit_break_stmt(self, node: BreakStmt) -> None: - self.nonlocal_control[-1].gen_break(self, node.line) - - def visit_continue_stmt(self, node: ContinueStmt) -> None: - self.nonlocal_control[-1].gen_continue(self, node.line) - def is_native_module(self, module: str) -> bool: """Is the given module one compiled by mypyc?""" return module in self.mapper.group_map @@ -1165,342 +1040,6 @@ def process_conditional(self, e: Expression, true: BasicBlock, false: BasicBlock reg = self.accept(e) self.add_bool_branch(reg, true, false) - def visit_nonlocal_decl(self, o: NonlocalDecl) -> None: - pass - - def visit_raise_stmt(self, s: RaiseStmt) -> None: - if s.expr is None: - self.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) - self.add(Unreachable()) - return - - exc = self.accept(s.expr) - self.primitive_op(raise_exception_op, [exc], s.line) - self.add(Unreachable()) - - def visit_try_except(self, - body: GenFunc, - handlers: Sequence[ - Tuple[Optional[Expression], Optional[Expression], GenFunc]], - else_body: Optional[GenFunc], - line: int) -> None: - """Generalized try/except/else handling that takes functions to gen the bodies. - - The point of this is to also be able to support with.""" - assert handlers, "try needs except" - - except_entry, exit_block, cleanup_block = BasicBlock(), BasicBlock(), BasicBlock() - double_except_block = BasicBlock() - # If there is an else block, jump there after the try, otherwise just leave - else_block = BasicBlock() if else_body else exit_block - - # Compile the try block with an error handler - self.builder.push_error_handler(except_entry) - self.goto_and_activate(BasicBlock()) - body() - self.goto(else_block) - self.builder.pop_error_handler() - - # The error handler catches the error and then checks it - # against the except clauses. 
We compile the error handler - # itself with an error handler so that it can properly restore - # the *old* exc_info if an exception occurs. - # The exception chaining will be done automatically when the - # exception is raised, based on the exception in exc_info. - self.builder.push_error_handler(double_except_block) - self.activate_block(except_entry) - old_exc = self.maybe_spill(self.primitive_op(error_catch_op, [], line)) - # Compile the except blocks with the nonlocal control flow overridden to clear exc_info - self.nonlocal_control.append( - ExceptNonlocalControl(self.nonlocal_control[-1], old_exc)) - - # Process the bodies - for type, var, handler_body in handlers: - next_block = None - if type: - next_block, body_block = BasicBlock(), BasicBlock() - matches = self.primitive_op(exc_matches_op, [self.accept(type)], type.line) - self.add(Branch(matches, body_block, next_block, Branch.BOOL_EXPR)) - self.activate_block(body_block) - if var: - target = self.get_assignment_target(var) - self.assign(target, self.primitive_op(get_exc_value_op, [], var.line), var.line) - handler_body() - self.goto(cleanup_block) - if next_block: - self.activate_block(next_block) - - # Reraise the exception if needed - if next_block: - self.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) - self.add(Unreachable()) - - self.nonlocal_control.pop() - self.builder.pop_error_handler() - - # Cleanup for if we leave except through normal control flow: - # restore the saved exc_info information and continue propagating - # the exception if it exists. - self.activate_block(cleanup_block) - self.primitive_op(restore_exc_info_op, [self.read(old_exc)], line) - self.goto(exit_block) - - # Cleanup for if we leave except through a raised exception: - # restore the saved exc_info information and continue propagating - # the exception. 
- self.activate_block(double_except_block) - self.primitive_op(restore_exc_info_op, [self.read(old_exc)], line) - self.primitive_op(keep_propagating_op, [], NO_TRACEBACK_LINE_NO) - self.add(Unreachable()) - - # If present, compile the else body in the obvious way - if else_body: - self.activate_block(else_block) - else_body() - self.goto(exit_block) - - self.activate_block(exit_block) - - def visit_try_except_stmt(self, t: TryStmt) -> None: - def body() -> None: - self.accept(t.body) - - # Work around scoping woes - def make_handler(body: Block) -> GenFunc: - return lambda: self.accept(body) - - handlers = [(type, var, make_handler(body)) for type, var, body in - zip(t.types, t.vars, t.handlers)] - else_body = (lambda: self.accept(t.else_body)) if t.else_body else None - self.visit_try_except(body, handlers, else_body, t.line) - - def try_finally_try(self, err_handler: BasicBlock, return_entry: BasicBlock, - main_entry: BasicBlock, try_body: GenFunc) -> Optional[Register]: - # Compile the try block with an error handler - control = TryFinallyNonlocalControl(return_entry) - self.builder.push_error_handler(err_handler) - - self.nonlocal_control.append(control) - self.goto_and_activate(BasicBlock()) - try_body() - self.goto(main_entry) - self.nonlocal_control.pop() - self.builder.pop_error_handler() - - return control.ret_reg - - def try_finally_entry_blocks(self, - err_handler: BasicBlock, return_entry: BasicBlock, - main_entry: BasicBlock, finally_block: BasicBlock, - ret_reg: Optional[Register]) -> Value: - old_exc = self.alloc_temp(exc_rtuple) - - # Entry block for non-exceptional flow - self.activate_block(main_entry) - if ret_reg: - self.add(Assign(ret_reg, self.add(LoadErrorValue(self.ret_types[-1])))) - self.goto(return_entry) - - self.activate_block(return_entry) - self.add(Assign(old_exc, self.add(LoadErrorValue(exc_rtuple)))) - self.goto(finally_block) - - # Entry block for errors - self.activate_block(err_handler) - if ret_reg: - self.add(Assign(ret_reg, 
self.add(LoadErrorValue(self.ret_types[-1])))) - self.add(Assign(old_exc, self.primitive_op(error_catch_op, [], -1))) - self.goto(finally_block) - - return old_exc - - def try_finally_body( - self, finally_block: BasicBlock, finally_body: GenFunc, - ret_reg: Optional[Value], old_exc: Value) -> Tuple[BasicBlock, - 'FinallyNonlocalControl']: - cleanup_block = BasicBlock() - # Compile the finally block with the nonlocal control flow overridden to restore exc_info - self.builder.push_error_handler(cleanup_block) - finally_control = FinallyNonlocalControl( - self.nonlocal_control[-1], ret_reg, old_exc) - self.nonlocal_control.append(finally_control) - self.activate_block(finally_block) - finally_body() - self.nonlocal_control.pop() - - return cleanup_block, finally_control - - def try_finally_resolve_control(self, cleanup_block: BasicBlock, - finally_control: FinallyNonlocalControl, - old_exc: Value, ret_reg: Optional[Value]) -> BasicBlock: - """Resolve the control flow out of a finally block. - - This means returning if there was a return, propagating - exceptions, break/continue (soon), or just continuing on. 
- """ - reraise, rest = BasicBlock(), BasicBlock() - self.add(Branch(old_exc, rest, reraise, Branch.IS_ERROR)) - - # Reraise the exception if there was one - self.activate_block(reraise) - self.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) - self.add(Unreachable()) - self.builder.pop_error_handler() - - # If there was a return, keep returning - if ret_reg: - self.activate_block(rest) - return_block, rest = BasicBlock(), BasicBlock() - self.add(Branch(ret_reg, rest, return_block, Branch.IS_ERROR)) - - self.activate_block(return_block) - self.nonlocal_control[-1].gen_return(self, ret_reg, -1) - - # TODO: handle break/continue - self.activate_block(rest) - out_block = BasicBlock() - self.goto(out_block) - - # If there was an exception, restore again - self.activate_block(cleanup_block) - finally_control.gen_cleanup(self, -1) - self.primitive_op(keep_propagating_op, [], NO_TRACEBACK_LINE_NO) - self.add(Unreachable()) - - return out_block - - def visit_try_finally_stmt(self, try_body: GenFunc, finally_body: GenFunc) -> None: - """Generalized try/finally handling that takes functions to gen the bodies. - - The point of this is to also be able to support with.""" - # Finally is a big pain, because there are so many ways that - # exits can occur. We emit 10+ basic blocks for every finally! 
- - err_handler, main_entry, return_entry, finally_block = ( - BasicBlock(), BasicBlock(), BasicBlock(), BasicBlock()) - - # Compile the body of the try - ret_reg = self.try_finally_try( - err_handler, return_entry, main_entry, try_body) - - # Set up the entry blocks for the finally statement - old_exc = self.try_finally_entry_blocks( - err_handler, return_entry, main_entry, finally_block, ret_reg) - - # Compile the body of the finally - cleanup_block, finally_control = self.try_finally_body( - finally_block, finally_body, ret_reg, old_exc) - - # Resolve the control flow out of the finally block - out_block = self.try_finally_resolve_control( - cleanup_block, finally_control, old_exc, ret_reg) - - self.activate_block(out_block) - - def visit_try_stmt(self, t: TryStmt) -> None: - # Our compilation strategy for try/except/else/finally is to - # treat try/except/else and try/finally as separate language - # constructs that we compile separately. When we have a - # try/except/else/finally, we treat the try/except/else as the - # body of a try/finally block. - if t.finally_body: - def visit_try_body() -> None: - if t.handlers: - self.visit_try_except_stmt(t) - else: - self.accept(t.body) - body = t.finally_body - - self.visit_try_finally_stmt(visit_try_body, lambda: self.accept(body)) - else: - self.visit_try_except_stmt(t) - - def get_sys_exc_info(self) -> List[Value]: - exc_info = self.primitive_op(get_exc_info_op, [], -1) - return [self.add(TupleGet(exc_info, i, -1)) for i in range(3)] - - def visit_with(self, expr: Expression, target: Optional[Lvalue], - body: GenFunc, line: int) -> None: - - # This is basically a straight transcription of the Python code in PEP 343. - # I don't actually understand why a bunch of it is the way it is. - # We could probably optimize the case where the manager is compiled by us, - # but that is not our common case at all, so. 
- mgr_v = self.accept(expr) - typ = self.primitive_op(type_op, [mgr_v], line) - exit_ = self.maybe_spill(self.py_get_attr(typ, '__exit__', line)) - value = self.py_call(self.py_get_attr(typ, '__enter__', line), [mgr_v], line) - mgr = self.maybe_spill(mgr_v) - exc = self.maybe_spill_assignable(self.primitive_op(true_op, [], -1)) - - def try_body() -> None: - if target: - self.assign(self.get_assignment_target(target), value, line) - body() - - def except_body() -> None: - self.assign(exc, self.primitive_op(false_op, [], -1), line) - out_block, reraise_block = BasicBlock(), BasicBlock() - self.add_bool_branch(self.py_call(self.read(exit_), - [self.read(mgr)] + self.get_sys_exc_info(), line), - out_block, reraise_block) - self.activate_block(reraise_block) - self.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) - self.add(Unreachable()) - self.activate_block(out_block) - - def finally_body() -> None: - out_block, exit_block = BasicBlock(), BasicBlock() - self.add(Branch(self.read(exc), exit_block, out_block, Branch.BOOL_EXPR)) - self.activate_block(exit_block) - none = self.none_object() - self.py_call(self.read(exit_), [self.read(mgr), none, none, none], line) - self.goto_and_activate(out_block) - - self.visit_try_finally_stmt( - lambda: self.visit_try_except(try_body, [(None, None, except_body)], None, line), - finally_body) - - def visit_with_stmt(self, o: WithStmt) -> None: - # Generate separate logic for each expr in it, left to right - def generate(i: int) -> None: - if i >= len(o.expr): - self.accept(o.body) - else: - self.visit_with(o.expr[i], o.target[i], lambda: generate(i + 1), o.line) - - generate(0) - - def visit_pass_stmt(self, o: PassStmt) -> None: - pass - - def visit_global_decl(self, o: GlobalDecl) -> None: - # Pure declaration -- no runtime effect - pass - - def visit_assert_stmt(self, a: AssertStmt) -> None: - if self.options.strip_asserts: - return - cond = self.accept(a.expr) - ok_block, error_block = BasicBlock(), BasicBlock() - 
self.add_bool_branch(cond, ok_block, error_block) - self.activate_block(error_block) - if a.msg is None: - # Special case (for simpler generated code) - self.add(RaiseStandardError(RaiseStandardError.ASSERTION_ERROR, None, a.line)) - elif isinstance(a.msg, StrExpr): - # Another special case - self.add(RaiseStandardError(RaiseStandardError.ASSERTION_ERROR, a.msg.value, - a.line)) - else: - # The general case -- explicitly construct an exception instance - message = self.accept(a.msg) - exc_type = self.load_module_attr_by_fullname('builtins.AssertionError', a.line) - exc = self.py_call(exc_type, [message], a.line) - self.primitive_op(raise_exception_op, [exc], a.line) - self.add(Unreachable()) - self.activate_block(ok_block) - def translate_list_comprehension(self, gen: GeneratorExpr) -> Value: list_ops = self.primitive_op(new_list_op, [], gen.line) loop_params = list(zip(gen.indices, gen.sequences, gen.condlists)) @@ -1570,29 +1109,63 @@ def loop_contents( def visit_decorator(self, dec: Decorator) -> None: BuildFuncIR(self).visit_decorator(dec) - def visit_del_stmt(self, o: DelStmt) -> None: - self.visit_del_item(self.get_assignment_target(o.expr), o.line) + def visit_block(self, block: Block) -> None: + BuildStatementIR(self).visit_block(block) - def visit_del_item(self, target: AssignmentTarget, line: int) -> None: - if isinstance(target, AssignmentTargetIndex): - self.gen_method_call( - target.base, - '__delitem__', - [target.index], - result_type=None, - line=line - ) - elif isinstance(target, AssignmentTargetAttr): - key = self.load_static_unicode(target.attr) - self.add(PrimitiveOp([target.obj, key], py_delattr_op, line)) - elif isinstance(target, AssignmentTargetRegister): - # Delete a local by assigning an error value to it, which will - # prompt the insertion of uninit checks. 
- self.add(Assign(target.register, - self.add(LoadErrorValue(target.type, undefines=True)))) - elif isinstance(target, AssignmentTargetTuple): - for subtarget in target.items: - self.visit_del_item(subtarget, line) + # Statements + + def visit_expression_stmt(self, stmt: ExpressionStmt) -> None: + BuildStatementIR(self).visit_expression_stmt(stmt) + + def visit_return_stmt(self, stmt: ReturnStmt) -> None: + BuildStatementIR(self).visit_return_stmt(stmt) + + def visit_assignment_stmt(self, stmt: AssignmentStmt) -> None: + BuildStatementIR(self).visit_assignment_stmt(stmt) + + def visit_operator_assignment_stmt(self, stmt: OperatorAssignmentStmt) -> None: + BuildStatementIR(self).visit_operator_assignment_stmt(stmt) + + def visit_if_stmt(self, stmt: IfStmt) -> None: + BuildStatementIR(self).visit_if_stmt(stmt) + + def visit_while_stmt(self, stmt: WhileStmt) -> None: + BuildStatementIR(self).visit_while_stmt(stmt) + + def visit_for_stmt(self, stmt: ForStmt) -> None: + BuildStatementIR(self).visit_for_stmt(stmt) + + def visit_break_stmt(self, stmt: BreakStmt) -> None: + BuildStatementIR(self).visit_break_stmt(stmt) + + def visit_continue_stmt(self, stmt: ContinueStmt) -> None: + BuildStatementIR(self).visit_continue_stmt(stmt) + + def visit_raise_stmt(self, stmt: RaiseStmt) -> None: + BuildStatementIR(self).visit_raise_stmt(stmt) + + def visit_try_stmt(self, stmt: TryStmt) -> None: + BuildStatementIR(self).visit_try_stmt(stmt) + + def visit_with_stmt(self, stmt: WithStmt) -> None: + BuildStatementIR(self).visit_with_stmt(stmt) + + def visit_pass_stmt(self, stmt: PassStmt) -> None: + pass + + def visit_assert_stmt(self, stmt: AssertStmt) -> None: + BuildStatementIR(self).visit_assert_stmt(stmt) + + def visit_del_stmt(self, stmt: DelStmt) -> None: + BuildStatementIR(self).visit_del_stmt(stmt) + + def visit_global_decl(self, stmt: GlobalDecl) -> None: + # Pure declaration -- no runtime effect + pass + + def visit_nonlocal_decl(self, stmt: NonlocalDecl) -> None: + # Pure 
declaration -- no runtime effect + pass # Expressions diff --git a/mypyc/genstatement.py b/mypyc/genstatement.py new file mode 100644 index 000000000000..108cdbafe457 --- /dev/null +++ b/mypyc/genstatement.py @@ -0,0 +1,526 @@ +from typing import Optional, List, Tuple, Sequence, Callable +from typing_extensions import TYPE_CHECKING + +from mypy.nodes import ( + Block, ExpressionStmt, ReturnStmt, AssignmentStmt, OperatorAssignmentStmt, IfStmt, WhileStmt, + ForStmt, BreakStmt, ContinueStmt, RaiseStmt, TryStmt, WithStmt, AssertStmt, DelStmt, + Expression, StrExpr, TempNode, Lvalue +) + +from mypyc.ops import ( + Assign, Unreachable, AssignmentTarget, AssignmentTargetRegister, AssignmentTargetIndex, + AssignmentTargetAttr, AssignmentTargetTuple, PrimitiveOp, RaiseStandardError, LoadErrorValue, + BasicBlock, TupleGet, Value, Register, Branch, exc_rtuple, NO_TRACEBACK_LINE_NO +) +from mypyc.ops_misc import true_op, false_op, type_op, py_delattr_op +from mypyc.ops_exc import ( + raise_exception_op, reraise_exception_op, error_catch_op, exc_matches_op, restore_exc_info_op, + get_exc_value_op, keep_propagating_op, get_exc_info_op +) +from mypyc.nonlocalcontrol import ( + ExceptNonlocalControl, FinallyNonlocalControl, TryFinallyNonlocalControl +) + + +if TYPE_CHECKING: + from mypyc.genops import IRBuilder + +GenFunc = Callable[[], None] + + +class BuildStatementIR: + def __init__(self, builder: 'IRBuilder') -> None: + self.builder = builder + + def visit_block(self, block: Block) -> None: + if not block.is_unreachable: + for stmt in block.body: + self.builder.accept(stmt) + # Raise a RuntimeError if we hit a non-empty unreachable block. + # Don't complain about empty unreachable blocks, since mypy inserts + # those after `if MYPY`. 
+ elif block.body: + self.builder.add(RaiseStandardError(RaiseStandardError.RUNTIME_ERROR, + 'Reached allegedly unreachable code!', + block.line)) + self.builder.add(Unreachable()) + + def visit_expression_stmt(self, stmt: ExpressionStmt) -> None: + if isinstance(stmt.expr, StrExpr): + # Docstring. Ignore + return + # ExpressionStmts do not need to be coerced like other Expressions. + stmt.expr.accept(self.builder) + + def visit_return_stmt(self, stmt: ReturnStmt) -> None: + if stmt.expr: + retval = self.builder.accept(stmt.expr) + else: + retval = self.builder.builder.none() + retval = self.builder.coerce(retval, self.builder.ret_types[-1], stmt.line) + self.builder.nonlocal_control[-1].gen_return(self.builder, retval, stmt.line) + + def visit_assignment_stmt(self, stmt: AssignmentStmt) -> None: + assert len(stmt.lvalues) >= 1 + self.builder.disallow_class_assignments(stmt.lvalues, stmt.line) + lvalue = stmt.lvalues[0] + if stmt.type and isinstance(stmt.rvalue, TempNode): + # This is actually a variable annotation without initializer. Don't generate + # an assignment but we need to call get_assignment_target since it adds a + # name binding as a side effect. 
+ self.builder.get_assignment_target(lvalue, stmt.line) + return + + line = stmt.rvalue.line + rvalue_reg = self.builder.accept(stmt.rvalue) + if self.builder.non_function_scope() and stmt.is_final_def: + self.builder.init_final_static(lvalue, rvalue_reg) + for lvalue in stmt.lvalues: + target = self.builder.get_assignment_target(lvalue) + self.builder.assign(target, rvalue_reg, line) + + def visit_operator_assignment_stmt(self, stmt: OperatorAssignmentStmt) -> None: + """Operator assignment statement such as x += 1""" + self.builder.disallow_class_assignments([stmt.lvalue], stmt.line) + target = self.builder.get_assignment_target(stmt.lvalue) + target_value = self.builder.read(target, stmt.line) + rreg = self.builder.accept(stmt.rvalue) + # the Python parser strips the '=' from operator assignment statements, so re-add it + op = stmt.op + '=' + res = self.builder.binary_op(target_value, rreg, op, stmt.line) + # usually operator assignments are done in-place + # but when target doesn't support that we need to manually assign + self.builder.assign(target, res, res.line) + + def visit_if_stmt(self, stmt: IfStmt) -> None: + if_body, next = BasicBlock(), BasicBlock() + else_body = BasicBlock() if stmt.else_body else next + + # If statements are normalized + assert len(stmt.expr) == 1 + + self.builder.process_conditional(stmt.expr[0], if_body, else_body) + self.builder.activate_block(if_body) + self.builder.accept(stmt.body[0]) + self.builder.goto(next) + if stmt.else_body: + self.builder.activate_block(else_body) + self.builder.accept(stmt.else_body) + self.builder.goto(next) + self.builder.activate_block(next) + + def visit_while_stmt(self, s: WhileStmt) -> None: + body, next, top, else_block = BasicBlock(), BasicBlock(), BasicBlock(), BasicBlock() + normal_loop_exit = else_block if s.else_body is not None else next + + self.builder.push_loop_stack(top, next) + + # Split block so that we get a handle to the top of the loop. 
+ self.builder.goto_and_activate(top) + self.builder.process_conditional(s.expr, body, normal_loop_exit) + + self.builder.activate_block(body) + self.builder.accept(s.body) + # Add branch to the top at the end of the body. + self.builder.goto(top) + + self.builder.pop_loop_stack() + + if s.else_body is not None: + self.builder.activate_block(else_block) + self.builder.accept(s.else_body) + self.builder.goto(next) + + self.builder.activate_block(next) + + def visit_for_stmt(self, s: ForStmt) -> None: + def body() -> None: + self.builder.accept(s.body) + + def else_block() -> None: + assert s.else_body is not None + self.builder.accept(s.else_body) + + self.builder.for_loop_helper(s.index, s.expr, body, + else_block if s.else_body else None, s.line) + + def visit_break_stmt(self, node: BreakStmt) -> None: + self.builder.nonlocal_control[-1].gen_break(self.builder, node.line) + + def visit_continue_stmt(self, node: ContinueStmt) -> None: + self.builder.nonlocal_control[-1].gen_continue(self.builder, node.line) + + def visit_raise_stmt(self, s: RaiseStmt) -> None: + if s.expr is None: + self.builder.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) + self.builder.add(Unreachable()) + return + + exc = self.builder.accept(s.expr) + self.builder.primitive_op(raise_exception_op, [exc], s.line) + self.builder.add(Unreachable()) + + def visit_try_except(self, + body: GenFunc, + handlers: Sequence[ + Tuple[Optional[Expression], Optional[Expression], GenFunc]], + else_body: Optional[GenFunc], + line: int) -> None: + """Generalized try/except/else handling that takes functions to gen the bodies. 
+ + The point of this is to also be able to support with.""" + assert handlers, "try needs except" + + except_entry, exit_block, cleanup_block = BasicBlock(), BasicBlock(), BasicBlock() + double_except_block = BasicBlock() + # If there is an else block, jump there after the try, otherwise just leave + else_block = BasicBlock() if else_body else exit_block + + # Compile the try block with an error handler + self.builder.builder.push_error_handler(except_entry) + self.builder.goto_and_activate(BasicBlock()) + body() + self.builder.goto(else_block) + self.builder.builder.pop_error_handler() + + # The error handler catches the error and then checks it + # against the except clauses. We compile the error handler + # itself with an error handler so that it can properly restore + # the *old* exc_info if an exception occurs. + # The exception chaining will be done automatically when the + # exception is raised, based on the exception in exc_info. + self.builder.builder.push_error_handler(double_except_block) + self.builder.activate_block(except_entry) + old_exc = self.builder.maybe_spill(self.builder.primitive_op(error_catch_op, [], line)) + # Compile the except blocks with the nonlocal control flow overridden to clear exc_info + self.builder.nonlocal_control.append( + ExceptNonlocalControl(self.builder.nonlocal_control[-1], old_exc)) + + # Process the bodies + for type, var, handler_body in handlers: + next_block = None + if type: + next_block, body_block = BasicBlock(), BasicBlock() + matches = self.builder.primitive_op( + exc_matches_op, [self.builder.accept(type)], type.line + ) + self.builder.add(Branch(matches, body_block, next_block, Branch.BOOL_EXPR)) + self.builder.activate_block(body_block) + if var: + target = self.builder.get_assignment_target(var) + self.builder.assign( + target, + self.builder.primitive_op(get_exc_value_op, [], var.line), + var.line + ) + handler_body() + self.builder.goto(cleanup_block) + if next_block: + 
self.builder.activate_block(next_block) + + # Reraise the exception if needed + if next_block: + self.builder.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) + self.builder.add(Unreachable()) + + self.builder.nonlocal_control.pop() + self.builder.builder.pop_error_handler() + + # Cleanup for if we leave except through normal control flow: + # restore the saved exc_info information and continue propagating + # the exception if it exists. + self.builder.activate_block(cleanup_block) + self.builder.primitive_op(restore_exc_info_op, [self.builder.read(old_exc)], line) + self.builder.goto(exit_block) + + # Cleanup for if we leave except through a raised exception: + # restore the saved exc_info information and continue propagating + # the exception. + self.builder.activate_block(double_except_block) + self.builder.primitive_op(restore_exc_info_op, [self.builder.read(old_exc)], line) + self.builder.primitive_op(keep_propagating_op, [], NO_TRACEBACK_LINE_NO) + self.builder.add(Unreachable()) + + # If present, compile the else body in the obvious way + if else_body: + self.builder.activate_block(else_block) + else_body() + self.builder.goto(exit_block) + + self.builder.activate_block(exit_block) + + def visit_try_except_stmt(self, t: TryStmt) -> None: + def body() -> None: + self.builder.accept(t.body) + + # Work around scoping woes + def make_handler(body: Block) -> GenFunc: + return lambda: self.builder.accept(body) + + handlers = [(type, var, make_handler(body)) for type, var, body in + zip(t.types, t.vars, t.handlers)] + else_body = (lambda: self.builder.accept(t.else_body)) if t.else_body else None + self.visit_try_except(body, handlers, else_body, t.line) + + def try_finally_try(self, err_handler: BasicBlock, return_entry: BasicBlock, + main_entry: BasicBlock, try_body: GenFunc) -> Optional[Register]: + # Compile the try block with an error handler + control = TryFinallyNonlocalControl(return_entry) + self.builder.builder.push_error_handler(err_handler) 
+ + self.builder.nonlocal_control.append(control) + self.builder.goto_and_activate(BasicBlock()) + try_body() + self.builder.goto(main_entry) + self.builder.nonlocal_control.pop() + self.builder.builder.pop_error_handler() + + return control.ret_reg + + def try_finally_entry_blocks(self, + err_handler: BasicBlock, return_entry: BasicBlock, + main_entry: BasicBlock, finally_block: BasicBlock, + ret_reg: Optional[Register]) -> Value: + old_exc = self.builder.alloc_temp(exc_rtuple) + + # Entry block for non-exceptional flow + self.builder.activate_block(main_entry) + if ret_reg: + self.builder.add( + Assign( + ret_reg, + self.builder.add(LoadErrorValue(self.builder.ret_types[-1])) + ) + ) + self.builder.goto(return_entry) + + self.builder.activate_block(return_entry) + self.builder.add(Assign(old_exc, self.builder.add(LoadErrorValue(exc_rtuple)))) + self.builder.goto(finally_block) + + # Entry block for errors + self.builder.activate_block(err_handler) + if ret_reg: + self.builder.add( + Assign( + ret_reg, + self.builder.add(LoadErrorValue(self.builder.ret_types[-1])) + ) + ) + self.builder.add(Assign(old_exc, self.builder.primitive_op(error_catch_op, [], -1))) + self.builder.goto(finally_block) + + return old_exc + + def try_finally_body( + self, finally_block: BasicBlock, finally_body: GenFunc, + ret_reg: Optional[Value], old_exc: Value) -> Tuple[BasicBlock, + 'FinallyNonlocalControl']: + cleanup_block = BasicBlock() + # Compile the finally block with the nonlocal control flow overridden to restore exc_info + self.builder.builder.push_error_handler(cleanup_block) + finally_control = FinallyNonlocalControl( + self.builder.nonlocal_control[-1], ret_reg, old_exc) + self.builder.nonlocal_control.append(finally_control) + self.builder.activate_block(finally_block) + finally_body() + self.builder.nonlocal_control.pop() + + return cleanup_block, finally_control + + def try_finally_resolve_control(self, cleanup_block: BasicBlock, + finally_control: FinallyNonlocalControl, + 
old_exc: Value, ret_reg: Optional[Value]) -> BasicBlock: + """Resolve the control flow out of a finally block. + + This means returning if there was a return, propagating + exceptions, break/continue (soon), or just continuing on. + """ + reraise, rest = BasicBlock(), BasicBlock() + self.builder.add(Branch(old_exc, rest, reraise, Branch.IS_ERROR)) + + # Reraise the exception if there was one + self.builder.activate_block(reraise) + self.builder.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) + self.builder.add(Unreachable()) + self.builder.builder.pop_error_handler() + + # If there was a return, keep returning + if ret_reg: + self.builder.activate_block(rest) + return_block, rest = BasicBlock(), BasicBlock() + self.builder.add(Branch(ret_reg, rest, return_block, Branch.IS_ERROR)) + + self.builder.activate_block(return_block) + self.builder.nonlocal_control[-1].gen_return(self.builder, ret_reg, -1) + + # TODO: handle break/continue + self.builder.activate_block(rest) + out_block = BasicBlock() + self.builder.goto(out_block) + + # If there was an exception, restore again + self.builder.activate_block(cleanup_block) + finally_control.gen_cleanup(self.builder, -1) + self.builder.primitive_op(keep_propagating_op, [], NO_TRACEBACK_LINE_NO) + self.builder.add(Unreachable()) + + return out_block + + def visit_try_finally_stmt(self, try_body: GenFunc, finally_body: GenFunc) -> None: + """Generalized try/finally handling that takes functions to gen the bodies. + + The point of this is to also be able to support with.""" + # Finally is a big pain, because there are so many ways that + # exits can occur. We emit 10+ basic blocks for every finally! 
+ + err_handler, main_entry, return_entry, finally_block = ( + BasicBlock(), BasicBlock(), BasicBlock(), BasicBlock()) + + # Compile the body of the try + ret_reg = self.try_finally_try( + err_handler, return_entry, main_entry, try_body) + + # Set up the entry blocks for the finally statement + old_exc = self.try_finally_entry_blocks( + err_handler, return_entry, main_entry, finally_block, ret_reg) + + # Compile the body of the finally + cleanup_block, finally_control = self.try_finally_body( + finally_block, finally_body, ret_reg, old_exc) + + # Resolve the control flow out of the finally block + out_block = self.try_finally_resolve_control( + cleanup_block, finally_control, old_exc, ret_reg) + + self.builder.activate_block(out_block) + + def visit_try_stmt(self, t: TryStmt) -> None: + # Our compilation strategy for try/except/else/finally is to + # treat try/except/else and try/finally as separate language + # constructs that we compile separately. When we have a + # try/except/else/finally, we treat the try/except/else as the + # body of a try/finally block. + if t.finally_body: + def visit_try_body() -> None: + if t.handlers: + self.visit_try_except_stmt(t) + else: + self.builder.accept(t.body) + body = t.finally_body + + self.visit_try_finally_stmt(visit_try_body, lambda: self.builder.accept(body)) + else: + self.visit_try_except_stmt(t) + + def get_sys_exc_info(self) -> List[Value]: + exc_info = self.builder.primitive_op(get_exc_info_op, [], -1) + return [self.builder.add(TupleGet(exc_info, i, -1)) for i in range(3)] + + def visit_with(self, expr: Expression, target: Optional[Lvalue], + body: GenFunc, line: int) -> None: + + # This is basically a straight transcription of the Python code in PEP 343. + # I don't actually understand why a bunch of it is the way it is. + # We could probably optimize the case where the manager is compiled by us, + # but that is not our common case at all, so. 
+ mgr_v = self.builder.accept(expr) + typ = self.builder.primitive_op(type_op, [mgr_v], line) + exit_ = self.builder.maybe_spill(self.builder.py_get_attr(typ, '__exit__', line)) + value = self.builder.py_call( + self.builder.py_get_attr(typ, '__enter__', line), [mgr_v], line + ) + mgr = self.builder.maybe_spill(mgr_v) + exc = self.builder.maybe_spill_assignable(self.builder.primitive_op(true_op, [], -1)) + + def try_body() -> None: + if target: + self.builder.assign(self.builder.get_assignment_target(target), value, line) + body() + + def except_body() -> None: + self.builder.assign(exc, self.builder.primitive_op(false_op, [], -1), line) + out_block, reraise_block = BasicBlock(), BasicBlock() + self.builder.add_bool_branch( + self.builder.py_call(self.builder.read(exit_), + [self.builder.read(mgr)] + self.get_sys_exc_info(), line), + out_block, + reraise_block + ) + self.builder.activate_block(reraise_block) + self.builder.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) + self.builder.add(Unreachable()) + self.builder.activate_block(out_block) + + def finally_body() -> None: + out_block, exit_block = BasicBlock(), BasicBlock() + self.builder.add( + Branch(self.builder.read(exc), exit_block, out_block, Branch.BOOL_EXPR) + ) + self.builder.activate_block(exit_block) + none = self.builder.none_object() + self.builder.py_call( + self.builder.read(exit_), [self.builder.read(mgr), none, none, none], line + ) + self.builder.goto_and_activate(out_block) + + self.visit_try_finally_stmt( + lambda: self.visit_try_except(try_body, [(None, None, except_body)], None, line), + finally_body) + + def visit_with_stmt(self, o: WithStmt) -> None: + # Generate separate logic for each expr in it, left to right + def generate(i: int) -> None: + if i >= len(o.expr): + self.builder.accept(o.body) + else: + self.visit_with(o.expr[i], o.target[i], lambda: generate(i + 1), o.line) + + generate(0) + + def visit_assert_stmt(self, a: AssertStmt) -> None: + if 
self.builder.options.strip_asserts: + return + cond = self.builder.accept(a.expr) + ok_block, error_block = BasicBlock(), BasicBlock() + self.builder.add_bool_branch(cond, ok_block, error_block) + self.builder.activate_block(error_block) + if a.msg is None: + # Special case (for simpler generated code) + self.builder.add(RaiseStandardError(RaiseStandardError.ASSERTION_ERROR, None, a.line)) + elif isinstance(a.msg, StrExpr): + # Another special case + self.builder.add(RaiseStandardError(RaiseStandardError.ASSERTION_ERROR, a.msg.value, + a.line)) + else: + # The general case -- explicitly construct an exception instance + message = self.builder.accept(a.msg) + exc_type = self.builder.load_module_attr_by_fullname('builtins.AssertionError', a.line) + exc = self.builder.py_call(exc_type, [message], a.line) + self.builder.primitive_op(raise_exception_op, [exc], a.line) + self.builder.add(Unreachable()) + self.builder.activate_block(ok_block) + + def visit_del_stmt(self, o: DelStmt) -> None: + self.visit_del_item(self.builder.get_assignment_target(o.expr), o.line) + + def visit_del_item(self, target: AssignmentTarget, line: int) -> None: + if isinstance(target, AssignmentTargetIndex): + self.builder.gen_method_call( + target.base, + '__delitem__', + [target.index], + result_type=None, + line=line + ) + elif isinstance(target, AssignmentTargetAttr): + key = self.builder.load_static_unicode(target.attr) + self.builder.add(PrimitiveOp([target.obj, key], py_delattr_op, line)) + elif isinstance(target, AssignmentTargetRegister): + # Delete a local by assigning an error value to it, which will + # prompt the insertion of uninit checks. 
+ self.builder.add(Assign(target.register, + self.builder.add(LoadErrorValue(target.type, undefines=True)))) + elif isinstance(target, AssignmentTargetTuple): + for subtarget in target.items: + self.visit_del_item(subtarget, line) From d9e209f625f4359deafddead70009433f5dbbc80 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sat, 22 Feb 2020 18:09:21 +0000 Subject: [PATCH 107/117] [mypyc] Refactor: remove cyclic deps by extracting genops visitor (#8430) Add a simple visitor module that doesn't contain any implementation and just dispatches to various transform implementations. The main benefit is that the implementations don't need to depend on the visitor (and thus also don't need to depend on the other implementations) and that `IRBuilder` doesn't need to depend on the implementations. Also extract the driver function to another module, as this would otherwise cause an import cycle. Some cyclic dependencies remain and wouldn't be hard to get rid of. I wanted to keep this PR relatively simple so I didn't fix them yet. Work on mypyc/mypyc#714. 
--- mypyc/emitmodule.py | 15 +- mypyc/genclass.py | 15 +- mypyc/genexpr.py | 7 +- mypyc/genfunc.py | 9 +- mypyc/genops.py | 339 +++--------------------------------- mypyc/genopsmain.py | 91 ++++++++++ mypyc/genopsvisitor.py | 285 ++++++++++++++++++++++++++++++ mypyc/genstatement.py | 10 +- mypyc/test/test_emitfunc.py | 2 +- mypyc/test/testutil.py | 7 +- 10 files changed, 428 insertions(+), 352 deletions(-) create mode 100644 mypyc/genopsmain.py create mode 100644 mypyc/genopsvisitor.py diff --git a/mypyc/emitmodule.py b/mypyc/emitmodule.py index cb7cd82d9f2f..39b786fccfe7 100644 --- a/mypyc/emitmodule.py +++ b/mypyc/emitmodule.py @@ -19,8 +19,9 @@ from mypy.fscache import FileSystemCache from mypy.util import hash_digest -from mypyc import genops +from mypyc import genopsmain from mypyc.genopsprepare import load_type_map +from mypyc.genopsmapper import Mapper from mypyc.common import ( PREFIX, TOP_LEVEL_NAME, INT_PREFIX, MODULE_PREFIX, shared_lib_name, ) @@ -178,7 +179,7 @@ def parse_and_typecheck( def compile_scc_to_ir( scc: List[MypyFile], result: BuildResult, - mapper: genops.Mapper, + mapper: Mapper, compiler_options: CompilerOptions, errors: Errors, ) -> ModuleIRs: @@ -201,7 +202,7 @@ def compile_scc_to_ir( print("Compiling {}".format(", ".join(x.name for x in scc))) # Generate basic IR, with missing exception and refcount handling. - modules = genops.build_ir( + modules = genopsmain.build_ir( scc, result.graph, result.types, mapper, compiler_options, errors ) if errors.num_errors > 0: @@ -225,7 +226,7 @@ def compile_scc_to_ir( def compile_modules_to_ir( result: BuildResult, - mapper: genops.Mapper, + mapper: Mapper, compiler_options: CompilerOptions, errors: Errors, ) -> ModuleIRs: @@ -260,7 +261,7 @@ def compile_ir_to_c( groups: Groups, modules: ModuleIRs, result: BuildResult, - mapper: genops.Mapper, + mapper: Mapper, compiler_options: CompilerOptions, ) -> Dict[Optional[str], List[Tuple[str, str]]]: """Compile a collection of ModuleIRs to C source text. 
@@ -358,7 +359,7 @@ def write_cache( def load_scc_from_cache( scc: List[MypyFile], result: BuildResult, - mapper: genops.Mapper, + mapper: Mapper, ctx: DeserMaps, ) -> ModuleIRs: """Load IR for an SCC of modules from the cache. @@ -401,7 +402,7 @@ def compile_modules_to_c( """ # Construct a map from modules to what group they belong to group_map = {source.module: lib_name for group, lib_name in groups for source in group} - mapper = genops.Mapper(group_map) + mapper = Mapper(group_map) modules = compile_modules_to_ir(result, mapper, compiler_options, errors) ctext = compile_ir_to_c(groups, modules, result, mapper, compiler_options) diff --git a/mypyc/genclass.py b/mypyc/genclass.py index 009c1009fa0f..76fc4dd4a607 100644 --- a/mypyc/genclass.py +++ b/mypyc/genclass.py @@ -1,5 +1,5 @@ from typing import List, Optional, Union -from typing_extensions import overload, TYPE_CHECKING +from typing_extensions import overload from mypy.nodes import ( ClassDef, FuncDef, OverloadedFuncDef, PassStmt, AssignmentStmt, NameExpr, StrExpr, @@ -23,14 +23,13 @@ from mypyc.genopsutil import ( is_dataclass_decorator, get_func_def, is_dataclass, is_constant, add_self_to_env ) +from mypyc.genfunc import BuildFuncIR from mypyc.common import SELF_NAME - -if TYPE_CHECKING: - from mypyc.genops import IRBuilder +from mypyc.genops import IRBuilder class BuildClassIR: - def __init__(self, builder: 'IRBuilder') -> None: + def __init__(self, builder: IRBuilder) -> None: self.builder = builder self.mapper = builder.mapper self.module_name = builder.module_name @@ -82,7 +81,7 @@ def visit_class_def(self, cdef: ClassDef) -> None: stmt.line) for item in stmt.items: with self.builder.catch_errors(stmt.line): - self.builder.visit_method(cdef, non_ext, get_func_def(item)) + BuildFuncIR(self.builder).visit_method(cdef, non_ext, get_func_def(item)) elif isinstance(stmt, (FuncDef, Decorator, OverloadedFuncDef)): # Ignore plugin generated methods (since they have no # bodies to compile and will need to have 
the bodies @@ -90,7 +89,7 @@ def visit_class_def(self, cdef: ClassDef) -> None: if cdef.info.names[stmt.name].plugin_generated: continue with self.builder.catch_errors(stmt.line): - self.builder.visit_method(cdef, non_ext, get_func_def(stmt)) + BuildFuncIR(self.builder).visit_method(cdef, non_ext, get_func_def(stmt)) elif isinstance(stmt, PassStmt): continue elif isinstance(stmt, AssignmentStmt): @@ -417,7 +416,7 @@ def load_decorated_class(self, cdef: ClassDef, type_obj: Value) -> Value: decorators = cdef.decorators dec_class = type_obj for d in reversed(decorators): - decorator = d.accept(self.builder) + decorator = d.accept(self.builder.visitor) assert isinstance(decorator, Value) dec_class = self.builder.py_call(decorator, [dec_class], dec_class.line) return dec_class diff --git a/mypyc/genexpr.py b/mypyc/genexpr.py index 3f827556c9ac..6f1302b4b667 100644 --- a/mypyc/genexpr.py +++ b/mypyc/genexpr.py @@ -4,7 +4,6 @@ """ from typing import List, Optional, Union -from typing_extensions import TYPE_CHECKING from mypy.nodes import ( Expression, NameExpr, MemberExpr, SuperExpr, CallExpr, UnaryExpr, OpExpr, IndexExpr, @@ -25,13 +24,11 @@ from mypyc.ops_dict import new_dict_op, dict_set_item_op from mypyc.ops_set import new_set_op, set_add_op, set_update_op from mypyc.specialize import specializers - -if TYPE_CHECKING: - from mypyc.genops import IRBuilder +from mypyc.genops import IRBuilder class BuildExpressionIR: - def __init__(self, builder: 'IRBuilder') -> None: + def __init__(self, builder: IRBuilder) -> None: self.builder = builder # Name and attribute references diff --git a/mypyc/genfunc.py b/mypyc/genfunc.py index bfb629ebfbef..3ed76382a0aa 100644 --- a/mypyc/genfunc.py +++ b/mypyc/genfunc.py @@ -4,7 +4,6 @@ """ from typing import Optional, List, Tuple, Union -from typing_extensions import TYPE_CHECKING from mypy.nodes import ( ClassDef, FuncDef, OverloadedFuncDef, Decorator, Var, YieldFromExpr, AwaitExpr, YieldExpr, @@ -31,13 +30,11 @@ from mypyc.genopsutil 
import concrete_arg_kind, is_constant, add_self_to_env from mypyc.genopscontext import FuncInfo, GeneratorClass, ImplicitClass from mypyc.genstatement import BuildStatementIR - -if TYPE_CHECKING: - from mypyc.genops import IRBuilder +from mypyc.genops import IRBuilder class BuildFuncIR: - def __init__(self, builder: 'IRBuilder') -> None: + def __init__(self, builder: IRBuilder) -> None: self.builder = builder self.module_name = builder.module_name self.functions = builder.functions @@ -878,7 +875,7 @@ def load_decorated_func(self, fdef: FuncDef, orig_func_reg: Value) -> Value: decorators = self.builder.fdefs_to_decorators[fdef] func_reg = orig_func_reg for d in reversed(decorators): - decorator = d.accept(self.builder) + decorator = d.accept(self.builder.visitor) assert isinstance(decorator, Value) func_reg = self.builder.py_call(decorator, [func_reg], func_reg.line) return func_reg diff --git a/mypyc/genops.py b/mypyc/genops.py index 99e9d71d1faa..85c88c2b29c3 100644 --- a/mypyc/genops.py +++ b/mypyc/genops.py @@ -1,48 +1,32 @@ -"""Transform a mypy AST to the IR form (Intermediate Representation). +"""Builder class used to transform a mypy AST to the IR form. -For example, consider a function like this: +The IRBuilder class maintains transformation state and provides access +to various helpers used to implement the transform. - def f(x: int) -> int: - return x * 2 + 1 +The top-level transform control logic is in mypyc.genopsmain. -It would be translated to something that conceptually looks like this: - - r0 = 2 - r1 = 1 - r2 = x * r0 :: int - r3 = r2 + r1 :: int - return r3 - -The IR is implemented in mypyc.ops. +mypyc.genopsvisitor.IRBuilderVisitor is used to dispatch based on mypy +AST node type to code that actually does the bulk of the work. For +example, expressions are transformed in mypyc.genexpr and functions are +transformed in mypyc.genfunc. 
""" -from typing import ( - TypeVar, Callable, Dict, List, Tuple, Optional, Union, Sequence, Set, Any, cast -) -from typing_extensions import overload, NoReturn +from typing import Callable, Dict, List, Tuple, Optional, Union, Sequence, Set, Any +from typing_extensions import overload from collections import OrderedDict import importlib.util from mypy.build import Graph from mypy.nodes import ( - MypyFile, SymbolNode, Statement, FuncDef, ReturnStmt, AssignmentStmt, OpExpr, - IntExpr, NameExpr, LDEF, Var, IfStmt, UnaryExpr, ComparisonExpr, WhileStmt, CallExpr, - IndexExpr, Block, Expression, ListExpr, ExpressionStmt, MemberExpr, ForStmt, RefExpr, Lvalue, - BreakStmt, ContinueStmt, ConditionalExpr, OperatorAssignmentStmt, TupleExpr, ClassDef, - TypeInfo, Import, ImportFrom, ImportAll, DictExpr, StrExpr, CastExpr, TempNode, - PassStmt, PromoteExpr, AssignmentExpr, AwaitExpr, BackquoteExpr, AssertStmt, BytesExpr, - ComplexExpr, Decorator, DelStmt, DictionaryComprehension, EllipsisExpr, EnumCallExpr, ExecStmt, - FloatExpr, GeneratorExpr, GlobalDecl, LambdaExpr, ListComprehension, SetComprehension, - NamedTupleExpr, NewTypeExpr, NonlocalDecl, OverloadedFuncDef, PrintStmt, RaiseStmt, - RevealExpr, SetExpr, SliceExpr, StarExpr, SuperExpr, TryStmt, TypeAliasExpr, TypeApplication, - TypeVarExpr, TypedDictExpr, UnicodeExpr, WithStmt, YieldFromExpr, YieldExpr, GDEF, ARG_POS, - ARG_NAMED, + MypyFile, SymbolNode, Statement, OpExpr, IntExpr, NameExpr, LDEF, Var, UnaryExpr, + CallExpr, IndexExpr, Expression, MemberExpr, RefExpr, Lvalue, TupleExpr, ClassDef, + TypeInfo, Import, ImportFrom, ImportAll, Decorator, GeneratorExpr, OverloadedFuncDef, + StarExpr, GDEF, ARG_POS, ARG_NAMED ) from mypy.types import ( Type, Instance, TupleType, AnyType, TypeOfAny, UninhabitedType, get_proper_type ) from mypy.visitor import ExpressionVisitor, StatementVisitor -from mypy.state import strict_optional_set from mypy.util import split_target from mypyc.common import TEMP_ATTR_NAME, TOP_LEVEL_NAME 
@@ -51,7 +35,7 @@ def f(x: int) -> int: BasicBlock, AssignmentTarget, AssignmentTargetRegister, AssignmentTargetIndex, AssignmentTargetAttr, AssignmentTargetTuple, Environment, LoadInt, RType, Value, Register, Op, FuncIR, Assign, Branch, RTuple, Unreachable, - TupleGet, ClassIR, NonExtClassInfo, RInstance, ModuleIR, ModuleIRs, GetAttr, SetAttr, + TupleGet, ClassIR, NonExtClassInfo, RInstance, GetAttr, SetAttr, LoadStatic, InitStatic, INVALID_FUNC_DEF, int_rprimitive, bool_rprimitive, list_rprimitive, is_list_rprimitive, dict_rprimitive, set_rprimitive, str_rprimitive, none_rprimitive, is_none_rprimitive, object_rprimitive, @@ -71,74 +55,23 @@ def f(x: int) -> int: from mypyc.nonlocalcontrol import ( NonlocalControl, BaseNonlocalControl, LoopNonlocalControl, GeneratorNonlocalControl ) -from mypyc.genclass import BuildClassIR -from mypyc.genfunc import BuildFuncIR -from mypyc.genstatement import BuildStatementIR -from mypyc.genexpr import BuildExpressionIR from mypyc.genopscontext import FuncInfo, ImplicitClass from mypyc.genopsmapper import Mapper -from mypyc.genopsvtable import compute_vtable -from mypyc.genopsprepare import build_type_map from mypyc.ir_builder import LowLevelIRBuilder from mypyc.specialize import specialize_function GenFunc = Callable[[], None] -class UnsupportedException(Exception): +class IRVisitor(ExpressionVisitor[Value], StatementVisitor[None]): pass -# The stubs for callable contextmanagers are busted so cast it to the -# right type... 
-F = TypeVar('F', bound=Callable[..., Any]) -strict_optional_dec = cast(Callable[[F], F], strict_optional_set(True)) - - -@strict_optional_dec # Turn on strict optional for any type manipulations we do -def build_ir(modules: List[MypyFile], - graph: Graph, - types: Dict[Expression, Type], - mapper: 'Mapper', - options: CompilerOptions, - errors: Errors) -> ModuleIRs: - - build_type_map(mapper, modules, graph, types, options, errors) - - result = OrderedDict() # type: ModuleIRs - - # Generate IR for all modules. - class_irs = [] - - for module in modules: - # First pass to determine free symbols. - pbv = PreBuildVisitor() - module.accept(pbv) - - # Second pass. - builder = IRBuilder( - module.fullname, types, graph, errors, mapper, pbv, options - ) - builder.visit_mypy_file(module) - module_ir = ModuleIR( - module.fullname, - list(builder.imports), - builder.functions, - builder.classes, - builder.final_names - ) - result[module.fullname] = module_ir - class_irs.extend(builder.classes) - - # Compute vtables. - for cir in class_irs: - if cir.is_ext_class: - compute_vtable(cir) - - return result +class UnsupportedException(Exception): + pass -class IRBuilder(ExpressionVisitor[Value], StatementVisitor[None]): +class IRBuilder: def __init__(self, current_module: str, types: Dict[Expression, Type], @@ -146,6 +79,7 @@ def __init__(self, errors: Errors, mapper: Mapper, pbv: PreBuildVisitor, + visitor: IRVisitor, options: CompilerOptions) -> None: self.builder = LowLevelIRBuilder(current_module, mapper) self.builders = [self.builder] @@ -174,6 +108,8 @@ def __init__(self, self.nested_fitems = pbv.nested_funcs.keys() self.fdefs_to_decorators = pbv.funcs_to_decorators + self.visitor = visitor + # This list operates similarly to a function call stack for nested functions. Whenever a # function definition begins to be generated, a FuncInfo instance is added to the stack, # and information about that function (e.g. 
whether it is nested, its environment class to @@ -298,13 +234,6 @@ def visit_mypy_file(self, mypyfile: MypyFile) -> None: traceback_name="") self.functions.append(func_ir) - def visit_method( - self, cdef: ClassDef, non_ext: Optional[NonExtClassInfo], fdef: FuncDef) -> None: - BuildFuncIR(self).visit_method(cdef, non_ext, fdef) - - def visit_class_def(self, cdef: ClassDef) -> None: - BuildClassIR(self).visit_class_def(cdef) - def add_to_non_ext_dict(self, non_ext: NonExtClassInfo, key: str, val: Value, line: int) -> None: # Add an attribute entry into the class dict of a non-extension class. @@ -408,12 +337,6 @@ def maybe_add_implicit_return(self) -> None: else: self.add_implicit_unreachable() - def visit_func_def(self, fdef: FuncDef) -> None: - BuildFuncIR(self).visit_func_def(fdef) - - def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: - BuildFuncIR(self).visit_overloaded_func_def(o) - def add_implicit_return(self) -> None: block = self.builder.blocks[-1] if not block.terminated: @@ -1106,150 +1029,6 @@ def loop_contents( handle_loop(loop_params) - def visit_decorator(self, dec: Decorator) -> None: - BuildFuncIR(self).visit_decorator(dec) - - def visit_block(self, block: Block) -> None: - BuildStatementIR(self).visit_block(block) - - # Statements - - def visit_expression_stmt(self, stmt: ExpressionStmt) -> None: - BuildStatementIR(self).visit_expression_stmt(stmt) - - def visit_return_stmt(self, stmt: ReturnStmt) -> None: - BuildStatementIR(self).visit_return_stmt(stmt) - - def visit_assignment_stmt(self, stmt: AssignmentStmt) -> None: - BuildStatementIR(self).visit_assignment_stmt(stmt) - - def visit_operator_assignment_stmt(self, stmt: OperatorAssignmentStmt) -> None: - BuildStatementIR(self).visit_operator_assignment_stmt(stmt) - - def visit_if_stmt(self, stmt: IfStmt) -> None: - BuildStatementIR(self).visit_if_stmt(stmt) - - def visit_while_stmt(self, stmt: WhileStmt) -> None: - BuildStatementIR(self).visit_while_stmt(stmt) - - def 
visit_for_stmt(self, stmt: ForStmt) -> None: - BuildStatementIR(self).visit_for_stmt(stmt) - - def visit_break_stmt(self, stmt: BreakStmt) -> None: - BuildStatementIR(self).visit_break_stmt(stmt) - - def visit_continue_stmt(self, stmt: ContinueStmt) -> None: - BuildStatementIR(self).visit_continue_stmt(stmt) - - def visit_raise_stmt(self, stmt: RaiseStmt) -> None: - BuildStatementIR(self).visit_raise_stmt(stmt) - - def visit_try_stmt(self, stmt: TryStmt) -> None: - BuildStatementIR(self).visit_try_stmt(stmt) - - def visit_with_stmt(self, stmt: WithStmt) -> None: - BuildStatementIR(self).visit_with_stmt(stmt) - - def visit_pass_stmt(self, stmt: PassStmt) -> None: - pass - - def visit_assert_stmt(self, stmt: AssertStmt) -> None: - BuildStatementIR(self).visit_assert_stmt(stmt) - - def visit_del_stmt(self, stmt: DelStmt) -> None: - BuildStatementIR(self).visit_del_stmt(stmt) - - def visit_global_decl(self, stmt: GlobalDecl) -> None: - # Pure declaration -- no runtime effect - pass - - def visit_nonlocal_decl(self, stmt: NonlocalDecl) -> None: - # Pure declaration -- no runtime effect - pass - - # Expressions - - def visit_name_expr(self, expr: NameExpr) -> Value: - return BuildExpressionIR(self).visit_name_expr(expr) - - def visit_member_expr(self, expr: MemberExpr) -> Value: - return BuildExpressionIR(self).visit_member_expr(expr) - - def visit_super_expr(self, expr: SuperExpr) -> Value: - return BuildExpressionIR(self).visit_super_expr(expr) - - def visit_call_expr(self, expr: CallExpr) -> Value: - return BuildExpressionIR(self).visit_call_expr(expr) - - def visit_unary_expr(self, expr: UnaryExpr) -> Value: - return BuildExpressionIR(self).visit_unary_expr(expr) - - def visit_op_expr(self, expr: OpExpr) -> Value: - return BuildExpressionIR(self).visit_op_expr(expr) - - def visit_index_expr(self, expr: IndexExpr) -> Value: - return BuildExpressionIR(self).visit_index_expr(expr) - - def visit_conditional_expr(self, expr: ConditionalExpr) -> Value: - return 
BuildExpressionIR(self).visit_conditional_expr(expr) - - def visit_comparison_expr(self, expr: ComparisonExpr) -> Value: - return BuildExpressionIR(self).visit_comparison_expr(expr) - - def visit_int_expr(self, expr: IntExpr) -> Value: - return BuildExpressionIR(self).visit_int_expr(expr) - - def visit_float_expr(self, expr: FloatExpr) -> Value: - return BuildExpressionIR(self).visit_float_expr(expr) - - def visit_complex_expr(self, expr: ComplexExpr) -> Value: - return BuildExpressionIR(self).visit_complex_expr(expr) - - def visit_str_expr(self, expr: StrExpr) -> Value: - return BuildExpressionIR(self).visit_str_expr(expr) - - def visit_bytes_expr(self, expr: BytesExpr) -> Value: - return BuildExpressionIR(self).visit_bytes_expr(expr) - - def visit_ellipsis(self, expr: EllipsisExpr) -> Value: - return BuildExpressionIR(self).visit_ellipsis(expr) - - def visit_list_expr(self, expr: ListExpr) -> Value: - return BuildExpressionIR(self).visit_list_expr(expr) - - def visit_tuple_expr(self, expr: TupleExpr) -> Value: - return BuildExpressionIR(self).visit_tuple_expr(expr) - - def visit_dict_expr(self, expr: DictExpr) -> Value: - return BuildExpressionIR(self).visit_dict_expr(expr) - - def visit_set_expr(self, expr: SetExpr) -> Value: - return BuildExpressionIR(self).visit_set_expr(expr) - - def visit_list_comprehension(self, expr: ListComprehension) -> Value: - return BuildExpressionIR(self).visit_list_comprehension(expr) - - def visit_set_comprehension(self, expr: SetComprehension) -> Value: - return BuildExpressionIR(self).visit_set_comprehension(expr) - - def visit_dictionary_comprehension(self, expr: DictionaryComprehension) -> Value: - return BuildExpressionIR(self).visit_dictionary_comprehension(expr) - - def visit_slice_expr(self, expr: SliceExpr) -> Value: - return BuildExpressionIR(self).visit_slice_expr(expr) - - def visit_generator_expr(self, expr: GeneratorExpr) -> Value: - return BuildExpressionIR(self).visit_generator_expr(expr) - - def 
visit_lambda_expr(self, expr: LambdaExpr) -> Value: - return BuildFuncIR(self).visit_lambda_expr(expr) - - def visit_yield_expr(self, expr: YieldExpr) -> Value: - return BuildFuncIR(self).visit_yield_expr(expr) - - def visit_yield_from_expr(self, o: YieldFromExpr) -> Value: - return BuildFuncIR(self).visit_yield_from_expr(o) - # Builtin function special cases @specialize_function('builtins.globals') @@ -1434,66 +1213,6 @@ def flatten_classes(self, arg: Union[RefExpr, TupleExpr]) -> Optional[List[Class return None return res - def visit_await_expr(self, o: AwaitExpr) -> Value: - return BuildFuncIR(self).visit_await_expr(o) - - # Unimplemented constructs - def visit_assignment_expr(self, o: AssignmentExpr) -> Value: - self.bail("I Am The Walrus (unimplemented)", o.line) - - # Unimplemented constructs that shouldn't come up because they are py2 only - def visit_backquote_expr(self, o: BackquoteExpr) -> Value: - self.bail("Python 2 features are unsupported", o.line) - - def visit_exec_stmt(self, o: ExecStmt) -> None: - self.bail("Python 2 features are unsupported", o.line) - - def visit_print_stmt(self, o: PrintStmt) -> None: - self.bail("Python 2 features are unsupported", o.line) - - def visit_unicode_expr(self, o: UnicodeExpr) -> Value: - self.bail("Python 2 features are unsupported", o.line) - - # Constructs that shouldn't ever show up - def visit_enum_call_expr(self, o: EnumCallExpr) -> Value: - assert False, "can't compile analysis-only expressions" - - def visit__promote_expr(self, o: PromoteExpr) -> Value: - assert False, "can't compile analysis-only expressions" - - def visit_namedtuple_expr(self, o: NamedTupleExpr) -> Value: - assert False, "can't compile analysis-only expressions" - - def visit_newtype_expr(self, o: NewTypeExpr) -> Value: - assert False, "can't compile analysis-only expressions" - - def visit_temp_node(self, o: TempNode) -> Value: - assert False, "can't compile analysis-only expressions" - - def visit_type_alias_expr(self, o: TypeAliasExpr) 
-> Value: - assert False, "can't compile analysis-only expressions" - - def visit_type_application(self, o: TypeApplication) -> Value: - assert False, "can't compile analysis-only expressions" - - def visit_type_var_expr(self, o: TypeVarExpr) -> Value: - assert False, "can't compile analysis-only expressions" - - def visit_typeddict_expr(self, o: TypedDictExpr) -> Value: - assert False, "can't compile analysis-only expressions" - - def visit_reveal_expr(self, o: RevealExpr) -> Value: - assert False, "can't compile analysis-only expressions" - - def visit_var(self, o: Var) -> None: - assert False, "can't compile Var; should have been handled already?" - - def visit_cast_expr(self, o: CastExpr) -> Value: - assert False, "CastExpr should have been handled in CallExpr" - - def visit_star_expr(self, o: StarExpr) -> Value: - assert False, "should have been handled in Tuple/List/Set/DictExpr or CallExpr" - # Helpers def enter(self, fn_info: Union[FuncInfo, str] = '') -> None: @@ -1529,7 +1248,7 @@ def accept(self, node: Union[Statement, Expression]) -> Optional[Value]: with self.catch_errors(node.line): if isinstance(node, Expression): try: - res = node.accept(self) + res = node.accept(self.visitor) res = self.coerce(res, self.node_type(node), node.line) # If we hit an error during compilation, we want to # keep trying, so we can produce more error @@ -1540,7 +1259,7 @@ def accept(self, node: Union[Statement, Expression]) -> Optional[Value]: return res else: try: - node.accept(self) + node.accept(self.visitor) except UnsupportedException: pass return None @@ -1619,13 +1338,3 @@ def warning(self, msg: str, line: int) -> None: def error(self, msg: str, line: int) -> None: self.errors.error(msg, self.module_path, line) - - def bail(self, msg: str, line: int) -> 'NoReturn': - """Reports an error and aborts compilation up until the last accept() call - - (accept() catches the UnsupportedException and keeps on - processing. 
This allows errors to be non-blocking without always - needing to write handling for them. - """ - self.error(msg, line) - raise UnsupportedException() diff --git a/mypyc/genopsmain.py b/mypyc/genopsmain.py new file mode 100644 index 000000000000..2a59e1400780 --- /dev/null +++ b/mypyc/genopsmain.py @@ -0,0 +1,91 @@ +"""Transform a mypy AST to the IR form (Intermediate Representation). + +For example, consider a function like this: + + def f(x: int) -> int: + return x * 2 + 1 + +It would be translated to something that conceptually looks like this: + + r0 = 2 + r1 = 1 + r2 = x * r0 :: int + r3 = r2 + r1 :: int + return r3 + +The IR is implemented in mypyc.ops. + +For the core of the implementation, look at build_ir() below, +mypyc.genops, and mypyc.genopsvisitor. +""" + +from collections import OrderedDict +from typing import List, Dict, Callable, Any, TypeVar, cast + +from mypy.nodes import MypyFile, Expression +from mypy.types import Type +from mypy.state import strict_optional_set +from mypy.build import Graph + +from mypyc.errors import Errors +from mypyc.options import CompilerOptions +from mypyc.prebuildvisitor import PreBuildVisitor +from mypyc.genopsvtable import compute_vtable +from mypyc.genopsprepare import build_type_map +from mypyc.genops import IRBuilder +from mypyc.genopsvisitor import IRBuilderVisitor +from mypyc.ops import ModuleIR, ModuleIRs +from mypyc.genopsmapper import Mapper + + +# The stubs for callable contextmanagers are busted so cast it to the +# right type... 
+F = TypeVar('F', bound=Callable[..., Any]) +strict_optional_dec = cast(Callable[[F], F], strict_optional_set(True)) + + +@strict_optional_dec # Turn on strict optional for any type manipulations we do +def build_ir(modules: List[MypyFile], + graph: Graph, + types: Dict[Expression, Type], + mapper: 'Mapper', + options: CompilerOptions, + errors: Errors) -> ModuleIRs: + + build_type_map(mapper, modules, graph, types, options, errors) + + result = OrderedDict() # type: ModuleIRs + + # Generate IR for all modules. + class_irs = [] + + for module in modules: + # First pass to determine free symbols. + pbv = PreBuildVisitor() + module.accept(pbv) + + # Construct and configure builder objects (cyclic runtime dependency). + visitor = IRBuilderVisitor() + builder = IRBuilder( + module.fullname, types, graph, errors, mapper, pbv, visitor, options + ) + visitor.builder = builder + + # Second pass does the bulk of the work. + builder.visit_mypy_file(module) + module_ir = ModuleIR( + module.fullname, + list(builder.imports), + builder.functions, + builder.classes, + builder.final_names + ) + result[module.fullname] = module_ir + class_irs.extend(builder.classes) + + # Compute vtables. + for cir in class_irs: + if cir.is_ext_class: + compute_vtable(cir) + + return result diff --git a/mypyc/genopsvisitor.py b/mypyc/genopsvisitor.py new file mode 100644 index 000000000000..275c7c40ebb0 --- /dev/null +++ b/mypyc/genopsvisitor.py @@ -0,0 +1,285 @@ +"""Dispatcher used when transforming a mypy AST to the IR form. + +mypyc.genops and mypyc.genopsmain are closely related. 
+""" + +from typing_extensions import NoReturn + +from mypy.nodes import ( + MypyFile, FuncDef, ReturnStmt, AssignmentStmt, OpExpr, + IntExpr, NameExpr, Var, IfStmt, UnaryExpr, ComparisonExpr, WhileStmt, CallExpr, + IndexExpr, Block, ListExpr, ExpressionStmt, MemberExpr, ForStmt, + BreakStmt, ContinueStmt, ConditionalExpr, OperatorAssignmentStmt, TupleExpr, ClassDef, + Import, ImportFrom, ImportAll, DictExpr, StrExpr, CastExpr, TempNode, + PassStmt, PromoteExpr, AssignmentExpr, AwaitExpr, BackquoteExpr, AssertStmt, BytesExpr, + ComplexExpr, Decorator, DelStmt, DictionaryComprehension, EllipsisExpr, EnumCallExpr, ExecStmt, + FloatExpr, GeneratorExpr, GlobalDecl, LambdaExpr, ListComprehension, SetComprehension, + NamedTupleExpr, NewTypeExpr, NonlocalDecl, OverloadedFuncDef, PrintStmt, RaiseStmt, + RevealExpr, SetExpr, SliceExpr, StarExpr, SuperExpr, TryStmt, TypeAliasExpr, TypeApplication, + TypeVarExpr, TypedDictExpr, UnicodeExpr, WithStmt, YieldFromExpr, YieldExpr +) + +from mypyc.ops import Value +from mypyc.genops import IRVisitor, IRBuilder, UnsupportedException +from mypyc.genclass import BuildClassIR +from mypyc.genfunc import BuildFuncIR +from mypyc.genstatement import BuildStatementIR +from mypyc.genexpr import BuildExpressionIR + + +class IRBuilderVisitor(IRVisitor): + """Mypy node visitor that dispatches to node transform implementations. + + This class should have no non-trivial logic. + + This visitor is separated from the rest of code to improve modularity and + to avoid import cycles. + + This is based on the visitor pattern + (https://en.wikipedia.org/wiki/Visitor_pattern). + """ + + # This gets passed to all the implementations and contains all the + # state and many helpers. The attribute is initialized outside + # this class since this class and IRBuilder form a reference loop. 
+ builder = None # type: IRBuilder + + def visit_mypy_file(self, mypyfile: MypyFile) -> None: + self.builder.visit_mypy_file(mypyfile) + + def visit_class_def(self, cdef: ClassDef) -> None: + BuildClassIR(self.builder).visit_class_def(cdef) + + def visit_import(self, node: Import) -> None: + self.builder.visit_import(node) + + def visit_import_from(self, node: ImportFrom) -> None: + self.builder.visit_import_from(node) + + def visit_import_all(self, node: ImportAll) -> None: + self.builder.visit_import_all(node) + + def visit_func_def(self, fdef: FuncDef) -> None: + BuildFuncIR(self.builder).visit_func_def(fdef) + + def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: + BuildFuncIR(self.builder).visit_overloaded_func_def(o) + + def visit_decorator(self, dec: Decorator) -> None: + BuildFuncIR(self.builder).visit_decorator(dec) + + def visit_block(self, block: Block) -> None: + BuildStatementIR(self.builder).visit_block(block) + + # Statements + + def visit_expression_stmt(self, stmt: ExpressionStmt) -> None: + BuildStatementIR(self.builder).visit_expression_stmt(stmt) + + def visit_return_stmt(self, stmt: ReturnStmt) -> None: + BuildStatementIR(self.builder).visit_return_stmt(stmt) + + def visit_assignment_stmt(self, stmt: AssignmentStmt) -> None: + BuildStatementIR(self.builder).visit_assignment_stmt(stmt) + + def visit_operator_assignment_stmt(self, stmt: OperatorAssignmentStmt) -> None: + BuildStatementIR(self.builder).visit_operator_assignment_stmt(stmt) + + def visit_if_stmt(self, stmt: IfStmt) -> None: + BuildStatementIR(self.builder).visit_if_stmt(stmt) + + def visit_while_stmt(self, stmt: WhileStmt) -> None: + BuildStatementIR(self.builder).visit_while_stmt(stmt) + + def visit_for_stmt(self, stmt: ForStmt) -> None: + BuildStatementIR(self.builder).visit_for_stmt(stmt) + + def visit_break_stmt(self, stmt: BreakStmt) -> None: + BuildStatementIR(self.builder).visit_break_stmt(stmt) + + def visit_continue_stmt(self, stmt: ContinueStmt) -> None: + 
BuildStatementIR(self.builder).visit_continue_stmt(stmt) + + def visit_raise_stmt(self, stmt: RaiseStmt) -> None: + BuildStatementIR(self.builder).visit_raise_stmt(stmt) + + def visit_try_stmt(self, stmt: TryStmt) -> None: + BuildStatementIR(self.builder).visit_try_stmt(stmt) + + def visit_with_stmt(self, stmt: WithStmt) -> None: + BuildStatementIR(self.builder).visit_with_stmt(stmt) + + def visit_pass_stmt(self, stmt: PassStmt) -> None: + pass + + def visit_assert_stmt(self, stmt: AssertStmt) -> None: + BuildStatementIR(self.builder).visit_assert_stmt(stmt) + + def visit_del_stmt(self, stmt: DelStmt) -> None: + BuildStatementIR(self.builder).visit_del_stmt(stmt) + + def visit_global_decl(self, stmt: GlobalDecl) -> None: + # Pure declaration -- no runtime effect + pass + + def visit_nonlocal_decl(self, stmt: NonlocalDecl) -> None: + # Pure declaration -- no runtime effect + pass + + # Expressions + + def visit_name_expr(self, expr: NameExpr) -> Value: + return BuildExpressionIR(self.builder).visit_name_expr(expr) + + def visit_member_expr(self, expr: MemberExpr) -> Value: + return BuildExpressionIR(self.builder).visit_member_expr(expr) + + def visit_super_expr(self, expr: SuperExpr) -> Value: + return BuildExpressionIR(self.builder).visit_super_expr(expr) + + def visit_call_expr(self, expr: CallExpr) -> Value: + return BuildExpressionIR(self.builder).visit_call_expr(expr) + + def visit_unary_expr(self, expr: UnaryExpr) -> Value: + return BuildExpressionIR(self.builder).visit_unary_expr(expr) + + def visit_op_expr(self, expr: OpExpr) -> Value: + return BuildExpressionIR(self.builder).visit_op_expr(expr) + + def visit_index_expr(self, expr: IndexExpr) -> Value: + return BuildExpressionIR(self.builder).visit_index_expr(expr) + + def visit_conditional_expr(self, expr: ConditionalExpr) -> Value: + return BuildExpressionIR(self.builder).visit_conditional_expr(expr) + + def visit_comparison_expr(self, expr: ComparisonExpr) -> Value: + return 
BuildExpressionIR(self.builder).visit_comparison_expr(expr) + + def visit_int_expr(self, expr: IntExpr) -> Value: + return BuildExpressionIR(self.builder).visit_int_expr(expr) + + def visit_float_expr(self, expr: FloatExpr) -> Value: + return BuildExpressionIR(self.builder).visit_float_expr(expr) + + def visit_complex_expr(self, expr: ComplexExpr) -> Value: + return BuildExpressionIR(self.builder).visit_complex_expr(expr) + + def visit_str_expr(self, expr: StrExpr) -> Value: + return BuildExpressionIR(self.builder).visit_str_expr(expr) + + def visit_bytes_expr(self, expr: BytesExpr) -> Value: + return BuildExpressionIR(self.builder).visit_bytes_expr(expr) + + def visit_ellipsis(self, expr: EllipsisExpr) -> Value: + return BuildExpressionIR(self.builder).visit_ellipsis(expr) + + def visit_list_expr(self, expr: ListExpr) -> Value: + return BuildExpressionIR(self.builder).visit_list_expr(expr) + + def visit_tuple_expr(self, expr: TupleExpr) -> Value: + return BuildExpressionIR(self.builder).visit_tuple_expr(expr) + + def visit_dict_expr(self, expr: DictExpr) -> Value: + return BuildExpressionIR(self.builder).visit_dict_expr(expr) + + def visit_set_expr(self, expr: SetExpr) -> Value: + return BuildExpressionIR(self.builder).visit_set_expr(expr) + + def visit_list_comprehension(self, expr: ListComprehension) -> Value: + return BuildExpressionIR(self.builder).visit_list_comprehension(expr) + + def visit_set_comprehension(self, expr: SetComprehension) -> Value: + return BuildExpressionIR(self.builder).visit_set_comprehension(expr) + + def visit_dictionary_comprehension(self, expr: DictionaryComprehension) -> Value: + return BuildExpressionIR(self.builder).visit_dictionary_comprehension(expr) + + def visit_slice_expr(self, expr: SliceExpr) -> Value: + return BuildExpressionIR(self.builder).visit_slice_expr(expr) + + def visit_generator_expr(self, expr: GeneratorExpr) -> Value: + return BuildExpressionIR(self.builder).visit_generator_expr(expr) + + def 
visit_lambda_expr(self, expr: LambdaExpr) -> Value: + return BuildFuncIR(self.builder).visit_lambda_expr(expr) + + def visit_yield_expr(self, expr: YieldExpr) -> Value: + return BuildFuncIR(self.builder).visit_yield_expr(expr) + + def visit_yield_from_expr(self, o: YieldFromExpr) -> Value: + return BuildFuncIR(self.builder).visit_yield_from_expr(o) + + def visit_await_expr(self, o: AwaitExpr) -> Value: + return BuildFuncIR(self.builder).visit_await_expr(o) + + # Unimplemented constructs + + def visit_assignment_expr(self, o: AssignmentExpr) -> Value: + self.bail("I Am The Walrus (unimplemented)", o.line) + + # Unimplemented constructs that shouldn't come up because they are py2 only + + def visit_backquote_expr(self, o: BackquoteExpr) -> Value: + self.bail("Python 2 features are unsupported", o.line) + + def visit_exec_stmt(self, o: ExecStmt) -> None: + self.bail("Python 2 features are unsupported", o.line) + + def visit_print_stmt(self, o: PrintStmt) -> None: + self.bail("Python 2 features are unsupported", o.line) + + def visit_unicode_expr(self, o: UnicodeExpr) -> Value: + self.bail("Python 2 features are unsupported", o.line) + + # Constructs that shouldn't ever show up + + def visit_enum_call_expr(self, o: EnumCallExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit__promote_expr(self, o: PromoteExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_namedtuple_expr(self, o: NamedTupleExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_newtype_expr(self, o: NewTypeExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_temp_node(self, o: TempNode) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_type_alias_expr(self, o: TypeAliasExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_type_application(self, o: TypeApplication) -> Value: + assert False, "can't 
compile analysis-only expressions" + + def visit_type_var_expr(self, o: TypeVarExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_typeddict_expr(self, o: TypedDictExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_reveal_expr(self, o: RevealExpr) -> Value: + assert False, "can't compile analysis-only expressions" + + def visit_var(self, o: Var) -> None: + assert False, "can't compile Var; should have been handled already?" + + def visit_cast_expr(self, o: CastExpr) -> Value: + assert False, "CastExpr should have been handled in CallExpr" + + def visit_star_expr(self, o: StarExpr) -> Value: + assert False, "should have been handled in Tuple/List/Set/DictExpr or CallExpr" + + # Helpers + + def bail(self, msg: str, line: int) -> NoReturn: + """Reports an error and aborts compilation up until the last accept() call + + (accept() catches the UnsupportedException and keeps on + processing. This allows errors to be non-blocking without always + needing to write handling for them. 
+ """ + self.builder.error(msg, line) + raise UnsupportedException() diff --git a/mypyc/genstatement.py b/mypyc/genstatement.py index 108cdbafe457..c74d67d9a991 100644 --- a/mypyc/genstatement.py +++ b/mypyc/genstatement.py @@ -1,5 +1,4 @@ from typing import Optional, List, Tuple, Sequence, Callable -from typing_extensions import TYPE_CHECKING from mypy.nodes import ( Block, ExpressionStmt, ReturnStmt, AssignmentStmt, OperatorAssignmentStmt, IfStmt, WhileStmt, @@ -20,16 +19,13 @@ from mypyc.nonlocalcontrol import ( ExceptNonlocalControl, FinallyNonlocalControl, TryFinallyNonlocalControl ) - - -if TYPE_CHECKING: - from mypyc.genops import IRBuilder +from mypyc.genops import IRBuilder GenFunc = Callable[[], None] class BuildStatementIR: - def __init__(self, builder: 'IRBuilder') -> None: + def __init__(self, builder: IRBuilder) -> None: self.builder = builder def visit_block(self, block: Block) -> None: @@ -50,7 +46,7 @@ def visit_expression_stmt(self, stmt: ExpressionStmt) -> None: # Docstring. Ignore return # ExpressionStmts do not need to be coerced like other Expressions. 
- stmt.expr.accept(self.builder) + stmt.expr.accept(self.builder.visitor) def visit_return_stmt(self, stmt: ReturnStmt) -> None: if stmt.expr: diff --git a/mypyc/test/test_emitfunc.py b/mypyc/test/test_emitfunc.py index 6a74aec16f44..1ebcb686a9d6 100644 --- a/mypyc/test/test_emitfunc.py +++ b/mypyc/test/test_emitfunc.py @@ -12,7 +12,7 @@ ClassIR, RInstance, SetAttr, Op, Value, int_rprimitive, bool_rprimitive, list_rprimitive, dict_rprimitive, object_rprimitive, FuncSignature, ) -from mypyc.genops import compute_vtable +from mypyc.genopsvtable import compute_vtable from mypyc.emit import Emitter, EmitterContext from mypyc.emitfunc import generate_native_function, FunctionEmitterVisitor from mypyc.ops_primitive import binary_ops diff --git a/mypyc/test/testutil.py b/mypyc/test/testutil.py index 4bc5cbe6a04d..4f6f44b3928e 100644 --- a/mypyc/test/testutil.py +++ b/mypyc/test/testutil.py @@ -14,10 +14,11 @@ from mypy.test.config import test_temp_dir from mypy.test.helpers import assert_string_arrays_equal -from mypyc import genops +from mypyc.genopsmain import build_ir from mypyc.options import CompilerOptions from mypyc.ops import FuncIR from mypyc.errors import Errors +from mypyc.genopsmapper import Mapper from mypyc.test.config import test_data_prefix # The builtins stub used during icode generation test cases. @@ -105,9 +106,9 @@ def build_ir_for_single_file(input_lines: List[str], raise CompileError(result.errors) errors = Errors() - modules = genops.build_ir( + modules = build_ir( [result.files['__main__']], result.graph, result.types, - genops.Mapper({'__main__': None}), + Mapper({'__main__': None}), compiler_options, errors) assert errors.num_errors == 0 From ce24783d1109883ee8aa0e567a911aa4df0562d2 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sat, 22 Feb 2020 19:24:10 +0000 Subject: [PATCH 108/117] [mypyc] Refactor: extract builtin function specializers from genops (#8431) This also gets rid of a cyclic import. Work on mypyc/mypyc#714. 
--- mypyc/genops.py | 178 ++-------------------------------------- mypyc/specialize.py | 195 +++++++++++++++++++++++++++++++++++++++++--- 2 files changed, 192 insertions(+), 181 deletions(-) diff --git a/mypyc/genops.py b/mypyc/genops.py index 85c88c2b29c3..611dc648c31c 100644 --- a/mypyc/genops.py +++ b/mypyc/genops.py @@ -24,7 +24,7 @@ StarExpr, GDEF, ARG_POS, ARG_NAMED ) from mypy.types import ( - Type, Instance, TupleType, AnyType, TypeOfAny, UninhabitedType, get_proper_type + Type, Instance, TupleType, UninhabitedType, get_proper_type ) from mypy.visitor import ExpressionVisitor, StatementVisitor from mypy.util import split_target @@ -33,14 +33,12 @@ from mypyc.prebuildvisitor import PreBuildVisitor from mypyc.ops import ( BasicBlock, AssignmentTarget, AssignmentTargetRegister, AssignmentTargetIndex, - AssignmentTargetAttr, AssignmentTargetTuple, Environment, LoadInt, RType, Value, Register, Op, - FuncIR, Assign, Branch, RTuple, Unreachable, - TupleGet, ClassIR, NonExtClassInfo, RInstance, GetAttr, SetAttr, - LoadStatic, InitStatic, INVALID_FUNC_DEF, int_rprimitive, - bool_rprimitive, list_rprimitive, is_list_rprimitive, dict_rprimitive, set_rprimitive, - str_rprimitive, none_rprimitive, is_none_rprimitive, object_rprimitive, - PrimitiveOp, OpDescription, is_object_rprimitive, - FuncSignature, NAMESPACE_MODULE, RaiseStandardError, FuncDecl + AssignmentTargetAttr, AssignmentTargetTuple, Environment, LoadInt, RType, Value, + Register, Op, FuncIR, Assign, Branch, RTuple, Unreachable, TupleGet, ClassIR, + NonExtClassInfo, RInstance, GetAttr, SetAttr, LoadStatic, InitStatic, INVALID_FUNC_DEF, + int_rprimitive, is_list_rprimitive, dict_rprimitive, none_rprimitive, + is_none_rprimitive, object_rprimitive, PrimitiveOp, OpDescription, + is_object_rprimitive, FuncSignature, NAMESPACE_MODULE, RaiseStandardError, FuncDecl ) from mypyc.ops_primitive import func_ops from mypyc.ops_list import list_append_op, list_len_op, new_list_op, to_list, list_pop_last @@ -58,7 +56,6 @@ 
from mypyc.genopscontext import FuncInfo, ImplicitClass from mypyc.genopsmapper import Mapper from mypyc.ir_builder import LowLevelIRBuilder -from mypyc.specialize import specialize_function GenFunc = Callable[[], None] @@ -1029,167 +1026,6 @@ def loop_contents( handle_loop(loop_params) - # Builtin function special cases - - @specialize_function('builtins.globals') - def translate_globals(self, expr: CallExpr, callee: RefExpr) -> Optional[Value]: - # Special case builtins.globals - if len(expr.args) == 0: - return self.load_globals_dict() - return None - - @specialize_function('builtins.len') - def translate_len( - self, expr: CallExpr, callee: RefExpr) -> Optional[Value]: - # Special case builtins.len - if (len(expr.args) == 1 - and expr.arg_kinds == [ARG_POS]): - expr_rtype = self.node_type(expr.args[0]) - if isinstance(expr_rtype, RTuple): - # len() of fixed-length tuple can be trivially determined statically, - # though we still need to evaluate it. - self.accept(expr.args[0]) - return self.add(LoadInt(len(expr_rtype.types))) - return None - - # Special cases for things that consume iterators where we know we - # can safely compile a generator into a list. 
- @specialize_function('builtins.tuple') - @specialize_function('builtins.set') - @specialize_function('builtins.dict') - @specialize_function('builtins.sum') - @specialize_function('builtins.min') - @specialize_function('builtins.max') - @specialize_function('builtins.sorted') - @specialize_function('collections.OrderedDict') - @specialize_function('join', str_rprimitive) - @specialize_function('extend', list_rprimitive) - @specialize_function('update', dict_rprimitive) - @specialize_function('update', set_rprimitive) - def translate_safe_generator_call(self, expr: CallExpr, callee: RefExpr) -> Optional[Value]: - if (len(expr.args) > 0 - and expr.arg_kinds[0] == ARG_POS - and isinstance(expr.args[0], GeneratorExpr)): - if isinstance(callee, MemberExpr): - return self.gen_method_call( - self.accept(callee.expr), callee.name, - ([self.translate_list_comprehension(expr.args[0])] - + [self.accept(arg) for arg in expr.args[1:]]), - self.node_type(expr), expr.line, expr.arg_kinds, expr.arg_names) - else: - return self.call_refexpr_with_args( - expr, callee, - ([self.translate_list_comprehension(expr.args[0])] - + [self.accept(arg) for arg in expr.args[1:]])) - return None - - @specialize_function('builtins.any') - def translate_any_call(self, expr: CallExpr, callee: RefExpr) -> Optional[Value]: - if (len(expr.args) == 1 - and expr.arg_kinds == [ARG_POS] - and isinstance(expr.args[0], GeneratorExpr)): - return self.any_all_helper(expr.args[0], false_op, lambda x: x, true_op) - return None - - @specialize_function('builtins.all') - def translate_all_call(self, expr: CallExpr, callee: RefExpr) -> Optional[Value]: - if (len(expr.args) == 1 - and expr.arg_kinds == [ARG_POS] - and isinstance(expr.args[0], GeneratorExpr)): - return self.any_all_helper(expr.args[0], - true_op, - lambda x: self.unary_op(x, 'not', expr.line), - false_op) - return None - - # Special case for 'dataclasses.field' and 'attr.Factory' function calls - # because the results of such calls are typechecked 
by mypy using the types - # of the arguments to their respective functions, resulting in attempted - # coercions by mypyc that throw a runtime error. - @specialize_function('dataclasses.field') - @specialize_function('attr.Factory') - def translate_dataclasses_field_call(self, expr: CallExpr, callee: RefExpr) -> Optional[Value]: - self.types[expr] = AnyType(TypeOfAny.from_error) - return None - - def any_all_helper(self, - gen: GeneratorExpr, - initial_value_op: OpDescription, - modify: Callable[[Value], Value], - new_value_op: OpDescription) -> Value: - retval = self.alloc_temp(bool_rprimitive) - self.assign(retval, self.primitive_op(initial_value_op, [], -1), -1) - loop_params = list(zip(gen.indices, gen.sequences, gen.condlists)) - true_block, false_block, exit_block = BasicBlock(), BasicBlock(), BasicBlock() - - def gen_inner_stmts() -> None: - comparison = modify(self.accept(gen.left_expr)) - self.add_bool_branch(comparison, true_block, false_block) - self.activate_block(true_block) - self.assign(retval, self.primitive_op(new_value_op, [], -1), -1) - self.goto(exit_block) - self.activate_block(false_block) - - self.comprehension_helper(loop_params, gen_inner_stmts, gen.line) - self.goto_and_activate(exit_block) - - return retval - - # Special case for calling next() on a generator expression, an - # idiom that shows up some in mypy. - # - # For example, next(x for x in l if x.id == 12, None) will - # generate code that searches l for an element where x.id == 12 - # and produce the first such object, or None if no such element - # exists. 
- @specialize_function('builtins.next') - def translate_next_call(self, expr: CallExpr, callee: RefExpr) -> Optional[Value]: - if not (expr.arg_kinds in ([ARG_POS], [ARG_POS, ARG_POS]) - and isinstance(expr.args[0], GeneratorExpr)): - return None - - gen = expr.args[0] - - retval = self.alloc_temp(self.node_type(expr)) - default_val = None - if len(expr.args) > 1: - default_val = self.accept(expr.args[1]) - - exit_block = BasicBlock() - - def gen_inner_stmts() -> None: - # next takes the first element of the generator, so if - # something gets produced, we are done. - self.assign(retval, self.accept(gen.left_expr), gen.left_expr.line) - self.goto(exit_block) - - loop_params = list(zip(gen.indices, gen.sequences, gen.condlists)) - self.comprehension_helper(loop_params, gen_inner_stmts, gen.line) - - # Now we need the case for when nothing got hit. If there was - # a default value, we produce it, and otherwise we raise - # StopIteration. - if default_val: - self.assign(retval, default_val, gen.left_expr.line) - self.goto(exit_block) - else: - self.add(RaiseStandardError(RaiseStandardError.STOP_ITERATION, None, expr.line)) - self.add(Unreachable()) - - self.activate_block(exit_block) - return retval - - @specialize_function('builtins.isinstance') - def translate_isinstance(self, expr: CallExpr, callee: RefExpr) -> Optional[Value]: - # Special case builtins.isinstance - if (len(expr.args) == 2 - and expr.arg_kinds == [ARG_POS, ARG_POS] - and isinstance(expr.args[1], (RefExpr, TupleExpr))): - irs = self.flatten_classes(expr.args[1]) - if irs is not None: - return self.builder.isinstance_helper(self.accept(expr.args[0]), irs, expr.line) - return None - def flatten_classes(self, arg: Union[RefExpr, TupleExpr]) -> Optional[List[ClassIR]]: """Flatten classes in isinstance(obj, (A, (B, C))). 
diff --git a/mypyc/specialize.py b/mypyc/specialize.py index d2b796439d46..2867debc4ea8 100644 --- a/mypyc/specialize.py +++ b/mypyc/specialize.py @@ -1,9 +1,9 @@ -"""General infrastructure for special casing calls to builtin functions. +"""Special case IR generation of calls to specific builtin functions. Most special cases should be handled using the data driven "primitive ops" system, but certain operations require special handling that has access to the AST/IR directly and can make decisions/optimizations -based on it. +based on it. These special cases can be implemented here. For example, we use specializers to statically emit the length of a fixed length tuple and to emit optimized code for any()/all() calls with @@ -13,14 +13,16 @@ """ from typing import Callable, Optional, Dict, Tuple -from typing_extensions import TYPE_CHECKING -from mypy.nodes import CallExpr, RefExpr +from mypy.nodes import CallExpr, RefExpr, MemberExpr, TupleExpr, GeneratorExpr, ARG_POS +from mypy.types import AnyType, TypeOfAny -from mypyc.ops import Value, RType - -if TYPE_CHECKING: - from mypyc.genops import IRBuilder # noqa +from mypyc.ops import ( + Value, RType, RTuple, BasicBlock, LoadInt, RaiseStandardError, Unreachable, OpDescription, + str_rprimitive, list_rprimitive, dict_rprimitive, set_rprimitive, bool_rprimitive +) +from mypyc.ops_misc import true_op, false_op +from mypyc.genops import IRBuilder # Specializers are attempted before compiling the arguments to the @@ -30,11 +32,12 @@ # # Specializers take three arguments: the IRBuilder, the CallExpr being # compiled, and the RefExpr that is the left hand side of the call. +Specializer = Callable[['IRBuilder', CallExpr, RefExpr], Optional[Value]] + +# Dictionary containing all configured specializers. # # Specializers can operate on methods as well, and are keyed on the # name and RType in that case. 
-Specializer = Callable[['IRBuilder', CallExpr, RefExpr], Optional[Value]] - specializers = {} # type: Dict[Tuple[str, Optional[RType]], Specializer] @@ -45,3 +48,175 @@ def wrapper(f: Specializer) -> Specializer: specializers[name, typ] = f return f return wrapper + + +@specialize_function('builtins.globals') +def translate_globals(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Optional[Value]: + # Special case builtins.globals + if len(expr.args) == 0: + return builder.load_globals_dict() + return None + + +@specialize_function('builtins.len') +def translate_len( + builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Optional[Value]: + # Special case builtins.len + if (len(expr.args) == 1 + and expr.arg_kinds == [ARG_POS]): + expr_rtype = builder.node_type(expr.args[0]) + if isinstance(expr_rtype, RTuple): + # len() of fixed-length tuple can be trivially determined statically, + # though we still need to evaluate it. + builder.accept(expr.args[0]) + return builder.add(LoadInt(len(expr_rtype.types))) + return None + + +@specialize_function('builtins.tuple') +@specialize_function('builtins.set') +@specialize_function('builtins.dict') +@specialize_function('builtins.sum') +@specialize_function('builtins.min') +@specialize_function('builtins.max') +@specialize_function('builtins.sorted') +@specialize_function('collections.OrderedDict') +@specialize_function('join', str_rprimitive) +@specialize_function('extend', list_rprimitive) +@specialize_function('update', dict_rprimitive) +@specialize_function('update', set_rprimitive) +def translate_safe_generator_call( + builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Optional[Value]: + # Special cases for things that consume iterators where we know we + # can safely compile a generator into a list. 
+ if (len(expr.args) > 0 + and expr.arg_kinds[0] == ARG_POS + and isinstance(expr.args[0], GeneratorExpr)): + if isinstance(callee, MemberExpr): + return builder.gen_method_call( + builder.accept(callee.expr), callee.name, + ([builder.translate_list_comprehension(expr.args[0])] + + [builder.accept(arg) for arg in expr.args[1:]]), + builder.node_type(expr), expr.line, expr.arg_kinds, expr.arg_names) + else: + return builder.call_refexpr_with_args( + expr, callee, + ([builder.translate_list_comprehension(expr.args[0])] + + [builder.accept(arg) for arg in expr.args[1:]])) + return None + + +@specialize_function('builtins.any') +def translate_any_call(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Optional[Value]: + if (len(expr.args) == 1 + and expr.arg_kinds == [ARG_POS] + and isinstance(expr.args[0], GeneratorExpr)): + return any_all_helper(builder, expr.args[0], false_op, lambda x: x, true_op) + return None + + +@specialize_function('builtins.all') +def translate_all_call(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Optional[Value]: + if (len(expr.args) == 1 + and expr.arg_kinds == [ARG_POS] + and isinstance(expr.args[0], GeneratorExpr)): + return any_all_helper( + builder, expr.args[0], + true_op, + lambda x: builder.unary_op(x, 'not', expr.line), + false_op + ) + return None + + +def any_all_helper(builder: IRBuilder, + gen: GeneratorExpr, + initial_value_op: OpDescription, + modify: Callable[[Value], Value], + new_value_op: OpDescription) -> Value: + retval = builder.alloc_temp(bool_rprimitive) + builder.assign(retval, builder.primitive_op(initial_value_op, [], -1), -1) + loop_params = list(zip(gen.indices, gen.sequences, gen.condlists)) + true_block, false_block, exit_block = BasicBlock(), BasicBlock(), BasicBlock() + + def gen_inner_stmts() -> None: + comparison = modify(builder.accept(gen.left_expr)) + builder.add_bool_branch(comparison, true_block, false_block) + builder.activate_block(true_block) + builder.assign(retval, 
builder.primitive_op(new_value_op, [], -1), -1) + builder.goto(exit_block) + builder.activate_block(false_block) + + builder.comprehension_helper(loop_params, gen_inner_stmts, gen.line) + builder.goto_and_activate(exit_block) + + return retval + + +@specialize_function('dataclasses.field') +@specialize_function('attr.Factory') +def translate_dataclasses_field_call( + builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Optional[Value]: + # Special case for 'dataclasses.field' and 'attr.Factory' function calls + # because the results of such calls are typechecked by mypy using the types + # of the arguments to their respective functions, resulting in attempted + # coercions by mypyc that throw a runtime error. + builder.types[expr] = AnyType(TypeOfAny.from_error) + return None + + +@specialize_function('builtins.next') +def translate_next_call(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Optional[Value]: + # Special case for calling next() on a generator expression, an + # idiom that shows up some in mypy. + # + # For example, next(x for x in l if x.id == 12, None) will + # generate code that searches l for an element where x.id == 12 + # and produce the first such object, or None if no such element + # exists. + if not (expr.arg_kinds in ([ARG_POS], [ARG_POS, ARG_POS]) + and isinstance(expr.args[0], GeneratorExpr)): + return None + + gen = expr.args[0] + + retval = builder.alloc_temp(builder.node_type(expr)) + default_val = None + if len(expr.args) > 1: + default_val = builder.accept(expr.args[1]) + + exit_block = BasicBlock() + + def gen_inner_stmts() -> None: + # next takes the first element of the generator, so if + # something gets produced, we are done. + builder.assign(retval, builder.accept(gen.left_expr), gen.left_expr.line) + builder.goto(exit_block) + + loop_params = list(zip(gen.indices, gen.sequences, gen.condlists)) + builder.comprehension_helper(loop_params, gen_inner_stmts, gen.line) + + # Now we need the case for when nothing got hit. 
If there was + # a default value, we produce it, and otherwise we raise + # StopIteration. + if default_val: + builder.assign(retval, default_val, gen.left_expr.line) + builder.goto(exit_block) + else: + builder.add(RaiseStandardError(RaiseStandardError.STOP_ITERATION, None, expr.line)) + builder.add(Unreachable()) + + builder.activate_block(exit_block) + return retval + + +@specialize_function('builtins.isinstance') +def translate_isinstance(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Optional[Value]: + # Special case builtins.isinstance + if (len(expr.args) == 2 + and expr.arg_kinds == [ARG_POS, ARG_POS] + and isinstance(expr.args[1], (RefExpr, TupleExpr))): + irs = builder.flatten_classes(expr.args[1]) + if irs is not None: + return builder.builder.isinstance_helper(builder.accept(expr.args[0]), irs, expr.line) + return None From 14ac8af8f4b6473541427beabb9c2ce4337eca68 Mon Sep 17 00:00:00 2001 From: Jukka Lehtosalo Date: Sat, 22 Feb 2020 20:18:10 +0000 Subject: [PATCH 109/117] [mypyc] Refactor methods into top-level functions in mypyc.genstatement (#8432) Also experiment with formatting long import statements with one name per line. Work on mypyc/mypyc#714. 
--- mypyc/genfunc.py | 6 +- mypyc/genopsvisitor.py | 48 +- mypyc/genstatement.py | 1009 +++++++++++++++++++++------------------- 3 files changed, 559 insertions(+), 504 deletions(-) diff --git a/mypyc/genfunc.py b/mypyc/genfunc.py index 3ed76382a0aa..58cca370e364 100644 --- a/mypyc/genfunc.py +++ b/mypyc/genfunc.py @@ -29,7 +29,7 @@ from mypyc.sametype import is_same_method_signature from mypyc.genopsutil import concrete_arg_kind, is_constant, add_self_to_env from mypyc.genopscontext import FuncInfo, GeneratorClass, ImplicitClass -from mypyc.genstatement import BuildStatementIR +from mypyc.genstatement import transform_try_except from mypyc.genops import IRBuilder @@ -852,8 +852,8 @@ def else_body() -> None: self.builder.nonlocal_control[-1].gen_break(self.builder, o.line) self.builder.push_loop_stack(loop_block, done_block) - BuildStatementIR(self.builder).visit_try_except( - try_body, [(None, None, except_body)], else_body, o.line + transform_try_except( + self.builder, try_body, [(None, None, except_body)], else_body, o.line ) self.builder.pop_loop_stack() diff --git a/mypyc/genopsvisitor.py b/mypyc/genopsvisitor.py index 275c7c40ebb0..4735b55350ea 100644 --- a/mypyc/genopsvisitor.py +++ b/mypyc/genopsvisitor.py @@ -23,7 +23,23 @@ from mypyc.genops import IRVisitor, IRBuilder, UnsupportedException from mypyc.genclass import BuildClassIR from mypyc.genfunc import BuildFuncIR -from mypyc.genstatement import BuildStatementIR +from mypyc.genstatement import ( + transform_block, + transform_expression_stmt, + transform_return_stmt, + transform_assignment_stmt, + transform_operator_assignment_stmt, + transform_if_stmt, + transform_while_stmt, + transform_for_stmt, + transform_break_stmt, + transform_continue_stmt, + transform_raise_stmt, + transform_try_stmt, + transform_with_stmt, + transform_assert_stmt, + transform_del_stmt, +) from mypyc.genexpr import BuildExpressionIR @@ -69,54 +85,54 @@ def visit_decorator(self, dec: Decorator) -> None: 
BuildFuncIR(self.builder).visit_decorator(dec) def visit_block(self, block: Block) -> None: - BuildStatementIR(self.builder).visit_block(block) + transform_block(self.builder, block) # Statements def visit_expression_stmt(self, stmt: ExpressionStmt) -> None: - BuildStatementIR(self.builder).visit_expression_stmt(stmt) + transform_expression_stmt(self.builder, stmt) def visit_return_stmt(self, stmt: ReturnStmt) -> None: - BuildStatementIR(self.builder).visit_return_stmt(stmt) + transform_return_stmt(self.builder, stmt) def visit_assignment_stmt(self, stmt: AssignmentStmt) -> None: - BuildStatementIR(self.builder).visit_assignment_stmt(stmt) + transform_assignment_stmt(self.builder, stmt) def visit_operator_assignment_stmt(self, stmt: OperatorAssignmentStmt) -> None: - BuildStatementIR(self.builder).visit_operator_assignment_stmt(stmt) + transform_operator_assignment_stmt(self.builder, stmt) def visit_if_stmt(self, stmt: IfStmt) -> None: - BuildStatementIR(self.builder).visit_if_stmt(stmt) + transform_if_stmt(self.builder, stmt) def visit_while_stmt(self, stmt: WhileStmt) -> None: - BuildStatementIR(self.builder).visit_while_stmt(stmt) + transform_while_stmt(self.builder, stmt) def visit_for_stmt(self, stmt: ForStmt) -> None: - BuildStatementIR(self.builder).visit_for_stmt(stmt) + transform_for_stmt(self.builder, stmt) def visit_break_stmt(self, stmt: BreakStmt) -> None: - BuildStatementIR(self.builder).visit_break_stmt(stmt) + transform_break_stmt(self.builder, stmt) def visit_continue_stmt(self, stmt: ContinueStmt) -> None: - BuildStatementIR(self.builder).visit_continue_stmt(stmt) + transform_continue_stmt(self.builder, stmt) def visit_raise_stmt(self, stmt: RaiseStmt) -> None: - BuildStatementIR(self.builder).visit_raise_stmt(stmt) + transform_raise_stmt(self.builder, stmt) def visit_try_stmt(self, stmt: TryStmt) -> None: - BuildStatementIR(self.builder).visit_try_stmt(stmt) + transform_try_stmt(self.builder, stmt) def visit_with_stmt(self, stmt: WithStmt) -> 
None: - BuildStatementIR(self.builder).visit_with_stmt(stmt) + transform_with_stmt(self.builder, stmt) def visit_pass_stmt(self, stmt: PassStmt) -> None: pass def visit_assert_stmt(self, stmt: AssertStmt) -> None: - BuildStatementIR(self.builder).visit_assert_stmt(stmt) + transform_assert_stmt(self.builder, stmt) def visit_del_stmt(self, stmt: DelStmt) -> None: - BuildStatementIR(self.builder).visit_del_stmt(stmt) + transform_del_stmt(self.builder, stmt) def visit_global_decl(self, stmt: GlobalDecl) -> None: # Pure declaration -- no runtime effect diff --git a/mypyc/genstatement.py b/mypyc/genstatement.py index c74d67d9a991..ced764fbbe5d 100644 --- a/mypyc/genstatement.py +++ b/mypyc/genstatement.py @@ -24,499 +24,538 @@ GenFunc = Callable[[], None] -class BuildStatementIR: - def __init__(self, builder: IRBuilder) -> None: - self.builder = builder - - def visit_block(self, block: Block) -> None: - if not block.is_unreachable: - for stmt in block.body: - self.builder.accept(stmt) - # Raise a RuntimeError if we hit a non-empty unreachable block. - # Don't complain about empty unreachable blocks, since mypy inserts - # those after `if MYPY`. - elif block.body: - self.builder.add(RaiseStandardError(RaiseStandardError.RUNTIME_ERROR, - 'Reached allegedly unreachable code!', - block.line)) - self.builder.add(Unreachable()) - - def visit_expression_stmt(self, stmt: ExpressionStmt) -> None: - if isinstance(stmt.expr, StrExpr): - # Docstring. Ignore - return - # ExpressionStmts do not need to be coerced like other Expressions. 
- stmt.expr.accept(self.builder.visitor) - - def visit_return_stmt(self, stmt: ReturnStmt) -> None: - if stmt.expr: - retval = self.builder.accept(stmt.expr) - else: - retval = self.builder.builder.none() - retval = self.builder.coerce(retval, self.builder.ret_types[-1], stmt.line) - self.builder.nonlocal_control[-1].gen_return(self.builder, retval, stmt.line) - - def visit_assignment_stmt(self, stmt: AssignmentStmt) -> None: - assert len(stmt.lvalues) >= 1 - self.builder.disallow_class_assignments(stmt.lvalues, stmt.line) - lvalue = stmt.lvalues[0] - if stmt.type and isinstance(stmt.rvalue, TempNode): - # This is actually a variable annotation without initializer. Don't generate - # an assignment but we need to call get_assignment_target since it adds a - # name binding as a side effect. - self.builder.get_assignment_target(lvalue, stmt.line) - return - - line = stmt.rvalue.line - rvalue_reg = self.builder.accept(stmt.rvalue) - if self.builder.non_function_scope() and stmt.is_final_def: - self.builder.init_final_static(lvalue, rvalue_reg) - for lvalue in stmt.lvalues: - target = self.builder.get_assignment_target(lvalue) - self.builder.assign(target, rvalue_reg, line) - - def visit_operator_assignment_stmt(self, stmt: OperatorAssignmentStmt) -> None: - """Operator assignment statement such as x += 1""" - self.builder.disallow_class_assignments([stmt.lvalue], stmt.line) - target = self.builder.get_assignment_target(stmt.lvalue) - target_value = self.builder.read(target, stmt.line) - rreg = self.builder.accept(stmt.rvalue) - # the Python parser strips the '=' from operator assignment statements, so re-add it - op = stmt.op + '=' - res = self.builder.binary_op(target_value, rreg, op, stmt.line) - # usually operator assignments are done in-place - # but when target doesn't support that we need to manually assign - self.builder.assign(target, res, res.line) - - def visit_if_stmt(self, stmt: IfStmt) -> None: - if_body, next = BasicBlock(), BasicBlock() - else_body = 
BasicBlock() if stmt.else_body else next - - # If statements are normalized - assert len(stmt.expr) == 1 - - self.builder.process_conditional(stmt.expr[0], if_body, else_body) - self.builder.activate_block(if_body) - self.builder.accept(stmt.body[0]) - self.builder.goto(next) - if stmt.else_body: - self.builder.activate_block(else_body) - self.builder.accept(stmt.else_body) - self.builder.goto(next) - self.builder.activate_block(next) - - def visit_while_stmt(self, s: WhileStmt) -> None: - body, next, top, else_block = BasicBlock(), BasicBlock(), BasicBlock(), BasicBlock() - normal_loop_exit = else_block if s.else_body is not None else next - - self.builder.push_loop_stack(top, next) - - # Split block so that we get a handle to the top of the loop. - self.builder.goto_and_activate(top) - self.builder.process_conditional(s.expr, body, normal_loop_exit) - - self.builder.activate_block(body) - self.builder.accept(s.body) - # Add branch to the top at the end of the body. - self.builder.goto(top) - - self.builder.pop_loop_stack() - - if s.else_body is not None: - self.builder.activate_block(else_block) - self.builder.accept(s.else_body) - self.builder.goto(next) - - self.builder.activate_block(next) - - def visit_for_stmt(self, s: ForStmt) -> None: - def body() -> None: - self.builder.accept(s.body) - - def else_block() -> None: - assert s.else_body is not None - self.builder.accept(s.else_body) - - self.builder.for_loop_helper(s.index, s.expr, body, - else_block if s.else_body else None, s.line) - - def visit_break_stmt(self, node: BreakStmt) -> None: - self.builder.nonlocal_control[-1].gen_break(self.builder, node.line) - - def visit_continue_stmt(self, node: ContinueStmt) -> None: - self.builder.nonlocal_control[-1].gen_continue(self.builder, node.line) - - def visit_raise_stmt(self, s: RaiseStmt) -> None: - if s.expr is None: - self.builder.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) - self.builder.add(Unreachable()) - return - - exc = 
self.builder.accept(s.expr) - self.builder.primitive_op(raise_exception_op, [exc], s.line) - self.builder.add(Unreachable()) - - def visit_try_except(self, +def transform_block(builder: IRBuilder, block: Block) -> None: + if not block.is_unreachable: + for stmt in block.body: + builder.accept(stmt) + # Raise a RuntimeError if we hit a non-empty unreachable block. + # Don't complain about empty unreachable blocks, since mypy inserts + # those after `if MYPY`. + elif block.body: + builder.add(RaiseStandardError(RaiseStandardError.RUNTIME_ERROR, + 'Reached allegedly unreachable code!', + block.line)) + builder.add(Unreachable()) + + +def transform_expression_stmt(builder: IRBuilder, stmt: ExpressionStmt) -> None: + if isinstance(stmt.expr, StrExpr): + # Docstring. Ignore + return + # ExpressionStmts do not need to be coerced like other Expressions. + stmt.expr.accept(builder.visitor) + + +def transform_return_stmt(builder: IRBuilder, stmt: ReturnStmt) -> None: + if stmt.expr: + retval = builder.accept(stmt.expr) + else: + retval = builder.builder.none() + retval = builder.coerce(retval, builder.ret_types[-1], stmt.line) + builder.nonlocal_control[-1].gen_return(builder, retval, stmt.line) + + +def transform_assignment_stmt(builder: IRBuilder, stmt: AssignmentStmt) -> None: + assert len(stmt.lvalues) >= 1 + builder.disallow_class_assignments(stmt.lvalues, stmt.line) + lvalue = stmt.lvalues[0] + if stmt.type and isinstance(stmt.rvalue, TempNode): + # This is actually a variable annotation without initializer. Don't generate + # an assignment but we need to call get_assignment_target since it adds a + # name binding as a side effect. 
+ builder.get_assignment_target(lvalue, stmt.line) + return + + line = stmt.rvalue.line + rvalue_reg = builder.accept(stmt.rvalue) + if builder.non_function_scope() and stmt.is_final_def: + builder.init_final_static(lvalue, rvalue_reg) + for lvalue in stmt.lvalues: + target = builder.get_assignment_target(lvalue) + builder.assign(target, rvalue_reg, line) + + +def transform_operator_assignment_stmt(builder: IRBuilder, stmt: OperatorAssignmentStmt) -> None: + """Operator assignment statement such as x += 1""" + builder.disallow_class_assignments([stmt.lvalue], stmt.line) + target = builder.get_assignment_target(stmt.lvalue) + target_value = builder.read(target, stmt.line) + rreg = builder.accept(stmt.rvalue) + # the Python parser strips the '=' from operator assignment statements, so re-add it + op = stmt.op + '=' + res = builder.binary_op(target_value, rreg, op, stmt.line) + # usually operator assignments are done in-place + # but when target doesn't support that we need to manually assign + builder.assign(target, res, res.line) + + +def transform_if_stmt(builder: IRBuilder, stmt: IfStmt) -> None: + if_body, next = BasicBlock(), BasicBlock() + else_body = BasicBlock() if stmt.else_body else next + + # If statements are normalized + assert len(stmt.expr) == 1 + + builder.process_conditional(stmt.expr[0], if_body, else_body) + builder.activate_block(if_body) + builder.accept(stmt.body[0]) + builder.goto(next) + if stmt.else_body: + builder.activate_block(else_body) + builder.accept(stmt.else_body) + builder.goto(next) + builder.activate_block(next) + + +def transform_while_stmt(builder: IRBuilder, s: WhileStmt) -> None: + body, next, top, else_block = BasicBlock(), BasicBlock(), BasicBlock(), BasicBlock() + normal_loop_exit = else_block if s.else_body is not None else next + + builder.push_loop_stack(top, next) + + # Split block so that we get a handle to the top of the loop. 
+ builder.goto_and_activate(top) + builder.process_conditional(s.expr, body, normal_loop_exit) + + builder.activate_block(body) + builder.accept(s.body) + # Add branch to the top at the end of the body. + builder.goto(top) + + builder.pop_loop_stack() + + if s.else_body is not None: + builder.activate_block(else_block) + builder.accept(s.else_body) + builder.goto(next) + + builder.activate_block(next) + + +def transform_for_stmt(builder: IRBuilder, s: ForStmt) -> None: + def body() -> None: + builder.accept(s.body) + + def else_block() -> None: + assert s.else_body is not None + builder.accept(s.else_body) + + builder.for_loop_helper(s.index, s.expr, body, + else_block if s.else_body else None, s.line) + + +def transform_break_stmt(builder: IRBuilder, node: BreakStmt) -> None: + builder.nonlocal_control[-1].gen_break(builder, node.line) + + +def transform_continue_stmt(builder: IRBuilder, node: ContinueStmt) -> None: + builder.nonlocal_control[-1].gen_continue(builder, node.line) + + +def transform_raise_stmt(builder: IRBuilder, s: RaiseStmt) -> None: + if s.expr is None: + builder.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) + builder.add(Unreachable()) + return + + exc = builder.accept(s.expr) + builder.primitive_op(raise_exception_op, [exc], s.line) + builder.add(Unreachable()) + + +def transform_try_except(builder: IRBuilder, body: GenFunc, handlers: Sequence[ Tuple[Optional[Expression], Optional[Expression], GenFunc]], else_body: Optional[GenFunc], line: int) -> None: - """Generalized try/except/else handling that takes functions to gen the bodies. 
- - The point of this is to also be able to support with.""" - assert handlers, "try needs except" - - except_entry, exit_block, cleanup_block = BasicBlock(), BasicBlock(), BasicBlock() - double_except_block = BasicBlock() - # If there is an else block, jump there after the try, otherwise just leave - else_block = BasicBlock() if else_body else exit_block - - # Compile the try block with an error handler - self.builder.builder.push_error_handler(except_entry) - self.builder.goto_and_activate(BasicBlock()) - body() - self.builder.goto(else_block) - self.builder.builder.pop_error_handler() - - # The error handler catches the error and then checks it - # against the except clauses. We compile the error handler - # itself with an error handler so that it can properly restore - # the *old* exc_info if an exception occurs. - # The exception chaining will be done automatically when the - # exception is raised, based on the exception in exc_info. - self.builder.builder.push_error_handler(double_except_block) - self.builder.activate_block(except_entry) - old_exc = self.builder.maybe_spill(self.builder.primitive_op(error_catch_op, [], line)) - # Compile the except blocks with the nonlocal control flow overridden to clear exc_info - self.builder.nonlocal_control.append( - ExceptNonlocalControl(self.builder.nonlocal_control[-1], old_exc)) - - # Process the bodies - for type, var, handler_body in handlers: - next_block = None - if type: - next_block, body_block = BasicBlock(), BasicBlock() - matches = self.builder.primitive_op( - exc_matches_op, [self.builder.accept(type)], type.line - ) - self.builder.add(Branch(matches, body_block, next_block, Branch.BOOL_EXPR)) - self.builder.activate_block(body_block) - if var: - target = self.builder.get_assignment_target(var) - self.builder.assign( - target, - self.builder.primitive_op(get_exc_value_op, [], var.line), - var.line - ) - handler_body() - self.builder.goto(cleanup_block) - if next_block: - 
self.builder.activate_block(next_block) - - # Reraise the exception if needed - if next_block: - self.builder.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) - self.builder.add(Unreachable()) - - self.builder.nonlocal_control.pop() - self.builder.builder.pop_error_handler() - - # Cleanup for if we leave except through normal control flow: - # restore the saved exc_info information and continue propagating - # the exception if it exists. - self.builder.activate_block(cleanup_block) - self.builder.primitive_op(restore_exc_info_op, [self.builder.read(old_exc)], line) - self.builder.goto(exit_block) - - # Cleanup for if we leave except through a raised exception: - # restore the saved exc_info information and continue propagating - # the exception. - self.builder.activate_block(double_except_block) - self.builder.primitive_op(restore_exc_info_op, [self.builder.read(old_exc)], line) - self.builder.primitive_op(keep_propagating_op, [], NO_TRACEBACK_LINE_NO) - self.builder.add(Unreachable()) - - # If present, compile the else body in the obvious way - if else_body: - self.builder.activate_block(else_block) - else_body() - self.builder.goto(exit_block) - - self.builder.activate_block(exit_block) - - def visit_try_except_stmt(self, t: TryStmt) -> None: - def body() -> None: - self.builder.accept(t.body) - - # Work around scoping woes - def make_handler(body: Block) -> GenFunc: - return lambda: self.builder.accept(body) - - handlers = [(type, var, make_handler(body)) for type, var, body in - zip(t.types, t.vars, t.handlers)] - else_body = (lambda: self.builder.accept(t.else_body)) if t.else_body else None - self.visit_try_except(body, handlers, else_body, t.line) - - def try_finally_try(self, err_handler: BasicBlock, return_entry: BasicBlock, - main_entry: BasicBlock, try_body: GenFunc) -> Optional[Register]: - # Compile the try block with an error handler - control = TryFinallyNonlocalControl(return_entry) - self.builder.builder.push_error_handler(err_handler) 
- - self.builder.nonlocal_control.append(control) - self.builder.goto_and_activate(BasicBlock()) - try_body() - self.builder.goto(main_entry) - self.builder.nonlocal_control.pop() - self.builder.builder.pop_error_handler() - - return control.ret_reg - - def try_finally_entry_blocks(self, - err_handler: BasicBlock, return_entry: BasicBlock, - main_entry: BasicBlock, finally_block: BasicBlock, - ret_reg: Optional[Register]) -> Value: - old_exc = self.builder.alloc_temp(exc_rtuple) - - # Entry block for non-exceptional flow - self.builder.activate_block(main_entry) - if ret_reg: - self.builder.add( - Assign( - ret_reg, - self.builder.add(LoadErrorValue(self.builder.ret_types[-1])) - ) - ) - self.builder.goto(return_entry) - - self.builder.activate_block(return_entry) - self.builder.add(Assign(old_exc, self.builder.add(LoadErrorValue(exc_rtuple)))) - self.builder.goto(finally_block) - - # Entry block for errors - self.builder.activate_block(err_handler) - if ret_reg: - self.builder.add( - Assign( - ret_reg, - self.builder.add(LoadErrorValue(self.builder.ret_types[-1])) - ) + """Generalized try/except/else handling that takes functions to gen the bodies. + + The point of this is to also be able to support with.""" + assert handlers, "try needs except" + + except_entry, exit_block, cleanup_block = BasicBlock(), BasicBlock(), BasicBlock() + double_except_block = BasicBlock() + # If there is an else block, jump there after the try, otherwise just leave + else_block = BasicBlock() if else_body else exit_block + + # Compile the try block with an error handler + builder.builder.push_error_handler(except_entry) + builder.goto_and_activate(BasicBlock()) + body() + builder.goto(else_block) + builder.builder.pop_error_handler() + + # The error handler catches the error and then checks it + # against the except clauses. We compile the error handler + # itself with an error handler so that it can properly restore + # the *old* exc_info if an exception occurs. 
+ # The exception chaining will be done automatically when the + # exception is raised, based on the exception in exc_info. + builder.builder.push_error_handler(double_except_block) + builder.activate_block(except_entry) + old_exc = builder.maybe_spill(builder.primitive_op(error_catch_op, [], line)) + # Compile the except blocks with the nonlocal control flow overridden to clear exc_info + builder.nonlocal_control.append( + ExceptNonlocalControl(builder.nonlocal_control[-1], old_exc)) + + # Process the bodies + for type, var, handler_body in handlers: + next_block = None + if type: + next_block, body_block = BasicBlock(), BasicBlock() + matches = builder.primitive_op( + exc_matches_op, [builder.accept(type)], type.line ) - self.builder.add(Assign(old_exc, self.builder.primitive_op(error_catch_op, [], -1))) - self.builder.goto(finally_block) - - return old_exc - - def try_finally_body( - self, finally_block: BasicBlock, finally_body: GenFunc, - ret_reg: Optional[Value], old_exc: Value) -> Tuple[BasicBlock, - 'FinallyNonlocalControl']: - cleanup_block = BasicBlock() - # Compile the finally block with the nonlocal control flow overridden to restore exc_info - self.builder.builder.push_error_handler(cleanup_block) - finally_control = FinallyNonlocalControl( - self.builder.nonlocal_control[-1], ret_reg, old_exc) - self.builder.nonlocal_control.append(finally_control) - self.builder.activate_block(finally_block) - finally_body() - self.builder.nonlocal_control.pop() - - return cleanup_block, finally_control - - def try_finally_resolve_control(self, cleanup_block: BasicBlock, - finally_control: FinallyNonlocalControl, - old_exc: Value, ret_reg: Optional[Value]) -> BasicBlock: - """Resolve the control flow out of a finally block. - - This means returning if there was a return, propagating - exceptions, break/continue (soon), or just continuing on. 
- """ - reraise, rest = BasicBlock(), BasicBlock() - self.builder.add(Branch(old_exc, rest, reraise, Branch.IS_ERROR)) - - # Reraise the exception if there was one - self.builder.activate_block(reraise) - self.builder.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) - self.builder.add(Unreachable()) - self.builder.builder.pop_error_handler() - - # If there was a return, keep returning - if ret_reg: - self.builder.activate_block(rest) - return_block, rest = BasicBlock(), BasicBlock() - self.builder.add(Branch(ret_reg, rest, return_block, Branch.IS_ERROR)) - - self.builder.activate_block(return_block) - self.builder.nonlocal_control[-1].gen_return(self.builder, ret_reg, -1) - - # TODO: handle break/continue - self.builder.activate_block(rest) - out_block = BasicBlock() - self.builder.goto(out_block) - - # If there was an exception, restore again - self.builder.activate_block(cleanup_block) - finally_control.gen_cleanup(self.builder, -1) - self.builder.primitive_op(keep_propagating_op, [], NO_TRACEBACK_LINE_NO) - self.builder.add(Unreachable()) - - return out_block - - def visit_try_finally_stmt(self, try_body: GenFunc, finally_body: GenFunc) -> None: - """Generalized try/finally handling that takes functions to gen the bodies. - - The point of this is to also be able to support with.""" - # Finally is a big pain, because there are so many ways that - # exits can occur. We emit 10+ basic blocks for every finally! 
- - err_handler, main_entry, return_entry, finally_block = ( - BasicBlock(), BasicBlock(), BasicBlock(), BasicBlock()) - - # Compile the body of the try - ret_reg = self.try_finally_try( - err_handler, return_entry, main_entry, try_body) - - # Set up the entry blocks for the finally statement - old_exc = self.try_finally_entry_blocks( - err_handler, return_entry, main_entry, finally_block, ret_reg) - - # Compile the body of the finally - cleanup_block, finally_control = self.try_finally_body( - finally_block, finally_body, ret_reg, old_exc) - - # Resolve the control flow out of the finally block - out_block = self.try_finally_resolve_control( - cleanup_block, finally_control, old_exc, ret_reg) - - self.builder.activate_block(out_block) - - def visit_try_stmt(self, t: TryStmt) -> None: - # Our compilation strategy for try/except/else/finally is to - # treat try/except/else and try/finally as separate language - # constructs that we compile separately. When we have a - # try/except/else/finally, we treat the try/except/else as the - # body of a try/finally block. - if t.finally_body: - def visit_try_body() -> None: - if t.handlers: - self.visit_try_except_stmt(t) - else: - self.builder.accept(t.body) - body = t.finally_body - - self.visit_try_finally_stmt(visit_try_body, lambda: self.builder.accept(body)) - else: - self.visit_try_except_stmt(t) - - def get_sys_exc_info(self) -> List[Value]: - exc_info = self.builder.primitive_op(get_exc_info_op, [], -1) - return [self.builder.add(TupleGet(exc_info, i, -1)) for i in range(3)] - - def visit_with(self, expr: Expression, target: Optional[Lvalue], - body: GenFunc, line: int) -> None: - - # This is basically a straight transcription of the Python code in PEP 343. - # I don't actually understand why a bunch of it is the way it is. - # We could probably optimize the case where the manager is compiled by us, - # but that is not our common case at all, so. 
- mgr_v = self.builder.accept(expr) - typ = self.builder.primitive_op(type_op, [mgr_v], line) - exit_ = self.builder.maybe_spill(self.builder.py_get_attr(typ, '__exit__', line)) - value = self.builder.py_call( - self.builder.py_get_attr(typ, '__enter__', line), [mgr_v], line - ) - mgr = self.builder.maybe_spill(mgr_v) - exc = self.builder.maybe_spill_assignable(self.builder.primitive_op(true_op, [], -1)) - - def try_body() -> None: - if target: - self.builder.assign(self.builder.get_assignment_target(target), value, line) - body() - - def except_body() -> None: - self.builder.assign(exc, self.builder.primitive_op(false_op, [], -1), line) - out_block, reraise_block = BasicBlock(), BasicBlock() - self.builder.add_bool_branch( - self.builder.py_call(self.builder.read(exit_), - [self.builder.read(mgr)] + self.get_sys_exc_info(), line), - out_block, - reraise_block + builder.add(Branch(matches, body_block, next_block, Branch.BOOL_EXPR)) + builder.activate_block(body_block) + if var: + target = builder.get_assignment_target(var) + builder.assign( + target, + builder.primitive_op(get_exc_value_op, [], var.line), + var.line ) - self.builder.activate_block(reraise_block) - self.builder.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) - self.builder.add(Unreachable()) - self.builder.activate_block(out_block) - - def finally_body() -> None: - out_block, exit_block = BasicBlock(), BasicBlock() - self.builder.add( - Branch(self.builder.read(exc), exit_block, out_block, Branch.BOOL_EXPR) + handler_body() + builder.goto(cleanup_block) + if next_block: + builder.activate_block(next_block) + + # Reraise the exception if needed + if next_block: + builder.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) + builder.add(Unreachable()) + + builder.nonlocal_control.pop() + builder.builder.pop_error_handler() + + # Cleanup for if we leave except through normal control flow: + # restore the saved exc_info information and continue propagating + # the exception if it 
exists. + builder.activate_block(cleanup_block) + builder.primitive_op(restore_exc_info_op, [builder.read(old_exc)], line) + builder.goto(exit_block) + + # Cleanup for if we leave except through a raised exception: + # restore the saved exc_info information and continue propagating + # the exception. + builder.activate_block(double_except_block) + builder.primitive_op(restore_exc_info_op, [builder.read(old_exc)], line) + builder.primitive_op(keep_propagating_op, [], NO_TRACEBACK_LINE_NO) + builder.add(Unreachable()) + + # If present, compile the else body in the obvious way + if else_body: + builder.activate_block(else_block) + else_body() + builder.goto(exit_block) + + builder.activate_block(exit_block) + + +def transform_try_except_stmt(builder: IRBuilder, t: TryStmt) -> None: + def body() -> None: + builder.accept(t.body) + + # Work around scoping woes + def make_handler(body: Block) -> GenFunc: + return lambda: builder.accept(body) + + handlers = [(type, var, make_handler(body)) + for type, var, body in zip(t.types, t.vars, t.handlers)] + else_body = (lambda: builder.accept(t.else_body)) if t.else_body else None + transform_try_except(builder, body, handlers, else_body, t.line) + + +def try_finally_try(builder: IRBuilder, + err_handler: BasicBlock, + return_entry: BasicBlock, + main_entry: BasicBlock, + try_body: GenFunc) -> Optional[Register]: + # Compile the try block with an error handler + control = TryFinallyNonlocalControl(return_entry) + builder.builder.push_error_handler(err_handler) + + builder.nonlocal_control.append(control) + builder.goto_and_activate(BasicBlock()) + try_body() + builder.goto(main_entry) + builder.nonlocal_control.pop() + builder.builder.pop_error_handler() + + return control.ret_reg + + +def try_finally_entry_blocks(builder: IRBuilder, + err_handler: BasicBlock, + return_entry: BasicBlock, + main_entry: BasicBlock, + finally_block: BasicBlock, + ret_reg: Optional[Register]) -> Value: + old_exc = builder.alloc_temp(exc_rtuple) + + # 
Entry block for non-exceptional flow + builder.activate_block(main_entry) + if ret_reg: + builder.add( + Assign( + ret_reg, + builder.add(LoadErrorValue(builder.ret_types[-1])) ) - self.builder.activate_block(exit_block) - none = self.builder.none_object() - self.builder.py_call( - self.builder.read(exit_), [self.builder.read(mgr), none, none, none], line + ) + builder.goto(return_entry) + + builder.activate_block(return_entry) + builder.add(Assign(old_exc, builder.add(LoadErrorValue(exc_rtuple)))) + builder.goto(finally_block) + + # Entry block for errors + builder.activate_block(err_handler) + if ret_reg: + builder.add( + Assign( + ret_reg, + builder.add(LoadErrorValue(builder.ret_types[-1])) ) - self.builder.goto_and_activate(out_block) - - self.visit_try_finally_stmt( - lambda: self.visit_try_except(try_body, [(None, None, except_body)], None, line), - finally_body) - - def visit_with_stmt(self, o: WithStmt) -> None: - # Generate separate logic for each expr in it, left to right - def generate(i: int) -> None: - if i >= len(o.expr): - self.builder.accept(o.body) + ) + builder.add(Assign(old_exc, builder.primitive_op(error_catch_op, [], -1))) + builder.goto(finally_block) + + return old_exc + + +def try_finally_body( + builder: IRBuilder, + finally_block: BasicBlock, + finally_body: GenFunc, + ret_reg: Optional[Value], + old_exc: Value) -> Tuple[BasicBlock, FinallyNonlocalControl]: + cleanup_block = BasicBlock() + # Compile the finally block with the nonlocal control flow overridden to restore exc_info + builder.builder.push_error_handler(cleanup_block) + finally_control = FinallyNonlocalControl( + builder.nonlocal_control[-1], ret_reg, old_exc) + builder.nonlocal_control.append(finally_control) + builder.activate_block(finally_block) + finally_body() + builder.nonlocal_control.pop() + + return cleanup_block, finally_control + + +def try_finally_resolve_control(builder: IRBuilder, + cleanup_block: BasicBlock, + finally_control: FinallyNonlocalControl, + old_exc: 
Value, + ret_reg: Optional[Value]) -> BasicBlock: + """Resolve the control flow out of a finally block. + + This means returning if there was a return, propagating + exceptions, break/continue (soon), or just continuing on. + """ + reraise, rest = BasicBlock(), BasicBlock() + builder.add(Branch(old_exc, rest, reraise, Branch.IS_ERROR)) + + # Reraise the exception if there was one + builder.activate_block(reraise) + builder.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) + builder.add(Unreachable()) + builder.builder.pop_error_handler() + + # If there was a return, keep returning + if ret_reg: + builder.activate_block(rest) + return_block, rest = BasicBlock(), BasicBlock() + builder.add(Branch(ret_reg, rest, return_block, Branch.IS_ERROR)) + + builder.activate_block(return_block) + builder.nonlocal_control[-1].gen_return(builder, ret_reg, -1) + + # TODO: handle break/continue + builder.activate_block(rest) + out_block = BasicBlock() + builder.goto(out_block) + + # If there was an exception, restore again + builder.activate_block(cleanup_block) + finally_control.gen_cleanup(builder, -1) + builder.primitive_op(keep_propagating_op, [], NO_TRACEBACK_LINE_NO) + builder.add(Unreachable()) + + return out_block + + +def transform_try_finally_stmt(builder: IRBuilder, + try_body: GenFunc, + finally_body: GenFunc) -> None: + """Generalized try/finally handling that takes functions to gen the bodies. + + The point of this is to also be able to support with.""" + # Finally is a big pain, because there are so many ways that + # exits can occur. We emit 10+ basic blocks for every finally! 
+ + err_handler, main_entry, return_entry, finally_block = ( + BasicBlock(), BasicBlock(), BasicBlock(), BasicBlock()) + + # Compile the body of the try + ret_reg = try_finally_try( + builder, err_handler, return_entry, main_entry, try_body) + + # Set up the entry blocks for the finally statement + old_exc = try_finally_entry_blocks( + builder, err_handler, return_entry, main_entry, finally_block, ret_reg) + + # Compile the body of the finally + cleanup_block, finally_control = try_finally_body( + builder, finally_block, finally_body, ret_reg, old_exc) + + # Resolve the control flow out of the finally block + out_block = try_finally_resolve_control( + builder, cleanup_block, finally_control, old_exc, ret_reg) + + builder.activate_block(out_block) + + +def transform_try_stmt(builder: IRBuilder, t: TryStmt) -> None: + # Our compilation strategy for try/except/else/finally is to + # treat try/except/else and try/finally as separate language + # constructs that we compile separately. When we have a + # try/except/else/finally, we treat the try/except/else as the + # body of a try/finally block. 
+ if t.finally_body: + def transform_try_body() -> None: + if t.handlers: + transform_try_except_stmt(builder, t) else: - self.visit_with(o.expr[i], o.target[i], lambda: generate(i + 1), o.line) - - generate(0) - - def visit_assert_stmt(self, a: AssertStmt) -> None: - if self.builder.options.strip_asserts: - return - cond = self.builder.accept(a.expr) - ok_block, error_block = BasicBlock(), BasicBlock() - self.builder.add_bool_branch(cond, ok_block, error_block) - self.builder.activate_block(error_block) - if a.msg is None: - # Special case (for simpler generated code) - self.builder.add(RaiseStandardError(RaiseStandardError.ASSERTION_ERROR, None, a.line)) - elif isinstance(a.msg, StrExpr): - # Another special case - self.builder.add(RaiseStandardError(RaiseStandardError.ASSERTION_ERROR, a.msg.value, - a.line)) + builder.accept(t.body) + body = t.finally_body + + transform_try_finally_stmt(builder, transform_try_body, lambda: builder.accept(body)) + else: + transform_try_except_stmt(builder, t) + + +def get_sys_exc_info(builder: IRBuilder) -> List[Value]: + exc_info = builder.primitive_op(get_exc_info_op, [], -1) + return [builder.add(TupleGet(exc_info, i, -1)) for i in range(3)] + + +def transform_with(builder: IRBuilder, + expr: Expression, + target: Optional[Lvalue], + body: GenFunc, + line: int) -> None: + # This is basically a straight transcription of the Python code in PEP 343. + # I don't actually understand why a bunch of it is the way it is. + # We could probably optimize the case where the manager is compiled by us, + # but that is not our common case at all, so. 
+ mgr_v = builder.accept(expr) + typ = builder.primitive_op(type_op, [mgr_v], line) + exit_ = builder.maybe_spill(builder.py_get_attr(typ, '__exit__', line)) + value = builder.py_call( + builder.py_get_attr(typ, '__enter__', line), [mgr_v], line + ) + mgr = builder.maybe_spill(mgr_v) + exc = builder.maybe_spill_assignable(builder.primitive_op(true_op, [], -1)) + + def try_body() -> None: + if target: + builder.assign(builder.get_assignment_target(target), value, line) + body() + + def except_body() -> None: + builder.assign(exc, builder.primitive_op(false_op, [], -1), line) + out_block, reraise_block = BasicBlock(), BasicBlock() + builder.add_bool_branch( + builder.py_call(builder.read(exit_), + [builder.read(mgr)] + get_sys_exc_info(builder), line), + out_block, + reraise_block + ) + builder.activate_block(reraise_block) + builder.primitive_op(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) + builder.add(Unreachable()) + builder.activate_block(out_block) + + def finally_body() -> None: + out_block, exit_block = BasicBlock(), BasicBlock() + builder.add( + Branch(builder.read(exc), exit_block, out_block, Branch.BOOL_EXPR) + ) + builder.activate_block(exit_block) + none = builder.none_object() + builder.py_call( + builder.read(exit_), [builder.read(mgr), none, none, none], line + ) + builder.goto_and_activate(out_block) + + transform_try_finally_stmt( + builder, + lambda: transform_try_except(builder, + try_body, + [(None, None, except_body)], + None, + line), + finally_body + ) + + +def transform_with_stmt(builder: IRBuilder, o: WithStmt) -> None: + # Generate separate logic for each expr in it, left to right + def generate(i: int) -> None: + if i >= len(o.expr): + builder.accept(o.body) else: - # The general case -- explicitly construct an exception instance - message = self.builder.accept(a.msg) - exc_type = self.builder.load_module_attr_by_fullname('builtins.AssertionError', a.line) - exc = self.builder.py_call(exc_type, [message], a.line) - 
self.builder.primitive_op(raise_exception_op, [exc], a.line) - self.builder.add(Unreachable()) - self.builder.activate_block(ok_block) - - def visit_del_stmt(self, o: DelStmt) -> None: - self.visit_del_item(self.builder.get_assignment_target(o.expr), o.line) - - def visit_del_item(self, target: AssignmentTarget, line: int) -> None: - if isinstance(target, AssignmentTargetIndex): - self.builder.gen_method_call( - target.base, - '__delitem__', - [target.index], - result_type=None, - line=line - ) - elif isinstance(target, AssignmentTargetAttr): - key = self.builder.load_static_unicode(target.attr) - self.builder.add(PrimitiveOp([target.obj, key], py_delattr_op, line)) - elif isinstance(target, AssignmentTargetRegister): - # Delete a local by assigning an error value to it, which will - # prompt the insertion of uninit checks. - self.builder.add(Assign(target.register, - self.builder.add(LoadErrorValue(target.type, undefines=True)))) - elif isinstance(target, AssignmentTargetTuple): - for subtarget in target.items: - self.visit_del_item(subtarget, line) + transform_with(builder, o.expr[i], o.target[i], lambda: generate(i + 1), o.line) + + generate(0) + + +def transform_assert_stmt(builder: IRBuilder, a: AssertStmt) -> None: + if builder.options.strip_asserts: + return + cond = builder.accept(a.expr) + ok_block, error_block = BasicBlock(), BasicBlock() + builder.add_bool_branch(cond, ok_block, error_block) + builder.activate_block(error_block) + if a.msg is None: + # Special case (for simpler generated code) + builder.add(RaiseStandardError(RaiseStandardError.ASSERTION_ERROR, None, a.line)) + elif isinstance(a.msg, StrExpr): + # Another special case + builder.add(RaiseStandardError(RaiseStandardError.ASSERTION_ERROR, a.msg.value, + a.line)) + else: + # The general case -- explicitly construct an exception instance + message = builder.accept(a.msg) + exc_type = builder.load_module_attr_by_fullname('builtins.AssertionError', a.line) + exc = builder.py_call(exc_type, 
[message], a.line) + builder.primitive_op(raise_exception_op, [exc], a.line) + builder.add(Unreachable()) + builder.activate_block(ok_block) + + +def transform_del_stmt(builder: IRBuilder, o: DelStmt) -> None: + transform_del_item(builder, builder.get_assignment_target(o.expr), o.line) + + +def transform_del_item(builder: IRBuilder, target: AssignmentTarget, line: int) -> None: + if isinstance(target, AssignmentTargetIndex): + builder.gen_method_call( + target.base, + '__delitem__', + [target.index], + result_type=None, + line=line + ) + elif isinstance(target, AssignmentTargetAttr): + key = builder.load_static_unicode(target.attr) + builder.add(PrimitiveOp([target.obj, key], py_delattr_op, line)) + elif isinstance(target, AssignmentTargetRegister): + # Delete a local by assigning an error value to it, which will + # prompt the insertion of uninit checks. + builder.add(Assign(target.register, + builder.add(LoadErrorValue(target.type, undefines=True)))) + elif isinstance(target, AssignmentTargetTuple): + for subtarget in target.items: + transform_del_item(builder, subtarget, line) From 7af3191246c17a8580acd3b6336f35b02de85e7e Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Sun, 23 Feb 2020 15:38:14 -0800 Subject: [PATCH 110/117] Make ModuleFinder try identifying non-PEP 561 packages (#8238) This pull request is an attempt at mitigating https://github.com/python/mypy/issues/4542 by making mypy report a custom error when it detects installed packages that are not PEP 561 compliant. (I don't think it resolves it though -- I've come to the conclusion that import handling is just inherently complex/spooky. So if you were in a cynical mode, you could perhaps argue the issue is just fundamentally unresolvable...) But anyways, this PR: 1. 
Removes the hard-coded list of "popular third party libraries" from `moduleinfo.py` and replaces it with a heuristic that tries to find when an import "plausibly matches" some directory or Python file while we search for packages containing ``py.typed``. If we do find a plausible match, we generate an error that looks something like this: ``` test.py:1: error: Skipping analyzing 'scipy': found module but no type hints or library stubs test.py:1: note: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports ``` The heuristic I'm using obviously isn't foolproof since we don't have any obvious signifiers like the ``py.typed`` file we can look for, but it seemed to work well enough when I tried testing in on some of the libraries in the old list. Hopefully this will result in less confusion when users use a mix of "popular" and "unpopular" libraries. 2. Gives us a way to add more fine-grained "module not found" error messages and heuristics in the future: we can add more entries to the ModuleNotFoundReason enum. 3. Updates the docs about missing imports to use these new errors. I added a new subsection per each error type to try and make things a little less unwieldy. 4. Adds what I think are common points of confusion to the doc -- e.g. that missing imports are inferred to be of type Any, what exactly it means to add a `# type: ignore`, and the whole virtualenv confusion thing. 5. Modifies the docs to more strongly discourage the use of MYPYPATH. Unless I'm wrong, it's not a feature most people will find useful. One limitation of this PR is that I added tests for just ModuleFinder. I didn't want to dive into modifying our testcases framework to support adding custom site-packages/some moral equivalent -- and my PR only changes the behavior of ModuleFinder when it would have originally reported something was not found, anyways. 
--- docs/source/running_mypy.rst | 187 ++++++++++---- mypy/build.py | 66 +++-- mypy/modulefinder.py | 71 +++++- mypy/moduleinfo.py | 228 +----------------- mypy/stubgen.py | 17 +- mypy/stubtest.py | 2 +- mypy/stubutil.py | 11 +- mypy/test/testcheck.py | 2 +- mypy/test/testmodulefinder.py | 135 ++++++++++- .../foo-stubs/__init__.pyi | 0 .../foo-stubs/bar.pyi | 1 + .../foo/__init__.py | 0 .../modulefinder-site-packages/foo/bar.py | 1 + .../ns_pkg_typed/a.py | 1 + .../ns_pkg_typed/b/c.py | 1 + .../ns_pkg_typed/py.typed | 0 .../ns_pkg_untyped/a.py | 1 + .../ns_pkg_untyped/b/c.py | 1 + .../pkg_typed/__init__.py | 1 + .../modulefinder-site-packages/pkg_typed/a.py | 1 + .../pkg_typed/b/__init__.py | 1 + .../pkg_typed/b/c.py | 1 + .../pkg_typed/py.typed | 0 .../pkg_untyped/__init__.py | 1 + .../pkg_untyped/a.py | 1 + .../pkg_untyped/b/__init__.py | 1 + .../pkg_untyped/b/c.py | 1 + .../modulefinder-site-packages/standalone.py | 1 + test-data/unit/check-errorcodes.test | 7 +- test-data/unit/semanal-errors.test | 6 - 30 files changed, 408 insertions(+), 339 deletions(-) create mode 100644 test-data/packages/modulefinder-site-packages/foo-stubs/__init__.pyi create mode 100644 test-data/packages/modulefinder-site-packages/foo-stubs/bar.pyi create mode 100644 test-data/packages/modulefinder-site-packages/foo/__init__.py create mode 100644 test-data/packages/modulefinder-site-packages/foo/bar.py create mode 100644 test-data/packages/modulefinder-site-packages/ns_pkg_typed/a.py create mode 100644 test-data/packages/modulefinder-site-packages/ns_pkg_typed/b/c.py create mode 100644 test-data/packages/modulefinder-site-packages/ns_pkg_typed/py.typed create mode 100644 test-data/packages/modulefinder-site-packages/ns_pkg_untyped/a.py create mode 100644 test-data/packages/modulefinder-site-packages/ns_pkg_untyped/b/c.py create mode 100644 test-data/packages/modulefinder-site-packages/pkg_typed/__init__.py create mode 100644 test-data/packages/modulefinder-site-packages/pkg_typed/a.py 
create mode 100644 test-data/packages/modulefinder-site-packages/pkg_typed/b/__init__.py create mode 100644 test-data/packages/modulefinder-site-packages/pkg_typed/b/c.py create mode 100644 test-data/packages/modulefinder-site-packages/pkg_typed/py.typed create mode 100644 test-data/packages/modulefinder-site-packages/pkg_untyped/__init__.py create mode 100644 test-data/packages/modulefinder-site-packages/pkg_untyped/a.py create mode 100644 test-data/packages/modulefinder-site-packages/pkg_untyped/b/__init__.py create mode 100644 test-data/packages/modulefinder-site-packages/pkg_untyped/b/c.py create mode 100644 test-data/packages/modulefinder-site-packages/standalone.py diff --git a/docs/source/running_mypy.rst b/docs/source/running_mypy.rst index 64e7d39c0d72..f17a6d5b8a09 100644 --- a/docs/source/running_mypy.rst +++ b/docs/source/running_mypy.rst @@ -126,72 +126,102 @@ sections will discuss what to do in the other two cases. .. _ignore-missing-imports: Missing imports ---------------- +*************** When you import a module, mypy may report that it is unable to follow the import. -This can cause a lot of errors that look like the following:: +This can cause errors that look like the following:: main.py:1: error: No library stub file for standard library module 'antigravity' - main.py:2: error: No library stub file for module 'flask' + main.py:2: error: Skipping analyzing 'django': found module but no type hints or library stubs main.py:3: error: Cannot find implementation or library stub for module named 'this_module_does_not_exist' -There are several different things you can try doing, depending on the exact -nature of the module. +If you get any of these errors on an import, mypy will assume the type of that +module is ``Any``, the dynamic type. This means attempting to access any +attribute of the module will automatically succeed: -If the module is a part of your own codebase, try: +.. code-block:: python -1. 
Making sure your import does not contain a typo. -2. Reading the :ref:`finding-imports` section below to make sure you - understand how exactly mypy searches for and finds modules and modify - how you're invoking mypy accordingly. -3. Adding the directory containing that module to either the ``MYPYPATH`` - environment variable or the ``mypy_path`` - :ref:`config file option `. + # Error: Cannot find implementation or library stub for module named 'does_not_exist' + import does_not_exist - Note: if the module you are trying to import is actually a *submodule* of - some package, you should add the directory containing the *entire* package - to ``MYPYPATH``. For example, suppose you are trying to add the module - ``foo.bar.baz``, which is located at ``~/foo-project/src/foo/bar/baz.py``. - In this case, you should add ``~/foo-project/src`` to ``MYPYPATH``. - -If the module is a third party library, you must make sure that there are -type hints available for that library. Mypy by default will not attempt to -infer the types of any 3rd party libraries you may have installed + # But this type checks, and x will have type 'Any' + x = does_not_exist.foobar() + +The next three sections describe what each error means and recommended next steps. + +Missing type hints for standard library module +---------------------------------------------- + +If you are getting a "No library stub file for standard library module" error, +this means that you are attempting to import something from the standard library +which has not yet been annotated with type hints. In this case, try: + +1. Updating mypy and re-running it. It's possible type hints for that corner + of the standard library were added in a newer version of mypy. + +2. Filing a bug report or submitting a pull request to + `typeshed `_, the repository of type hints + for the standard library that comes bundled with mypy. + + Changes to typeshed will come bundled with mypy the next time it's released. 
+ In the meantime, you can add a ``# type: ignore`` to the import to suppress + the errors generated on that line. After upgrading, run mypy with the + :option:`--warn-unused-ignores ` flag to help you + find any ``# type: ignore`` annotations you no longer need. + +.. _missing-type-hints-for-third-party-library: + +Missing type hints for third party library +------------------------------------------ + +If you are getting a "Skipping analyzing X: found module but no type hints or library stubs" +error, this means mypy was able to find the module you were importing, but no +corresponding type hints. + +Mypy will not try inferring the types of any 3rd party libraries you have installed unless they either have declared themselves to be :ref:`PEP 561 compliant stub package ` or have registered -themselves on `typeshed `_, -the repository of types for the standard library and some 3rd party libraries. +themselves on `typeshed `_, the repository +of types for the standard library and some 3rd party libraries. -If you are getting an import-related error, this means the library you -are trying to use has done neither of these things. In that case, you can try: +If you are getting this error, try: -1. Searching to see if there is a :ref:`PEP 561 compliant stub package `. +1. Upgrading the version of the library you're using, in case a newer version + has started to include type hints. + +2. Searching to see if there is a :ref:`PEP 561 compliant stub package `. corresponding to your third party library. Stub packages let you install type hints independently from the library itself. -2. :ref:`Writing your own stub files ` containing type hints for + For example, if you want type hints for the ``django`` library, you can + install the `django-stubs `_ package. + +3. :ref:`Writing your own stub files ` containing type hints for the library.
You can point mypy at your type hints either by passing - them in via the command line, by adding the location to the - ``MYPYPATH`` environment variable, or by using the ``mypy_path`` - :ref:`config file option `. + them in via the command line, by using the ``files`` or ``mypy_path`` + :ref:`config file options `, or by + adding the location to the ``MYPYPATH`` environment variable. - Note that if you decide to write your own stub files, they don't need - to be complete! A good strategy is to add stubs for just the parts - of the library you need and iterate on them over time. + These stub files do not need to be complete! A good strategy is to use + stubgen, a program that comes bundled with mypy, to generate a first + rough draft of the stubs. You can then iterate on just the parts of the + library you need. If you want to share your work, you can try contributing your stubs back to the library -- see our documentation on creating :ref:`PEP 561 compliant packages `. -If the module is a third party library, but you cannot find any existing -type hints nor have time to write your own, you can *silence* the errors: +If you are unable to find any existing type hints nor have time to write your +own, you can instead *suppress* the errors. All this will do is make mypy stop +reporting an error on the line containing the import: the imported module +will continue to be of type ``Any``. -1. To silence a *single* missing import error, add a ``# type: ignore`` at the end of the +1. To suppress a *single* missing import error, add a ``# type: ignore`` at the end of the line containing the import. -2. To silence *all* missing import imports errors from a single library, add +2. To suppress *all* missing import errors from a single library, add a section to your :ref:`mypy config file ` for that library setting ``ignore_missing_imports`` to True. For example, suppose your codebase makes heavy use of an (untyped) library named ``foobar``. You can silence
You can silence @@ -206,7 +236,7 @@ type hints nor have time to write your own, you can *silence* the errors: documentation about configuring :ref:`import discovery ` in config files. -3. To silence *all* missing import errors for *all* libraries in your codebase, +3. To suppress *all* missing import errors for *all* libraries in your codebase, invoke mypy with the :option:`--ignore-missing-imports ` command line flag or set the ``ignore_missing_imports`` :ref:`config file option ` to True @@ -218,26 +248,59 @@ type hints nor have time to write your own, you can *silence* the errors: We recommend using this approach only as a last resort: it's equivalent to adding a ``# type: ignore`` to all unresolved imports in your codebase. -If the module is a part of the standard library, try: +Unable to find module +--------------------- -1. Updating mypy and re-running it. It's possible type hints for that corner - of the standard library were added in a later version of mypy. +If you are getting a "Cannot find implementation or library stub for module" +error, this means mypy was not able to find the module you are trying to +import, whether it comes bundled with type hints or not. If you are getting +this error, try: -2. Filing a bug report on `typeshed `_, - the repository of type hints for the standard library that comes bundled - with mypy. You can expedite this process by also submitting a pull request - fixing the bug. +1. Making sure your import does not contain a typo. - Changes to typeshed will come bundled with mypy the next time it's released. - In the meantime, you can add a ``# type: ignore`` to silence any relevant - errors. After upgrading, we recommend running mypy using the - :option:`--warn-unused-ignores ` flag to help you find any ``# type: ignore`` - annotations you no longer need. +2. If the module is a third party library, making sure that mypy is able + to find the interpreter containing the installed library. 
+ + For example, if you are running your code in a virtualenv, make sure + to install and use mypy within the virtualenv. Alternatively, if you + want to use a globally installed mypy, set the + :option:`--python-executable ` command + line flag to point to the Python interpreter containing your installed + third party packages. + +3. Reading the :ref:`finding-imports` section below to make sure you + understand how exactly mypy searches for and finds modules and modify + how you're invoking mypy accordingly. + +4. Directly specifying the directory containing the module you want to + type check from the command line, by using the ``files`` or + ``mypy_path`` :ref:`config file options `, + or by using the ``MYPYPATH`` environment variable. + + Note: if the module you are trying to import is actually a *submodule* of + some package, you should specify the directory containing the *entire* package. + For example, suppose you are trying to add the module ``foo.bar.baz`` + which is located at ``~/foo-project/src/foo/bar/baz.py``. In this case, + you must run ``mypy ~/foo-project/src`` (or set the ``MYPYPATH`` to + ``~/foo-project/src``). + +5. If you are using namespace packages -- packages which do not contain + ``__init__.py`` files within each subfolder -- using the + :option:`--namespace-packages ` command + line flag. + +In some rare cases, you may get the "Cannot find implementation or library +stub for module" error even when the module is installed in your system. +This can happen when the module is both missing type hints and is installed +on your system in an unconventional way. + +In this case, follow the steps above on how to handle +:ref:`missing type hints in third party libraries `. .. _follow-imports: Following imports ------------------ +***************** Mypy is designed to :ref:`doggedly follow all imports `, even if the imported module is not a file you explicitly wanted mypy to check.
@@ -401,3 +464,23 @@ same directory on the search path, only the stub file is used. (However, if the files are in different directories, the one found in the earlier directory is used.) +Other advice and best practices +******************************* + +There are multiple ways of telling mypy what files to type check, ranging +from passing in command line arguments to using the ``files`` or ``mypy_path`` +:ref:`config file options ` to setting the +``MYPYPATH`` environment variable. + +However, in practice, it is usually sufficient to just use either +command line arguments or the ``files`` config file option (the two +are largely interchangeable). + +Setting ``mypy_path``/``MYPYPATH`` is mostly useful in the case +where you want to try running mypy against multiple distinct +sets of files that happen to share some common dependencies. + +For example, if you have multiple projects that happen to be +using the same set of work-in-progress stubs, it could be +convenient to just have your ``MYPYPATH`` point to a single +directory containing the stubs. diff --git a/mypy/build.py b/mypy/build.py index 890bc06c4b84..330f5cb12e3f 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -42,7 +42,10 @@ from mypy.report import Reports # Avoid unconditional slow import from mypy import moduleinfo from mypy.fixup import fixup_module -from mypy.modulefinder import BuildSource, compute_search_paths, FindModuleCache, SearchPaths +from mypy.modulefinder import ( + BuildSource, compute_search_paths, FindModuleCache, SearchPaths, ModuleSearchResult, + ModuleNotFoundReason +) from mypy.nodes import Expression from mypy.options import Options from mypy.parse import parse @@ -2369,15 +2372,15 @@ def find_module_and_diagnose(manager: BuildManager, # difference and just assume 'builtins' everywhere, # which simplifies code. 
file_id = '__builtin__' - path = find_module_simple(file_id, manager) - if path: + result = find_module_with_reason(file_id, manager) + if isinstance(result, str): # For non-stubs, look at options.follow_imports: # - normal (default) -> fully analyze # - silent -> analyze but silence errors # - skip -> don't analyze, make the type Any follow_imports = options.follow_imports if (root_source # Honor top-level modules - or (not path.endswith('.py') # Stubs are always normal + or (not result.endswith('.py') # Stubs are always normal and not options.follow_imports_for_stubs) # except when they aren't or id in mypy.semanal_main.core_modules): # core is always normal follow_imports = 'normal' @@ -2385,32 +2388,32 @@ def find_module_and_diagnose(manager: BuildManager, pass elif follow_imports == 'silent': # Still import it, but silence non-blocker errors. - manager.log("Silencing %s (%s)" % (path, id)) + manager.log("Silencing %s (%s)" % (result, id)) elif follow_imports == 'skip' or follow_imports == 'error': # In 'error' mode, produce special error messages. 
if id not in manager.missing_modules: - manager.log("Skipping %s (%s)" % (path, id)) + manager.log("Skipping %s (%s)" % (result, id)) if follow_imports == 'error': if ancestor_for: - skipping_ancestor(manager, id, path, ancestor_for) + skipping_ancestor(manager, id, result, ancestor_for) else: skipping_module(manager, caller_line, caller_state, - id, path) + id, result) raise ModuleNotFound if not manager.options.no_silence_site_packages: for dir in manager.search_paths.package_path + manager.search_paths.typeshed_path: - if is_sub_path(path, dir): + if is_sub_path(result, dir): # Silence errors in site-package dirs and typeshed follow_imports = 'silent' if (id in CORE_BUILTIN_MODULES - and not is_typeshed_file(path) + and not is_typeshed_file(result) and not options.use_builtins_fixtures and not options.custom_typeshed_dir): raise CompileError([ - 'mypy: "%s" shadows library module "%s"' % (path, id), + 'mypy: "%s" shadows library module "%s"' % (result, id), 'note: A user-defined top-level module with name "%s" is not supported' % id ]) - return (path, follow_imports) + return (result, follow_imports) else: # Could not find a module. Typically the reason is a # misspelled module name, missing stub, module not in @@ -2419,7 +2422,7 @@ def find_module_and_diagnose(manager: BuildManager, raise ModuleNotFound if caller_state: if not (options.ignore_missing_imports or in_partial_package(id, manager)): - module_not_found(manager, caller_line, caller_state, id) + module_not_found(manager, caller_line, caller_state, id, result) raise ModuleNotFound elif root_source: # If we can't find a root source it's always fatal. 
@@ -2456,10 +2459,17 @@ def exist_added_packages(suppressed: List[str], def find_module_simple(id: str, manager: BuildManager) -> Optional[str]: """Find a filesystem path for module `id` or `None` if not found.""" + x = find_module_with_reason(id, manager) + if isinstance(x, ModuleNotFoundReason): + return None + return x + + +def find_module_with_reason(id: str, manager: BuildManager) -> ModuleSearchResult: + """Find a filesystem path for module `id` or the reason it can't be found.""" t0 = time.time() x = manager.find_module_cache.find_module(id) manager.add_stats(find_module_time=time.time() - t0, find_module_calls=1) - return x @@ -2493,35 +2503,23 @@ def in_partial_package(id: str, manager: BuildManager) -> bool: def module_not_found(manager: BuildManager, line: int, caller_state: State, - target: str) -> None: + target: str, reason: ModuleNotFoundReason) -> None: errors = manager.errors save_import_context = errors.import_context() errors.set_import_context(caller_state.import_context) errors.set_file(caller_state.xpath, caller_state.id) - stub_msg = "(Stub files are from https://github.com/python/typeshed)" if target == 'builtins': errors.report(line, 0, "Cannot find 'builtins' module. 
Typeshed appears broken!", blocker=True) errors.raise_error() - elif ((manager.options.python_version[0] == 2 and moduleinfo.is_py2_std_lib_module(target)) - or (manager.options.python_version[0] >= 3 - and moduleinfo.is_py3_std_lib_module(target))): - errors.report( - line, 0, "No library stub file for standard library module '{}'".format(target), - code=codes.IMPORT) - errors.report(line, 0, stub_msg, severity='note', only_once=True, code=codes.IMPORT) - elif moduleinfo.is_third_party_module(target): - errors.report(line, 0, "No library stub file for module '{}'".format(target), - code=codes.IMPORT) - errors.report(line, 0, stub_msg, severity='note', only_once=True, code=codes.IMPORT) + elif moduleinfo.is_std_lib_module(manager.options.python_version, target): + msg = "No library stub file for standard library module '{}'".format(target) + note = "(Stub files are from https://github.com/python/typeshed)" + errors.report(line, 0, msg, code=codes.IMPORT) + errors.report(line, 0, note, severity='note', only_once=True, code=codes.IMPORT) else: - note = "See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports" - errors.report( - line, - 0, - "Cannot find implementation or library stub for module named '{}'".format(target), - code=codes.IMPORT - ) + msg, note = reason.error_message_templates() + errors.report(line, 0, msg.format(target), code=codes.IMPORT) errors.report(line, 0, note, severity='note', only_once=True, code=codes.IMPORT) errors.set_import_context(save_import_context) diff --git a/mypy/modulefinder.py b/mypy/modulefinder.py index 8802c2a2eb5e..8b4a6f271545 100644 --- a/mypy/modulefinder.py +++ b/mypy/modulefinder.py @@ -9,8 +9,9 @@ import os import subprocess import sys +from enum import Enum -from typing import Dict, List, NamedTuple, Optional, Set, Tuple +from typing import Dict, List, NamedTuple, Optional, Set, Tuple, Union from typing_extensions import Final from mypy.defaults import PYTHON3_VERSION_MIN @@ -34,6 +35,37 @@ 
PYTHON_EXTENSIONS = ['.pyi', '.py'] # type: Final +# TODO: Consider adding more reasons here? +# E.g. if we deduce a module would likely be found if the user were +# to set the --namespace-packages flag. +class ModuleNotFoundReason(Enum): + # The module was not found: we found neither stubs nor a plausible code + # implementation (with or without a py.typed file). + NOT_FOUND = 0 + + # The implementation for this module plausibly exists (e.g. we + # found a matching folder or *.py file), but either the parent package + # did not contain a py.typed file or we were unable to find a + # corresponding *-stubs package. + FOUND_WITHOUT_TYPE_HINTS = 1 + + def error_message_templates(self) -> Tuple[str, str]: + if self is ModuleNotFoundReason.NOT_FOUND: + msg = "Cannot find implementation or library stub for module named '{}'" + note = "See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports" + elif self is ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS: + msg = "Skipping analyzing '{}': found module but no type hints or library stubs" + note = "See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports" + else: + assert False + return msg, note + + +# If we found the module, returns the path to the module as a str. +# Otherwise, returns the reason why the module wasn't found. 
+ModuleSearchResult = Union[str, ModuleNotFoundReason] + + class BuildSource: """A single source file.""" @@ -72,7 +104,7 @@ def __init__(self, # search_paths -> (toplevel_id -> list(package_dirs)) self.initial_components = {} # type: Dict[Tuple[str, ...], Dict[str, List[str]]] # Cache find_module: id -> result - self.results = {} # type: Dict[str, Optional[str]] + self.results = {} # type: Dict[str, ModuleSearchResult] self.ns_ancestors = {} # type: Dict[str, str] self.options = options self.ns_packages = ns_packages or [] # type: List[str] @@ -128,20 +160,27 @@ def get_toplevel_possibilities(self, lib_path: Tuple[str, ...], id: str) -> List self.initial_components[lib_path] = components return components.get(id, []) - def find_module(self, id: str) -> Optional[str]: - """Return the path of the module source file, or None if not found.""" + def find_module(self, id: str) -> ModuleSearchResult: + """Return the path of the module source file or why it wasn't found.""" if id not in self.results: self.results[id] = self._find_module(id) return self.results[id] def _find_module_non_stub_helper(self, components: List[str], - pkg_dir: str) -> Optional[OnePackageDir]: + pkg_dir: str) -> Union[OnePackageDir, ModuleNotFoundReason]: + plausible_match = False dir_path = pkg_dir for index, component in enumerate(components): dir_path = os.path.join(dir_path, component) if self.fscache.isfile(os.path.join(dir_path, 'py.typed')): return os.path.join(pkg_dir, *components[:-1]), index == 0 - return None + elif not plausible_match and (self.fscache.isdir(dir_path) + or self.fscache.isfile(dir_path + ".py")): + plausible_match = True + if plausible_match: + return ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS + else: + return ModuleNotFoundReason.NOT_FOUND def _update_ns_ancestors(self, components: List[str], match: Tuple[str, bool]) -> None: path, verify = match @@ -151,7 +190,7 @@ def _update_ns_ancestors(self, components: List[str], match: Tuple[str, bool]) - 
self.ns_ancestors[pkg_id] = path path = os.path.dirname(path) - def _find_module(self, id: str) -> Optional[str]: + def _find_module(self, id: str) -> ModuleSearchResult: fscache = self.fscache # If we're looking for a module like 'foo.bar.baz', it's likely that most of the @@ -166,6 +205,7 @@ def _find_module(self, id: str) -> Optional[str]: # put them in the front of the search path third_party_inline_dirs = [] # type: PackageDirs third_party_stubs_dirs = [] # type: PackageDirs + found_possible_third_party_missing_type_hints = False # Third-party stub/typed packages for pkg_dir in self.search_paths.package_path: stub_name = components[0] + '-stubs' @@ -193,13 +233,17 @@ def _find_module(self, id: str) -> Optional[str]: else: third_party_stubs_dirs.append((path, True)) non_stub_match = self._find_module_non_stub_helper(components, pkg_dir) - if non_stub_match: + if isinstance(non_stub_match, ModuleNotFoundReason): + if non_stub_match is ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS: + found_possible_third_party_missing_type_hints = True + else: third_party_inline_dirs.append(non_stub_match) self._update_ns_ancestors(components, non_stub_match) if self.options and self.options.use_builtins_fixtures: # Everything should be in fixtures. third_party_inline_dirs.clear() third_party_stubs_dirs.clear() + found_possible_third_party_missing_type_hints = False python_mypy_path = self.search_paths.mypy_path + self.search_paths.python_path candidate_base_dirs = self.find_lib_path_dirs(id, python_mypy_path) + \ third_party_stubs_dirs + third_party_inline_dirs + \ @@ -279,11 +323,18 @@ def _find_module(self, id: str) -> Optional[str]: # installed package with a py.typed marker that is a # subpackage of a namespace package. We only fess up to these # if we would otherwise return "not found". 
- return self.ns_ancestors.get(id) + ancestor = self.ns_ancestors.get(id) + if ancestor is not None: + return ancestor + + if found_possible_third_party_missing_type_hints: + return ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS + else: + return ModuleNotFoundReason.NOT_FOUND def find_modules_recursive(self, module: str) -> List[BuildSource]: module_path = self.find_module(module) - if not module_path: + if isinstance(module_path, ModuleNotFoundReason): return [] result = [BuildSource(module_path, module, None)] if module_path.endswith(('__init__.py', '__init__.pyi')): diff --git a/mypy/moduleinfo.py b/mypy/moduleinfo.py index a710037788e9..9cf45784ff04 100644 --- a/mypy/moduleinfo.py +++ b/mypy/moduleinfo.py @@ -12,221 +12,9 @@ no stub for a module. """ -from typing import Set +from typing import Set, Tuple from typing_extensions import Final -third_party_modules = { - # From https://hugovk.github.io/top-pypi-packages/ - 'pip', - 'urllib3', - 'six', - 'botocore', - 'dateutil', - 's3transfer', - 'yaml', - 'requests', - 'pyasn1', - 'docutils', - 'jmespath', - 'certifi', - 'rsa', - 'setuptools', - 'idna', - 'awscli', - 'concurrent.futures', - 'colorama', - 'chardet', - 'wheel', - 'simplejson', - 'boto3', - 'pytz', - 'numpy', - 'markupsafe', - 'jinja2', - 'cffi', - 'cryptography', - 'google.protobuf', - 'cwlogs', - 'enum', - 'pycparser', - 'asn1crypto', - 'attr', - 'click', - 'ipaddress', - 'pytest', - 'future', - 'decorator', - 'pbr', - 'google.api', - 'pandas', - 'werkzeug', - 'pyparsing', - 'flask', - 'psutil', - 'itsdangerous', - 'google.cloud', - 'grpc', - 'cachetools', - 'virtualenv', - 'google.auth', - 'py', - 'pluggy', - 'scipy', - 'boto', - 'coverage', - 'mock', - 'OpenSSL', - 'sklearn', - 'jsonschema', - 'argparse', - 'more_itertools', - 'pygments', - 'psycopg2', - 'websocket', - 'PIL', - 'googleapiclient', - 'httplib2', - 'matplotlib', - 'oauth2client', - 'docopt', - 'tornado', - 'funcsigs', - 'lxml', - 'prompt_toolkit', - 'paramiko', - 'jwt', - 'IPython', - 
'docker', - 'dockerpycreds', - 'oauthlib', - 'mccabe', - 'google.resumable_media', - 'sqlalchemy', - 'wrapt', - 'bcrypt', - 'ptyprocess', - 'requests_oauthlib', - 'multidict', - 'markdown', - 'pexpect', - 'atomicwrites', - 'uritemplate', - 'nacl', - 'pycodestyle', - 'elasticsearch', - 'absl', - 'aiohttp', - 'redis', - 'sklearn', - 'gevent', - 'pymysql', - 'wcwidth', - 'tqdm', - 'bs4', - 'functools32', - 'configparser', - 'gunicorn', - 'typing', - 'ujson', - 'pyflakes', - 'packaging', - 'lazy_object_proxy', - 'ipython_genutils', - 'toolz', - 'async_timeout', - 'traitlets', - 'kiwisolver', - 'pathlib2', - 'greenlet', - 'networkx', - 'cv2', - 'termcolor', - 'babel', - 'django', - 'pymemcache', - 'skimage', - 'pickleshare', - 'flake8', - 'cycler', - 'requests_toolbelt', - 'bleach', - 'scandir', - 'selenium', - 'dask', - 'websockets', - 'isort', - 'h5py', - 'tabulate', - 'tensorflow', - 'html5lib', - 'pylint', - 'tensorboard', - 'compose', - 'astroid', - 'trueskill', - 'webencodings', - 'defusedxml', - 'pykube', - 'pymongo', - 'retrying', - 'cached_property', - 'zope', - 'singledispatch', - 'tzlocal', - 'datadog', - 'zmq', - 'discord', - 'apache_beam', - 'subprocess32', - 'astor', - 'entrypoints', - 'gast', - 'nose', - 'smmap', - 'gitdb', - 'isodate', - 'pywt', - 'simplegeneric', - 'sortedcontainers', - 'psycopg2', - 'pytest_cov', - 'hiredis', - 'elasticsearch_dsl', - 'dill', - 'keras', - 'contextlib2', - 'hdfs', - 'jupyter_core', - 'typed_ast', - 'croniter', - 'azure', - 'nbformat', - 'xmltodict', - 'lockfile', - 'arrow', - 'parso', - 'jsonpickle', - - # Skipped (name considered too generic): - # - fixtures - # - migrate (from sqlalchemy-migrate) - # - git (GitPython) - - # Other - 'formencode', - 'pkg_resources', - 'wx', - 'gi.repository', - 'pygtk', - 'gtk', - 'PyQt4', - 'PyQt5', - 'pylons', - - # for use in tests - '__dummy_third_party1', -} # type: Final - # Modules and packages common to Python 2.7 and 3.x. 
common_std_lib_modules = { 'abc', @@ -547,12 +335,14 @@ } # type: Final -def is_third_party_module(id: str) -> bool: - return is_in_module_collection(third_party_modules, id) - - -def is_py2_std_lib_module(id: str) -> bool: - return is_in_module_collection(python2_std_lib_modules, id) +def is_std_lib_module(python_version: Tuple[int, int], id: str) -> bool: + if python_version[0] == 2: + return is_in_module_collection(python2_std_lib_modules, id) + elif python_version[0] >= 3: + return is_in_module_collection(python3_std_lib_modules, id) + else: + # TODO: Raise an exception here? + return False def is_py3_std_lib_module(id: str) -> bool: diff --git a/mypy/stubgen.py b/mypy/stubgen.py index b86da770b4d3..75fa94e8e630 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -65,7 +65,9 @@ import mypy.mixedtraverser import mypy.util from mypy import defaults -from mypy.modulefinder import FindModuleCache, SearchPaths, BuildSource, default_lib_path +from mypy.modulefinder import ( + ModuleNotFoundReason, FindModuleCache, SearchPaths, BuildSource, default_lib_path +) from mypy.nodes import ( Expression, IntExpr, UnaryExpr, StrExpr, BytesExpr, NameExpr, FloatExpr, MemberExpr, TupleExpr, ListExpr, ComparisonExpr, CallExpr, IndexExpr, EllipsisExpr, @@ -1290,14 +1292,17 @@ def find_module_paths_using_search(modules: List[str], packages: List[str], search_paths = SearchPaths(('.',) + tuple(search_path), (), (), tuple(typeshed_path)) cache = FindModuleCache(search_paths) for module in modules: - module_path = cache.find_module(module) - if not module_path: - fail_missing(module) + m_result = cache.find_module(module) + if isinstance(m_result, ModuleNotFoundReason): + fail_missing(module, m_result) + module_path = None + else: + module_path = m_result result.append(StubSource(module, module_path)) for package in packages: p_result = cache.find_modules_recursive(package) - if not p_result: - fail_missing(package) + if p_result: + fail_missing(package, 
ModuleNotFoundReason.NOT_FOUND) sources = [StubSource(m.module, m.path) for m in p_result] result.extend(sources) diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 4c7cc815c08f..8273367ec218 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -936,7 +936,7 @@ def build_stubs(modules: List[str], options: Options, find_submodules: bool = Fa all_modules.append(module) if not find_submodules: module_path = find_module_cache.find_module(module) - if module_path is None: + if not isinstance(module_path, str): # test_module will yield an error later when it can't find stubs continue sources.append(mypy.modulefinder.BuildSource(module_path, module, None)) diff --git a/mypy/stubutil.py b/mypy/stubutil.py index bf5de6d607e2..51f9ef6e39ff 100644 --- a/mypy/stubutil.py +++ b/mypy/stubutil.py @@ -11,6 +11,7 @@ from typing_extensions import overload from mypy.moduleinspect import ModuleInspect, InspectError +from mypy.modulefinder import ModuleNotFoundReason # Modules that may fail when imported, or that may have side effects (fully qualified). 
@@ -195,8 +196,14 @@ def report_missing(mod: str, message: Optional[str] = '', traceback: str = '') - print('note: Try --py2 for Python 2 mode') -def fail_missing(mod: str) -> None: - raise SystemExit("Can't find module '{}' (consider using --search-path)".format(mod)) +def fail_missing(mod: str, reason: ModuleNotFoundReason) -> None: + if reason is ModuleNotFoundReason.NOT_FOUND: + clarification = "(consider using --search-path)" + elif reason is ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS: + clarification = "(module likely exists, but is not PEP 561 compatible)" + else: + clarification = "(unknown reason '{}')".format(reason) + raise SystemExit("Can't find module '{}' {}".format(mod, clarification)) @overload diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index 2747d1c034d1..f969fb338c1b 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -335,7 +335,7 @@ def parse_module(self, cache = FindModuleCache(search_paths) for module_name in module_names.split(' '): path = cache.find_module(module_name) - assert path is not None, "Can't find ad hoc case file" + assert isinstance(path, str), "Can't find ad hoc case file" with open(path, encoding='utf8') as f: program_text = f.read() out.append((module_name, path, program_text)) diff --git a/mypy/test/testmodulefinder.py b/mypy/test/testmodulefinder.py index ab1a0d8e67f4..58fb95943af1 100644 --- a/mypy/test/testmodulefinder.py +++ b/mypy/test/testmodulefinder.py @@ -1,7 +1,7 @@ import os from mypy.options import Options -from mypy.modulefinder import FindModuleCache, SearchPaths +from mypy.modulefinder import FindModuleCache, SearchPaths, ModuleNotFoundReason from mypy.test.helpers import Suite, assert_equal from mypy.test.config import package_path @@ -38,14 +38,14 @@ def test__no_namespace_packages__nsx(self) -> None: If namespace_packages is False, we shouldn't find nsx """ found_module = self.fmc_nons.find_module("nsx") - self.assertIsNone(found_module) + 
assert_equal(ModuleNotFoundReason.NOT_FOUND, found_module) def test__no_namespace_packages__nsx_a(self) -> None: """ If namespace_packages is False, we shouldn't find nsx.a. """ found_module = self.fmc_nons.find_module("nsx.a") - self.assertIsNone(found_module) + assert_equal(ModuleNotFoundReason.NOT_FOUND, found_module) def test__no_namespace_packages__find_a_in_pkg1(self) -> None: """ @@ -133,4 +133,131 @@ def test__find_b_init_in_pkg2(self) -> None: def test__find_d_nowhere(self) -> None: found_module = self.fmc_ns.find_module("d") - self.assertIsNone(found_module) + assert_equal(ModuleNotFoundReason.NOT_FOUND, found_module) + + +class ModuleFinderSitePackagesSuite(Suite): + + def setUp(self) -> None: + self.package_dir = os.path.relpath(os.path.join( + package_path, + "modulefinder-site-packages", + )) + self.search_paths = SearchPaths( + python_path=(), + mypy_path=( + os.path.join(data_path, "pkg1"), + ), + package_path=( + self.package_dir, + ), + typeshed_path=(), + ) + options = Options() + options.namespace_packages = True + self.fmc_ns = FindModuleCache(self.search_paths, options=options) + + options = Options() + options.namespace_packages = False + self.fmc_nons = FindModuleCache(self.search_paths, options=options) + + def path(self, *parts: str) -> str: + return os.path.join(self.package_dir, *parts) + + def test__packages_with_ns(self) -> None: + cases = [ + # Namespace package with py.typed + ("ns_pkg_typed", self.path("ns_pkg_typed")), + ("ns_pkg_typed.a", self.path("ns_pkg_typed", "a.py")), + ("ns_pkg_typed.b", self.path("ns_pkg_typed", "b")), + ("ns_pkg_typed.b.c", self.path("ns_pkg_typed", "b", "c.py")), + ("ns_pkg_typed.a.a_var", ModuleNotFoundReason.NOT_FOUND), + + # Namespace package without py.typed + ("ns_pkg_untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("ns_pkg_untyped.a", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("ns_pkg_untyped.b", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("ns_pkg_untyped.b.c", 
ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("ns_pkg_untyped.a.a_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + + # Regular package with py.typed + ("pkg_typed", self.path("pkg_typed", "__init__.py")), + ("pkg_typed.a", self.path("pkg_typed", "a.py")), + ("pkg_typed.b", self.path("pkg_typed", "b", "__init__.py")), + ("pkg_typed.b.c", self.path("pkg_typed", "b", "c.py")), + ("pkg_typed.a.a_var", ModuleNotFoundReason.NOT_FOUND), + + # Regular package without py.typed + ("pkg_untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("pkg_untyped.a", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("pkg_untyped.b", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("pkg_untyped.b.c", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("pkg_untyped.a.a_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + + # Top-level Python file in site-packages + ("standalone", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("standalone.standalone_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + + # Something that doesn't exist + ("does_not_exist", ModuleNotFoundReason.NOT_FOUND), + + # A regular package with an installed set of stubs + ("foo.bar", self.path("foo-stubs", "bar.pyi")), + + # A regular, non-site-packages module + ("a", os.path.join(data_path, "pkg1", "a.py")), + ] + for module, expected in cases: + template = "Find(" + module + ") got {}; expected {}" + + actual = self.fmc_ns.find_module(module) + assert_equal(actual, expected, template) + + def test__packages_without_ns(self) -> None: + cases = [ + # Namespace package with py.typed + ("ns_pkg_typed", ModuleNotFoundReason.NOT_FOUND), + ("ns_pkg_typed.a", ModuleNotFoundReason.NOT_FOUND), + ("ns_pkg_typed.b", ModuleNotFoundReason.NOT_FOUND), + ("ns_pkg_typed.b.c", ModuleNotFoundReason.NOT_FOUND), + ("ns_pkg_typed.a.a_var", ModuleNotFoundReason.NOT_FOUND), + + # Namespace package without py.typed + ("ns_pkg_untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("ns_pkg_untyped.a", 
ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("ns_pkg_untyped.b", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("ns_pkg_untyped.b.c", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("ns_pkg_untyped.a.a_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + + # Regular package with py.typed + ("pkg_typed", self.path("pkg_typed", "__init__.py")), + ("pkg_typed.a", self.path("pkg_typed", "a.py")), + ("pkg_typed.b", self.path("pkg_typed", "b", "__init__.py")), + ("pkg_typed.b.c", self.path("pkg_typed", "b", "c.py")), + ("pkg_typed.a.a_var", ModuleNotFoundReason.NOT_FOUND), + + # Regular package without py.typed + ("pkg_untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("pkg_untyped.a", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("pkg_untyped.b", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("pkg_untyped.b.c", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("pkg_untyped.a.a_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + + # Top-level Python file in site-packages + ("standalone", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + ("standalone.standalone_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), + + # Something that doesn't exist + ("does_not_exist", ModuleNotFoundReason.NOT_FOUND), + + # A regular package with an installed set of stubs + ("foo.bar", self.path("foo-stubs", "bar.pyi")), + + # A regular, non-site-packages module + ("a", os.path.join(data_path, "pkg1", "a.py")), + ] + for module, expected in cases: + template = "Find(" + module + ") got {}; expected {}" + + actual = self.fmc_nons.find_module(module) + assert_equal(actual, expected, template) diff --git a/test-data/packages/modulefinder-site-packages/foo-stubs/__init__.pyi b/test-data/packages/modulefinder-site-packages/foo-stubs/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test-data/packages/modulefinder-site-packages/foo-stubs/bar.pyi b/test-data/packages/modulefinder-site-packages/foo-stubs/bar.pyi new file mode 
100644 index 000000000000..bf896e8cdfa3 --- /dev/null +++ b/test-data/packages/modulefinder-site-packages/foo-stubs/bar.pyi @@ -0,0 +1 @@ +bar_var: str \ No newline at end of file diff --git a/test-data/packages/modulefinder-site-packages/foo/__init__.py b/test-data/packages/modulefinder-site-packages/foo/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test-data/packages/modulefinder-site-packages/foo/bar.py b/test-data/packages/modulefinder-site-packages/foo/bar.py new file mode 100644 index 000000000000..a1c3b50eeeab --- /dev/null +++ b/test-data/packages/modulefinder-site-packages/foo/bar.py @@ -0,0 +1 @@ +bar_var = "bar" \ No newline at end of file diff --git a/test-data/packages/modulefinder-site-packages/ns_pkg_typed/a.py b/test-data/packages/modulefinder-site-packages/ns_pkg_typed/a.py new file mode 100644 index 000000000000..9d71311c4d82 --- /dev/null +++ b/test-data/packages/modulefinder-site-packages/ns_pkg_typed/a.py @@ -0,0 +1 @@ +a_var = "a" \ No newline at end of file diff --git a/test-data/packages/modulefinder-site-packages/ns_pkg_typed/b/c.py b/test-data/packages/modulefinder-site-packages/ns_pkg_typed/b/c.py new file mode 100644 index 000000000000..003a29a2ef67 --- /dev/null +++ b/test-data/packages/modulefinder-site-packages/ns_pkg_typed/b/c.py @@ -0,0 +1 @@ +c_var = "c" \ No newline at end of file diff --git a/test-data/packages/modulefinder-site-packages/ns_pkg_typed/py.typed b/test-data/packages/modulefinder-site-packages/ns_pkg_typed/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test-data/packages/modulefinder-site-packages/ns_pkg_untyped/a.py b/test-data/packages/modulefinder-site-packages/ns_pkg_untyped/a.py new file mode 100644 index 000000000000..9d71311c4d82 --- /dev/null +++ b/test-data/packages/modulefinder-site-packages/ns_pkg_untyped/a.py @@ -0,0 +1 @@ +a_var = "a" \ No newline at end of file diff --git a/test-data/packages/modulefinder-site-packages/ns_pkg_untyped/b/c.py 
b/test-data/packages/modulefinder-site-packages/ns_pkg_untyped/b/c.py new file mode 100644 index 000000000000..003a29a2ef67 --- /dev/null +++ b/test-data/packages/modulefinder-site-packages/ns_pkg_untyped/b/c.py @@ -0,0 +1 @@ +c_var = "c" \ No newline at end of file diff --git a/test-data/packages/modulefinder-site-packages/pkg_typed/__init__.py b/test-data/packages/modulefinder-site-packages/pkg_typed/__init__.py new file mode 100644 index 000000000000..88ed99fb525e --- /dev/null +++ b/test-data/packages/modulefinder-site-packages/pkg_typed/__init__.py @@ -0,0 +1 @@ +pkg_typed_var = "pkg_typed" \ No newline at end of file diff --git a/test-data/packages/modulefinder-site-packages/pkg_typed/a.py b/test-data/packages/modulefinder-site-packages/pkg_typed/a.py new file mode 100644 index 000000000000..9d71311c4d82 --- /dev/null +++ b/test-data/packages/modulefinder-site-packages/pkg_typed/a.py @@ -0,0 +1 @@ +a_var = "a" \ No newline at end of file diff --git a/test-data/packages/modulefinder-site-packages/pkg_typed/b/__init__.py b/test-data/packages/modulefinder-site-packages/pkg_typed/b/__init__.py new file mode 100644 index 000000000000..de0052886c57 --- /dev/null +++ b/test-data/packages/modulefinder-site-packages/pkg_typed/b/__init__.py @@ -0,0 +1 @@ +b_var = "b" \ No newline at end of file diff --git a/test-data/packages/modulefinder-site-packages/pkg_typed/b/c.py b/test-data/packages/modulefinder-site-packages/pkg_typed/b/c.py new file mode 100644 index 000000000000..003a29a2ef67 --- /dev/null +++ b/test-data/packages/modulefinder-site-packages/pkg_typed/b/c.py @@ -0,0 +1 @@ +c_var = "c" \ No newline at end of file diff --git a/test-data/packages/modulefinder-site-packages/pkg_typed/py.typed b/test-data/packages/modulefinder-site-packages/pkg_typed/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test-data/packages/modulefinder-site-packages/pkg_untyped/__init__.py b/test-data/packages/modulefinder-site-packages/pkg_untyped/__init__.py 
new file mode 100644 index 000000000000..c7ff39c11179 --- /dev/null +++ b/test-data/packages/modulefinder-site-packages/pkg_untyped/__init__.py @@ -0,0 +1 @@ +pkg_untyped_var = "pkg_untyped" \ No newline at end of file diff --git a/test-data/packages/modulefinder-site-packages/pkg_untyped/a.py b/test-data/packages/modulefinder-site-packages/pkg_untyped/a.py new file mode 100644 index 000000000000..9d71311c4d82 --- /dev/null +++ b/test-data/packages/modulefinder-site-packages/pkg_untyped/a.py @@ -0,0 +1 @@ +a_var = "a" \ No newline at end of file diff --git a/test-data/packages/modulefinder-site-packages/pkg_untyped/b/__init__.py b/test-data/packages/modulefinder-site-packages/pkg_untyped/b/__init__.py new file mode 100644 index 000000000000..de0052886c57 --- /dev/null +++ b/test-data/packages/modulefinder-site-packages/pkg_untyped/b/__init__.py @@ -0,0 +1 @@ +b_var = "b" \ No newline at end of file diff --git a/test-data/packages/modulefinder-site-packages/pkg_untyped/b/c.py b/test-data/packages/modulefinder-site-packages/pkg_untyped/b/c.py new file mode 100644 index 000000000000..003a29a2ef67 --- /dev/null +++ b/test-data/packages/modulefinder-site-packages/pkg_untyped/b/c.py @@ -0,0 +1 @@ +c_var = "c" \ No newline at end of file diff --git a/test-data/packages/modulefinder-site-packages/standalone.py b/test-data/packages/modulefinder-site-packages/standalone.py new file mode 100644 index 000000000000..35b38168f25e --- /dev/null +++ b/test-data/packages/modulefinder-site-packages/standalone.py @@ -0,0 +1 @@ +standalone_var = "standalone" \ No newline at end of file diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index c0e3a9782b5e..4104d1d2f222 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -497,10 +497,9 @@ if int() is str(): # E: Non-overlapping identity check (left operand type: "int [builtins fixtures/primitives.pyi] [case testErrorCodeMissingModule] -from defusedxml 
import xyz # E: No library stub file for module 'defusedxml' [import] \ - # N: (Stub files are from https://github.com/python/typeshed) -from nonexistent import foobar # E: Cannot find implementation or library stub for module named 'nonexistent' [import] \ - # N: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports +from defusedxml import xyz # E: Cannot find implementation or library stub for module named 'defusedxml' [import] \ + # N: See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports +from nonexistent import foobar # E: Cannot find implementation or library stub for module named 'nonexistent' [import] import nonexistent2 # E: Cannot find implementation or library stub for module named 'nonexistent2' [import] from nonexistent3 import * # E: Cannot find implementation or library stub for module named 'nonexistent3' [import] from pkg import bad # E: Module 'pkg' has no attribute 'bad' [attr-defined] diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test index f92a1a5e338f..afd39122f99e 100644 --- a/test-data/unit/semanal-errors.test +++ b/test-data/unit/semanal-errors.test @@ -1294,12 +1294,6 @@ x = 1 y = 1 [out] -[case testMissingStubForThirdPartyModule] -import __dummy_third_party1 -[out] -main:1: error: No library stub file for module '__dummy_third_party1' -main:1: note: (Stub files are from https://github.com/python/typeshed) - [case testMissingStubForStdLibModule] import __dummy_stdlib1 [out] From 09cdab4ffd34f89cc02f512fc35fdef57cb4c267 Mon Sep 17 00:00:00 2001 From: Shantanu Date: Tue, 25 Feb 2020 09:39:16 -0800 Subject: [PATCH 111/117] mypy: remove has_member (#8438) In particular: - The test case mentioned in the code passes without it - The test case changed seems to have more desirable behaviour now, consider: ``` from typing import Any """ class C: def __radd__(self, other) -> float: return 1.234 """ C: Any class D(C): pass reveal_type("str" + D()) ``` --- mypy/checkexpr.py 
| 46 ------------------------------- mypy/typeops.py | 2 +- test-data/unit/check-classes.test | 2 +- 3 files changed, 2 insertions(+), 48 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 310aac82b13c..0141271eccba 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2410,15 +2410,6 @@ def lookup_operator(op_name: str, base_type: Type) -> Optional[Type]: """Looks up the given operator and returns the corresponding type, if it exists.""" local_errors = make_local_errors() - - # TODO: Remove this call and rely just on analyze_member_access - # Currently, it seems we still need this to correctly deal with - # things like metaclasses? - # - # E.g. see the pythoneval.testMetaclassOpAccessAny test case. - if not self.has_member(base_type, op_name): - return None - member = analyze_member_access( name=op_name, typ=base_type, @@ -3799,43 +3790,6 @@ def is_valid_keyword_var_arg(self, typ: Type) -> bool: [self.named_type('builtins.unicode'), AnyType(TypeOfAny.special_form)]))) - def has_member(self, typ: Type, member: str) -> bool: - """Does type have member with the given name?""" - # TODO: refactor this to use checkmember.analyze_member_access, otherwise - # these two should be carefully kept in sync. - typ = get_proper_type(typ) - - if isinstance(typ, TypeVarType): - typ = get_proper_type(typ.upper_bound) - if isinstance(typ, TupleType): - typ = tuple_fallback(typ) - if isinstance(typ, LiteralType): - typ = typ.fallback - if isinstance(typ, Instance): - return typ.type.has_readable_member(member) - if isinstance(typ, CallableType) and typ.is_type_obj(): - return typ.fallback.type.has_readable_member(member) - elif isinstance(typ, AnyType): - return True - elif isinstance(typ, UnionType): - result = all(self.has_member(x, member) for x in typ.relevant_items()) - return result - elif isinstance(typ, TypeType): - # Type[Union[X, ...]] is always normalized to Union[Type[X], ...], - # so we don't need to care about unions here. 
- item = typ.item - if isinstance(item, TypeVarType): - item = get_proper_type(item.upper_bound) - if isinstance(item, TupleType): - item = tuple_fallback(item) - if isinstance(item, Instance) and item.type.metaclass_type is not None: - return self.has_member(item.type.metaclass_type, member) - if isinstance(item, AnyType): - return True - return False - else: - return False - def not_ready_callback(self, name: str, context: Context) -> None: """Called when we can't infer the type of a variable because it's not ready yet. diff --git a/mypy/typeops.py b/mypy/typeops.py index 828791333f36..d143588aada3 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -719,5 +719,5 @@ def custom_special_method(typ: Type, name: str, check_all: bool = False) -> bool if isinstance(typ, AnyType): # Avoid false positives in uncertain cases. return True - # TODO: support other types (see ExpressionChecker.has_member())? + # TODO: support other types (see analyze_member_access)? return False diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index ed547510b46c..e924402c8614 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -6650,7 +6650,7 @@ reveal_type(0.5 + C) # N: Revealed type is 'Any' reveal_type(0.5 + D()) # N: Revealed type is 'Any' reveal_type(D() + 0.5) # N: Revealed type is 'Any' -reveal_type("str" + D()) # N: Revealed type is 'builtins.str' +reveal_type("str" + D()) # N: Revealed type is 'Any' reveal_type(D() + "str") # N: Revealed type is 'Any' From d1281583240d9cc4536901d02ce1288318aea812 Mon Sep 17 00:00:00 2001 From: Ivan Levkivskyi Date: Thu, 27 Feb 2020 08:44:09 +0000 Subject: [PATCH 112/117] Sync typeshed (#8448) --- mypy/typeshed | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/typeshed b/mypy/typeshed index 47409f3e254a..ca1ca0c14f65 160000 --- a/mypy/typeshed +++ b/mypy/typeshed @@ -1 +1 @@ -Subproject commit 47409f3e254afc69bbff902fe490f00e44400fd0 +Subproject commit 
ca1ca0c14f65f13f778f1b8e91eade7a6c84498d From ef0b0dff8017ed69f31c6095a4897940aec3261a Mon Sep 17 00:00:00 2001 From: Mukuntha N S Date: Thu, 27 Feb 2020 22:19:21 +0000 Subject: [PATCH 113/117] Use fully qualified names for ambiguous class names resembling builtins. (#8425) --- mypy/messages.py | 18 ++++++++++++++++++ mypy/semanal.py | 19 +++---------------- test-data/unit/check-basic.test | 20 ++++++++++++++++++++ 3 files changed, 41 insertions(+), 16 deletions(-) diff --git a/mypy/messages.py b/mypy/messages.py index 14e1b146a82b..2e0d0be35dae 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -42,6 +42,21 @@ from mypy.errorcodes import ErrorCode from mypy import message_registry, errorcodes as codes +TYPES_FOR_UNIMPORTED_HINTS = { + 'typing.Any', + 'typing.Callable', + 'typing.Dict', + 'typing.Iterable', + 'typing.Iterator', + 'typing.List', + 'typing.Optional', + 'typing.Set', + 'typing.Tuple', + 'typing.TypeVar', + 'typing.Union', + 'typing.cast', +} # type: Final + ARG_CONSTRUCTOR_NAMES = { ARG_POS: "Arg", @@ -1720,6 +1735,9 @@ def find_type_overlaps(*types: Type) -> Set[str]: for type in types: for inst in collect_all_instances(type): d.setdefault(inst.type.name, set()).add(inst.type.fullname) + for shortname in d.keys(): + if 'typing.{}'.format(shortname) in TYPES_FOR_UNIMPORTED_HINTS: + d[shortname].add('typing.{}'.format(shortname)) overlaps = set() # type: Set[str] for fullnames in d.values(): diff --git a/mypy/semanal.py b/mypy/semanal.py index be455a737202..86edbea12a5e 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -81,7 +81,9 @@ from mypy.typevars import fill_typevars from mypy.visitor import NodeVisitor from mypy.errors import Errors, report_internal_error -from mypy.messages import best_matches, MessageBuilder, pretty_seq, SUGGESTED_TEST_FIXTURES +from mypy.messages import ( + best_matches, MessageBuilder, pretty_seq, SUGGESTED_TEST_FIXTURES, TYPES_FOR_UNIMPORTED_HINTS +) from mypy.errorcodes import ErrorCode from mypy import 
message_registry, errorcodes as codes from mypy.types import ( @@ -120,21 +122,6 @@ T = TypeVar('T') -TYPES_FOR_UNIMPORTED_HINTS = { - 'typing.Any', - 'typing.Callable', - 'typing.Dict', - 'typing.Iterable', - 'typing.Iterator', - 'typing.List', - 'typing.Optional', - 'typing.Set', - 'typing.Tuple', - 'typing.TypeVar', - 'typing.Union', - 'typing.cast', -} # type: Final - # Special cased built-in classes that are needed for basic functionality and need to be # available very early on. diff --git a/test-data/unit/check-basic.test b/test-data/unit/check-basic.test index 4939c2d5be93..db605cf185e5 100644 --- a/test-data/unit/check-basic.test +++ b/test-data/unit/check-basic.test @@ -75,6 +75,26 @@ if int(): x = 1 +[case testIncompatibleAssignmentAmbiguousShortnames] + +class Any: pass +class List: pass +class Dict: pass +class Iterator: pass + +x = Any() +x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "__main__.Any") + +y = List() +y = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "__main__.List") + +z = Dict() +z = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "__main__.Dict") + +w = Iterator() +w = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "__main__.Iterator") + + -- Simple functions and calling -- ---------------------------- From 30c46ab48fa224fa0b19f9a28b08ef5b12ba3a70 Mon Sep 17 00:00:00 2001 From: "Michael J. Sullivan" Date: Fri, 28 Feb 2020 10:21:20 -0800 Subject: [PATCH 114/117] Properly track module_hidden and module_public for incomplete symbols (#8450) This fixes some crash bugs involving import * from an import cycle. 
--- mypy/fixup.py | 8 +++---- mypy/nodes.py | 1 + mypy/semanal.py | 32 ++++++++++++++++++--------- test-data/unit/check-incremental.test | 23 +++++++++++++++++++ test-data/unit/fine-grained.test | 22 ++++++++++++++++++ 5 files changed, 71 insertions(+), 15 deletions(-) diff --git a/mypy/fixup.py b/mypy/fixup.py index e3555b9ba7f3..023df1e31331 100644 --- a/mypy/fixup.py +++ b/mypy/fixup.py @@ -22,7 +22,7 @@ def fixup_module(tree: MypyFile, modules: Dict[str, MypyFile], allow_missing: bool) -> None: node_fixer = NodeFixer(modules, allow_missing) - node_fixer.visit_symbol_table(tree.names) + node_fixer.visit_symbol_table(tree.names, tree.fullname) # TODO: Fix up .info when deserializing, i.e. much earlier. @@ -42,7 +42,7 @@ def visit_type_info(self, info: TypeInfo) -> None: if info.defn: info.defn.accept(self) if info.names: - self.visit_symbol_table(info.names) + self.visit_symbol_table(info.names, info.fullname) if info.bases: for base in info.bases: base.accept(self.type_fixer) @@ -64,7 +64,7 @@ def visit_type_info(self, info: TypeInfo) -> None: self.current_info = save_info # NOTE: This method *definitely* isn't part of the NodeVisitor API. - def visit_symbol_table(self, symtab: SymbolTable) -> None: + def visit_symbol_table(self, symtab: SymbolTable, table_fullname: str) -> None: # Copy the items because we may mutate symtab. for key, value in list(symtab.items()): cross_ref = value.cross_ref @@ -76,7 +76,7 @@ def visit_symbol_table(self, symtab: SymbolTable) -> None: stnode = lookup_qualified_stnode(self.modules, cross_ref, self.allow_missing) if stnode is not None: - assert stnode.node is not None + assert stnode.node is not None, (table_fullname + "." 
+ key, cross_ref) value.node = stnode.node elif not self.allow_missing: assert False, "Could not find cross-ref %s" % (cross_ref,) diff --git a/mypy/nodes.py b/mypy/nodes.py index e24a8887dd01..dd3d0f390340 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -3030,6 +3030,7 @@ def serialize(self, prefix: str, name: str) -> JsonDict: and fullname != prefix + '.' + name and not (isinstance(self.node, Var) and self.node.from_module_getattr)): + assert not isinstance(self.node, PlaceholderNode) data['cross_ref'] = fullname return data data['node'] = self.node.serialize() diff --git a/mypy/semanal.py b/mypy/semanal.py index 86edbea12a5e..81f03858af6a 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -1742,13 +1742,25 @@ def process_imported_symbol(self, fullname: str, context: ImportBase) -> None: imported_id = as_id or id + # 'from m import x as x' exports x in a stub file or when implicit + # re-exports are disabled. + module_public = ( + not self.is_stub_file + and self.options.implicit_reexport + or as_id is not None + ) + module_hidden = not module_public and fullname not in self.modules + if isinstance(node.node, PlaceholderNode): if self.final_iteration: self.report_missing_module_attribute(module_id, id, imported_id, context) return else: # This might become a type. - self.mark_incomplete(imported_id, node.node, becomes_typeinfo=True) + self.mark_incomplete(imported_id, node.node, + module_public=module_public, + module_hidden=module_hidden, + becomes_typeinfo=True) existing_symbol = self.globals.get(imported_id) if (existing_symbol and not isinstance(existing_symbol.node, PlaceholderNode) and not isinstance(node.node, PlaceholderNode)): @@ -1760,14 +1772,6 @@ def process_imported_symbol(self, # Imports are special, some redefinitions are allowed, so wait until # we know what is the new symbol node. return - # 'from m import x as x' exports x in a stub file or when implicit - # re-exports are disabled. 
- module_public = ( - not self.is_stub_file - and self.options.implicit_reexport - or as_id is not None - ) - module_hidden = not module_public and fullname not in self.modules # NOTE: we take the original node even for final `Var`s. This is to support # a common pattern when constants are re-exported (same applies to import *). self.add_imported_symbol(imported_id, node, context, @@ -1866,6 +1870,7 @@ def visit_import_all(self, i: ImportAll) -> None: self.add_imported_symbol(name, node, i, module_public=module_public, module_hidden=not module_public) + else: # Don't add any dummy symbols for 'from x import *' if 'x' is unknown. pass @@ -4338,6 +4343,7 @@ def add_imported_symbol(self, module_public: bool = True, module_hidden: bool = False) -> None: """Add an alias to an existing symbol through import.""" + assert not module_hidden or not module_public symbol = SymbolTableNode(node.kind, node.node, module_public=module_public, module_hidden=module_hidden) @@ -4421,7 +4427,9 @@ def record_incomplete_ref(self) -> None: self.num_incomplete_refs += 1 def mark_incomplete(self, name: str, node: Node, - becomes_typeinfo: bool = False) -> None: + becomes_typeinfo: bool = False, + module_public: bool = True, + module_hidden: bool = False) -> None: """Mark a definition as incomplete (and defer current analysis target). Also potentially mark the current namespace as incomplete. 
@@ -4440,7 +4448,9 @@ def mark_incomplete(self, name: str, node: Node, assert self.statement placeholder = PlaceholderNode(fullname, node, self.statement.line, becomes_typeinfo=becomes_typeinfo) - self.add_symbol(name, placeholder, context=dummy_context()) + self.add_symbol(name, placeholder, + module_public=module_public, module_hidden=module_hidden, + context=dummy_context()) self.missing_names.add(name) def is_incomplete_namespace(self, fullname: str) -> bool: diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index aac3d37a0716..6d03759dec29 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -5361,3 +5361,26 @@ reveal_type(z) tmp/c.py:2: note: Revealed type is 'a.A' [out2] tmp/c.py:2: note: Revealed type is 'a.' + +[case testStubFixupIssues] +import a +[file a.py] +import p +[file a.py.2] +import p +p.N + +[file p/__init__.pyi] +from p.util import * + +[file p/util.pyi] +from p.params import N +class Test: ... +x: N + +[file p/params.pyi] +import p.util +class N(p.util.Test): + ... +[out2] +tmp/a.py:2: error: "object" has no attribute "N" diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index ad8357f3d4e9..9d27be50328c 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -9572,3 +9572,25 @@ c.py:2: note: Revealed type is 'a.A' == c.py:2: note: Revealed type is 'a.' +[case testStubFixupIssues] +[file a.py] +import p +[file a.py.2] +import p +# a change + +[file p/__init__.pyi] +from p.util import * + +[file p/util.pyi] +from p.params import N +class Test: ... + +[file p/params.pyi] +import p.util +class N(p.util.Test): + ... + +[builtins fixtures/list.pyi] +[out] +== From dc0d35f043c9d1405e1aea51ca293109ae2f1eef Mon Sep 17 00:00:00 2001 From: "Michael J. 
Sullivan" Date: Fri, 6 Mar 2020 01:45:06 -0800 Subject: [PATCH 115/117] Revert "mypy: remove has_member (#8438)" (#8500) It turns out that the has_member check is an important (accidental?) performance optimization. Removing this caused a major (30+%?) slowdown at dropbox. There might be a better way to optimize this but I'm just going to revert it for now at least. This reverts commit 09cdab4ffd34f89cc02f512fc35fdef57cb4c267. --- mypy/checkexpr.py | 48 +++++++++++++++++++++++++++++++ mypy/typeops.py | 2 +- test-data/unit/check-classes.test | 2 +- 3 files changed, 50 insertions(+), 2 deletions(-) diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 0141271eccba..0118080c1c00 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -2409,7 +2409,16 @@ def make_local_errors() -> MessageBuilder: def lookup_operator(op_name: str, base_type: Type) -> Optional[Type]: """Looks up the given operator and returns the corresponding type, if it exists.""" + + # This check is an important performance optimization, + # even though it is mostly a subset of + # analyze_member_access. + # TODO: Find a way to remove this call without performance implications. + if not self.has_member(base_type, op_name): + return None + local_errors = make_local_errors() + member = analyze_member_access( name=op_name, typ=base_type, @@ -3790,6 +3799,45 @@ def is_valid_keyword_var_arg(self, typ: Type) -> bool: [self.named_type('builtins.unicode'), AnyType(TypeOfAny.special_form)]))) + def has_member(self, typ: Type, member: str) -> bool: + """Does type have member with the given name?""" + # TODO: refactor this to use checkmember.analyze_member_access, otherwise + # these two should be carefully kept in sync. + # This is much faster than analyze_member_access, though, and so using + # it first as a filter is important for performance. 
+ typ = get_proper_type(typ) + + if isinstance(typ, TypeVarType): + typ = get_proper_type(typ.upper_bound) + if isinstance(typ, TupleType): + typ = tuple_fallback(typ) + if isinstance(typ, LiteralType): + typ = typ.fallback + if isinstance(typ, Instance): + return typ.type.has_readable_member(member) + if isinstance(typ, CallableType) and typ.is_type_obj(): + return typ.fallback.type.has_readable_member(member) + elif isinstance(typ, AnyType): + return True + elif isinstance(typ, UnionType): + result = all(self.has_member(x, member) for x in typ.relevant_items()) + return result + elif isinstance(typ, TypeType): + # Type[Union[X, ...]] is always normalized to Union[Type[X], ...], + # so we don't need to care about unions here. + item = typ.item + if isinstance(item, TypeVarType): + item = get_proper_type(item.upper_bound) + if isinstance(item, TupleType): + item = tuple_fallback(item) + if isinstance(item, Instance) and item.type.metaclass_type is not None: + return self.has_member(item.type.metaclass_type, member) + if isinstance(item, AnyType): + return True + return False + else: + return False + def not_ready_callback(self, name: str, context: Context) -> None: """Called when we can't infer the type of a variable because it's not ready yet. diff --git a/mypy/typeops.py b/mypy/typeops.py index d143588aada3..828791333f36 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -719,5 +719,5 @@ def custom_special_method(typ: Type, name: str, check_all: bool = False) -> bool if isinstance(typ, AnyType): # Avoid false positives in uncertain cases. return True - # TODO: support other types (see analyze_member_access)? + # TODO: support other types (see ExpressionChecker.has_member())? 
return False diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index e924402c8614..ed547510b46c 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -6650,7 +6650,7 @@ reveal_type(0.5 + C) # N: Revealed type is 'Any' reveal_type(0.5 + D()) # N: Revealed type is 'Any' reveal_type(D() + 0.5) # N: Revealed type is 'Any' -reveal_type("str" + D()) # N: Revealed type is 'Any' +reveal_type("str" + D()) # N: Revealed type is 'builtins.str' reveal_type(D() + "str") # N: Revealed type is 'Any' From c4d85549b18faed622f0d0d776ca7d8a9fa343a3 Mon Sep 17 00:00:00 2001 From: "Michael J. Sullivan" Date: Fri, 28 Feb 2020 15:42:30 -0800 Subject: [PATCH 116/117] Support narrowing of walrus in most cases (#8458) It is a pretty simple matter of pulling out the assignment target from the walrus. We don't bother handling things like `x := (y := z)` since I can't imagine they are common enough to be worth bothering but we could in the future if anyone cares. Fixes #8447. 
--- mypy/checker.py | 19 +++++++++++++++---- test-data/unit/check-python38.test | 19 ++++++++++++++++--- 2 files changed, 31 insertions(+), 7 deletions(-) diff --git a/mypy/checker.py b/mypy/checker.py index f4f466bb6ba8..5ec156d8aacd 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -3924,10 +3924,12 @@ def find_isinstance_check_helper(self, node: Expression) -> Tuple[TypeMap, TypeM elif is_false_literal(node): return None, {} elif isinstance(node, CallExpr): + if len(node.args) == 0: + return {}, {} + expr = collapse_walrus(node.args[0]) if refers_to_fullname(node.callee, 'builtins.isinstance'): if len(node.args) != 2: # the error will be reported elsewhere return {}, {} - expr = node.args[0] if literal(expr) == LITERAL_TYPE: return self.conditional_type_map_with_intersection( expr, @@ -3937,13 +3939,11 @@ def find_isinstance_check_helper(self, node: Expression) -> Tuple[TypeMap, TypeM elif refers_to_fullname(node.callee, 'builtins.issubclass'): if len(node.args) != 2: # the error will be reported elsewhere return {}, {} - expr = node.args[0] if literal(expr) == LITERAL_TYPE: return self.infer_issubclass_maps(node, expr, type_map) elif refers_to_fullname(node.callee, 'builtins.callable'): if len(node.args) != 1: # the error will be reported elsewhere return {}, {} - expr = node.args[0] if literal(expr) == LITERAL_TYPE: vartype = type_map[expr] return self.conditional_callable_type_map(expr, vartype) @@ -3952,7 +3952,7 @@ def find_isinstance_check_helper(self, node: Expression) -> Tuple[TypeMap, TypeM # narrow their types. (For example, we shouldn't try narrowing the # types of literal string or enum expressions). 
- operands = node.operands + operands = [collapse_walrus(x) for x in node.operands] operand_types = [] narrowable_operand_index_to_hash = {} for i, expr in enumerate(operands): @@ -5742,3 +5742,14 @@ def has_bool_item(typ: ProperType) -> bool: return any(is_named_instance(item, 'builtins.bool') for item in typ.items) return False + + +def collapse_walrus(e: Expression) -> Expression: + """If an expression is an AssignmentExpr, pull out the assignment target. + + We don't make any attempt to pull out all the targets in code like `x := (y := z)`. + We could support narrowing those if that sort of code turns out to be common. + """ + if isinstance(e, AssignmentExpr): + return e.target + return e diff --git a/test-data/unit/check-python38.test b/test-data/unit/check-python38.test index 98eda306c731..12a060525820 100644 --- a/test-data/unit/check-python38.test +++ b/test-data/unit/check-python38.test @@ -189,7 +189,7 @@ def f(p1: bytes, p2: float, /) -> None: [case testWalrus] # flags: --strict-optional -from typing import NamedTuple, Optional +from typing import NamedTuple, Optional, List from typing_extensions import Final if a := 2: @@ -288,10 +288,23 @@ def check_partial() -> None: reveal_type(x) # N: Revealed type is 'Union[builtins.int, None]' -def check_narrow(x: Optional[int]) -> None: +def check_narrow(x: Optional[int], s: List[int]) -> None: if (y := x): reveal_type(y) # N: Revealed type is 'builtins.int' -[builtins fixtures/f_string.pyi] + + if (y := x) is not None: + reveal_type(y) # N: Revealed type is 'builtins.int' + + if (y := x) == 10: + reveal_type(y) # N: Revealed type is 'builtins.int' + + if (y := x) in s: + reveal_type(y) # N: Revealed type is 'builtins.int' + + if isinstance((y := x), int): + reveal_type(y) # N: Revealed type is 'builtins.int' + +[builtins fixtures/isinstancelist.pyi] [case testWalrusPartialTypes] from typing import List From 92e3f396b03c69c1d8abc249632bfd2db02350d0 Mon Sep 17 00:00:00 2001 From: "Michael J. 
Sullivan" Date: Mon, 9 Mar 2020 12:00:31 -0700 Subject: [PATCH 117/117] Bump version to 0.770. --- mypy/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mypy/version.py b/mypy/version.py index 81a8cfca378b..a75857d23827 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -5,7 +5,7 @@ # - Release versions have the form "0.NNN". # - Dev versions have the form "0.NNN+dev" (PLUS sign to conform to PEP 440). # - For 1.0 we'll switch back to 1.2.3 form. -__version__ = '0.770+dev' +__version__ = '0.770' base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))