From 2bc24f3c6fc5283b33574d953253b2a30b5de0d5 Mon Sep 17 00:00:00 2001
From: quartox
Date: Tue, 23 May 2017 17:13:55 -0700
Subject: [PATCH 01/27] New message for anystr errors

---
 mypy/applytype.py |  4 ++--
 mypy/messages.py  | 13 +++++++++++++
 2 files changed, 15 insertions(+), 2 deletions(-)

diff --git a/mypy/applytype.py b/mypy/applytype.py
index 6d2f3a9a1de0..e6df595d9ea6 100644
--- a/mypy/applytype.py
+++ b/mypy/applytype.py
@@ -38,8 +38,8 @@ def apply_generic_arguments(callable: CallableType, types: List[Type],
                     types[i] = value
                     break
             else:
-                msg.incompatible_typevar_value(callable, i + 1, type, context)
-
+                if not msg.incompatible_anystr_arguments(callable, type, context):
+                    msg.incompatible_typevar_value(callable, i + 1, type, context)
         upper_bound = callable.variables[i].upper_bound
         if (type and not isinstance(type, PartialType) and
                 not mypy.subtypes.is_subtype(type, upper_bound)):
diff --git a/mypy/messages.py b/mypy/messages.py
index 8dbf1dbeff95..84ffa787db22 100644
--- a/mypy/messages.py
+++ b/mypy/messages.py
@@ -819,6 +819,19 @@ def incompatible_typevar_value(self, callee: CallableType, index: int,
         self.fail('Type argument {} of {} has incompatible value {}'.format(
             index, callable_name(callee), self.format(type)), context)

+    def incompatible_anystr_arguments(self, callee: CallableType, type: Type,
+                                      context: Context) -> bool:
+        arg_types = tuple(type.name for type in callee.arg_types)
+        if (arg_types.count('AnyStr') > 1 and isinstance(type, Instance) and
+                type.type.fullname() == 'builtins.object'):
+            self.fail(
+                'Type arguments of {} have incompatible values with expected {}'.format(
+                    callable_name(callee), arg_types), context)
+            self.note('"AnyStr" arguments must be all "str" or all "byte"', context)
+            return True
+        else:
+            return False
+
     def overloaded_signatures_overlap(self, index1: int, index2: int,
                                       context: Context) -> None:
         self.fail('Overloaded function signatures {} and {} overlap with '

From 36e66dee04fd845fb6711270a37a8fa2447cb15e Mon Sep 17 00:00:00 2001
From: quartox
Date: Tue, 23 May 2017 20:11:43 -0700
Subject: [PATCH 02/27] Fix logic path of anystr args

---
 mypy/applytype.py | 27 +++++++++++++++++++++++++--
 mypy/messages.py  | 17 +++++------------
 2 files changed, 30 insertions(+), 14 deletions(-)

diff --git a/mypy/applytype.py b/mypy/applytype.py
index e6df595d9ea6..4131ff48e4ed 100644
--- a/mypy/applytype.py
+++ b/mypy/applytype.py
@@ -3,7 +3,7 @@
 import mypy.subtypes
 from mypy.sametypes import is_same_type
 from mypy.expandtype import expand_type
-from mypy.types import Type, TypeVarId, TypeVarType, CallableType, AnyType, PartialType
+from mypy.types import Type, TypeVarId, TypeVarType, CallableType, AnyType, PartialType, Instance
 from mypy.messages import MessageBuilder
 from mypy.nodes import Context

@@ -38,7 +38,10 @@ def apply_generic_arguments(callable: CallableType, types: List[Type],
                     types[i] = value
                     break
             else:
-                if not msg.incompatible_anystr_arguments(callable, type, context):
+                arg_types = get_arg_types(callable)
+                if has_anystr_incompatible_args(arg_types, type):
+                    msg.incompatible_anystr_arguments(callable, arg_types, context)
+                else:
                     msg.incompatible_typevar_value(callable, i + 1, type, context)
         upper_bound = callable.variables[i].upper_bound
         if (type and not isinstance(type, PartialType) and
@@ -62,3 +65,23 @@ def apply_generic_arguments(callable: CallableType, types: List[Type],
         ret_type=expand_type(callable.ret_type, id_to_type),
         variables=remaining_tvars,
     )
+
+
+def get_arg_types(callee: CallableType) -> List[str]:
+    arg_types = []  # type: 
List[str] + for arg_type in callee.arg_types: + if isinstance(arg_type, Instance): + arg_types.append(arg_type.type.name()) + elif isinstance(arg_type, TypeVarType): + arg_types.append(arg_type.name) + else: + arg_types.append(str(arg_type)) + return arg_types + + +def has_anystr_incompatible_args(arg_types: List[str], type: Type) -> bool: + if (arg_types.count('AnyStr') > 1 and isinstance(type, Instance) and + type.type.name() == 'object'): + return True + else: + return False diff --git a/mypy/messages.py b/mypy/messages.py index 84ffa787db22..01a38d06bc32 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -819,18 +819,11 @@ def incompatible_typevar_value(self, callee: CallableType, index: int, self.fail('Type argument {} of {} has incompatible value {}'.format( index, callable_name(callee), self.format(type)), context) - def incompatible_anystr_arguments(self, callee: CallableType, type: Type, - context: Context) -> bool: - arg_types = tuple(type.name for type in callee.arg_types) - if (arg_types.count('AnyStr') > 1 and isinstance(type, Instance) and - type.type.fullname() == 'builtins.object'): - self.fail( - 'Type arguments of {} have incompatible values with expected {}'.format( - callable_name(callee), arg_types), context) - self.note('"AnyStr" arguments must be all "str" or all "byte"', context) - return True - else: - return False + def incompatible_anystr_arguments(self, callee: CallableType, arg_types: List[str], + context: Context) -> None: + self.fail('Type arguments of {} have incompatible values with expected {}'.format( + callable_name(callee), tuple(arg_types)), context) + self.note('"AnyStr" arguments must be all "str" or all "bytes"', context) def overloaded_signatures_overlap(self, index1: int, index2: int, context: Context) -> None: From 7816c4fce82a8a02d719247601b5ee3f67fe6162 Mon Sep 17 00:00:00 2001 From: quartox Date: Tue, 23 May 2017 20:13:39 -0700 Subject: [PATCH 03/27] Add tests --- test-data/unit/check-functions.test | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index fc049053f855..c6db55c1124f 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -2125,3 +2125,20 @@ def i() -> List[Union[str, int]]: return x [builtins fixtures/dict.pyi] + +[case testAnyStrIncompatibleArguments] +from typing import TypeVar +AnyStr = TypeVar('AnyStr', bytes, str) +def f(x: AnyStr, y: AnyStr) -> None: pass +def g(x: AnyStr, y: AnyStr, z: int) -> None: pass +f('a', 'b') +f(b'a', b'b') +f('a', b'b') # E: Type arguments of "f" have incompatible values with expected ('AnyStr', 'AnyStr') \ +# N: "AnyStr" arguments must be all "str" or all "bytes" +g('a', 'b', 1) +g('a', b'b', 1) # E: Type arguments of "g" have incompatible values with expected ('AnyStr', 'AnyStr', 'int') \ +# N: "AnyStr" arguments must be all "str" or all "bytes" +g('a', b'b', 'c') # E: Type arguments of "g" have incompatible values with expected ('AnyStr', 'AnyStr', 'int') \ +# N: "AnyStr" arguments must be all "str" or all "bytes" \ +# E: Argument 3 to "g" has incompatible type "str"; expected "int" + From 6c3e8229aefc7c2368cf46c9a2032750b43bcf11 Mon Sep 17 00:00:00 2001 From: quartox Date: Tue, 23 May 2017 20:15:51 -0700 Subject: [PATCH 04/27] Add docstring --- mypy/applytype.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mypy/applytype.py b/mypy/applytype.py index 4131ff48e4ed..7fdae96374f5 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -80,6 +80,11 @@ 
def get_arg_types(callee: CallableType) -> List[str]: def has_anystr_incompatible_args(arg_types: List[str], type: Type) -> bool: + """Determines if function has a problem with AnyStr arguments. + + If the function has more than one AnyStr argument and the solver returns the object type, + then the function was passed both an "str" and "bytes" argument type. + """ if (arg_types.count('AnyStr') > 1 and isinstance(type, Instance) and type.type.name() == 'object'): return True From 4c408e74fef767e16f7fea321b81bb87c2d0dba9 Mon Sep 17 00:00:00 2001 From: quartox Date: Tue, 23 May 2017 21:37:17 -0700 Subject: [PATCH 05/27] Fix tests --- mypy/applytype.py | 6 +++--- test-data/unit/check-overloading.test | 3 ++- test-data/unit/pythoneval.test | 6 ++++-- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/mypy/applytype.py b/mypy/applytype.py index 7fdae96374f5..b7582c830e5e 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -38,9 +38,9 @@ def apply_generic_arguments(callable: CallableType, types: List[Type], types[i] = value break else: - arg_types = get_arg_types(callable) - if has_anystr_incompatible_args(arg_types, type): - msg.incompatible_anystr_arguments(callable, arg_types, context) + arg_types_str = get_arg_types(callable) + if has_anystr_incompatible_args(arg_types_str, type): + msg.incompatible_anystr_arguments(callable, arg_types_str, context) else: msg.incompatible_typevar_value(callable, i + 1, type, context) upper_bound = callable.variables[i].upper_bound diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index 69289fae18c1..5ff626478645 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -998,7 +998,8 @@ def g(x: int, *a: AnyStr) -> None: pass g('foo') g('foo', 'bar') -g('foo', b'bar') # E: Type argument 1 of "g" has incompatible value "object" +g('foo', b'bar') # E: Type arguments of "g" have incompatible values with expected ('AnyStr', 'AnyStr') \ +# N: "AnyStr" arguments must be all "str" or all "bytes" g(1) g(1, 'foo') g(1, 'foo', b'bar') # E: Type argument 1 of "g" has incompatible value "object" diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index c9ee1f322a28..74a504c30fca 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1277,7 +1277,8 @@ re.subn(bpat, b'', b'')[0] + b'' re.subn(bre, lambda m: b'', b'')[0] + b'' re.subn(bpat, lambda m: b'', b'')[0] + b'' [out] -_program.py:7: error: Type argument 1 of "search" has incompatible value "object" +_program.py:7: error: Type arguments of "search" have incompatible values with expected ('AnyStr', 'AnyStr', 'int') +_program.py:7: note: "AnyStr" arguments must be all "str" or all "bytes" _program.py:9: error: Cannot infer type argument 1 of "search" [case testReModuleString] @@ -1301,7 +1302,8 @@ re.subn(spat, '', '')[0] + '' re.subn(sre, lambda m: '', '')[0] + '' re.subn(spat, lambda m: '', '')[0] + '' [out] -_program.py:7: error: Type argument 1 of "search" has incompatible value "object" +_program.py:7: error: Type arguments of "search" have incompatible values with expected ('AnyStr', 'AnyStr', 'int') +_program.py:7: note: "AnyStr" arguments must be all "str" or all "bytes" _program.py:9: error: Cannot infer type argument 1 of "search" [case testListSetitemTuple] From 9ee35b7274a5825c5ef56f2339327975a57672df Mon Sep 17 00:00:00 2001 From: quartox Date: Wed, 24 May 2017 08:24:30 -0700 Subject: [PATCH 06/27] Get all AnyStr failures --- mypy/applytype.py | 33 
+++++++++++---------------------- 1 file changed, 11 insertions(+), 22 deletions(-) diff --git a/mypy/applytype.py b/mypy/applytype.py index b7582c830e5e..6e2cc61ed921 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -1,4 +1,4 @@ -from typing import List, Dict +from typing import List, Dict, Sequence import mypy.subtypes from mypy.sametypes import is_same_type @@ -38,9 +38,10 @@ def apply_generic_arguments(callable: CallableType, types: List[Type], types[i] = value break else: - arg_types_str = get_arg_types(callable) - if has_anystr_incompatible_args(arg_types_str, type): - msg.incompatible_anystr_arguments(callable, arg_types_str, context) + arg_strings = tuple(msg.format(arg_type).replace('"', '') + for arg_type in callable.arg_types) + if has_anystr_incompatible_args(arg_strings, type): + msg.incompatible_anystr_arguments(callable, arg_strings, context) else: msg.incompatible_typevar_value(callable, i + 1, type, context) upper_bound = callable.variables[i].upper_bound @@ -67,26 +68,14 @@ def apply_generic_arguments(callable: CallableType, types: List[Type], ) -def get_arg_types(callee: CallableType) -> List[str]: - arg_types = [] # type: List[str] - for arg_type in callee.arg_types: - if isinstance(arg_type, Instance): - arg_types.append(arg_type.type.name()) - elif isinstance(arg_type, TypeVarType): - arg_types.append(arg_type.name) - else: - arg_types.append(str(arg_type)) - return arg_types - - -def has_anystr_incompatible_args(arg_types: List[str], type: Type) -> bool: +def has_anystr_incompatible_args(arg_strings: Sequence[str], type: Type) -> bool: """Determines if function has a problem with AnyStr arguments. If the function has more than one AnyStr argument and the solver returns the object type, then the function was passed both an "str" and "bytes" argument type. 
""" - if (arg_types.count('AnyStr') > 1 and isinstance(type, Instance) and - type.type.name() == 'object'): - return True - else: - return False + if isinstance(type, Instance) and type.type.name() == 'object': + for string in arg_strings: + if 'AnyStr' in string: + return True + return False From 066fde64a0d5b9bd2234a37323e7efdc9cb1b435 Mon Sep 17 00:00:00 2001 From: quartox Date: Wed, 24 May 2017 08:24:41 -0700 Subject: [PATCH 07/27] Clean up error message --- mypy/messages.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/mypy/messages.py b/mypy/messages.py index 01a38d06bc32..019784812beb 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -819,10 +819,12 @@ def incompatible_typevar_value(self, callee: CallableType, index: int, self.fail('Type argument {} of {} has incompatible value {}'.format( index, callable_name(callee), self.format(type)), context) - def incompatible_anystr_arguments(self, callee: CallableType, arg_types: List[str], + def incompatible_anystr_arguments(self, callee: CallableType, arg_strings: Sequence[str], context: Context) -> None: - self.fail('Type arguments of {} have incompatible values with expected {}'.format( - callable_name(callee), tuple(arg_types)), context) + if len(arg_strings) == 1: + arg_strings = str(arg_strings).replace(',)', ')') + call_with_types = '"{}{}"'.format(callable_name(callee).replace('"', ''), arg_strings) + self.fail('Type arguments of {} have incompatible values'.format(call_with_types), context) self.note('"AnyStr" arguments must be all "str" or all "bytes"', context) def overloaded_signatures_overlap(self, index1: int, index2: int, From c108a583bd8e3dc6c5780b15997e48a0591ce7cc Mon Sep 17 00:00:00 2001 From: quartox Date: Wed, 24 May 2017 08:25:18 -0700 Subject: [PATCH 08/27] Fix tests --- test-data/unit/check-functions.test | 24 +++++++++++++++++++++--- test-data/unit/check-inference.test | 6 ++++-- test-data/unit/check-overloading.test | 5 +++-- test-data/unit/pythoneval.test | 4 ++-- 4 files changed, 30 insertions(+), 9 deletions(-) diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index c6db55c1124f..5daf6aa3f1fd 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -2133,12 +2133,30 @@ def f(x: AnyStr, y: AnyStr) -> None: pass def g(x: AnyStr, y: AnyStr, z: int) -> None: pass f('a', 'b') f(b'a', b'b') -f('a', b'b') # E: Type arguments of "f" have incompatible values with expected ('AnyStr', 'AnyStr') \ +f('a', b'b') # E: Type arguments of "f('AnyStr', 'AnyStr')" have incompatible values \ # N: "AnyStr" arguments must be all "str" or all "bytes" g('a', 'b', 1) -g('a', b'b', 1) # E: Type arguments of "g" have incompatible values with expected ('AnyStr', 'AnyStr', 'int') \ +g('a', b'b', 1) # E: Type arguments of "g('AnyStr', 'AnyStr', 'int')" have incompatible values \ # N: "AnyStr" arguments must be all "str" or all "bytes" -g('a', b'b', 'c') # E: Type arguments of "g" have incompatible values with expected ('AnyStr', 'AnyStr', 'int') \ +g('a', b'b', 'c') # E: Type arguments of "g('AnyStr', 'AnyStr', 'int')" have incompatible values \ # N: "AnyStr" arguments must be all "str" or all "bytes" \ # E: Argument 3 to "g" has incompatible type "str"; expected "int" +[case testUnionAnyStrIncompatibleArguments] +from typing import TypeVar, Union +AnyStr = TypeVar('AnyStr', bytes, str) +def f(x: Union[AnyStr, int], y: AnyStr) -> None: pass +f('a', 'b') +f(1, 'b') +f('a', b'b') # E: Type arguments of "f('Union[AnyStr, int]', 'AnyStr')" 
have incompatible values \ +# N: "AnyStr" arguments must be all "str" or all "bytes" + +[case testStarAnyStrIncompatibleArguments] +from typing import TypeVar, Union +AnyStr = TypeVar('AnyStr', bytes, str) +def f(*x: AnyStr) -> None: pass +f('a') +f('a', 'b') +f('a', b'b') # E: Type arguments of "f('AnyStr')" have incompatible values \ +# N: "AnyStr" arguments must be all "str" or all "bytes" + diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 42cd312c0531..d0f9e12ebdb0 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -750,10 +750,12 @@ AnyStr = TypeVar('AnyStr', bytes, str) def f(x: Union[AnyStr, int], *a: AnyStr) -> None: pass f('foo') f('foo', 'bar') -f('foo', b'bar') # E: Type argument 1 of "f" has incompatible value "object" +f('foo', b'bar') # E: Type arguments of "f('Union[AnyStr, int]', 'AnyStr')" have incompatible values \ +# N: "AnyStr" arguments must be all "str" or all "bytes" f(1) f(1, 'foo') -f(1, 'foo', b'bar') # E: Type argument 1 of "f" has incompatible value "object" +f(1, 'foo', b'bar') # E: Type arguments of "f('Union[AnyStr, int]', 'AnyStr')" have incompatible values \ +# N: "AnyStr" arguments must be all "str" or all "bytes" [builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index 5ff626478645..e33948c072ea 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -998,11 +998,12 @@ def g(x: int, *a: AnyStr) -> None: pass g('foo') g('foo', 'bar') -g('foo', b'bar') # E: Type arguments of "g" have incompatible values with expected ('AnyStr', 'AnyStr') \ +g('foo', b'bar') # E: Type arguments of "g('AnyStr', 'AnyStr')" have incompatible values \ # N: "AnyStr" arguments must be all "str" or all "bytes" g(1) g(1, 'foo') -g(1, 'foo', b'bar') # E: Type argument 1 of "g" has incompatible value "object" +g(1, 'foo', b'bar') # E: Type arguments of "g('int', 'AnyStr')" have incompatible values \ +# N: "AnyStr" arguments must be all "str" or all "bytes" [builtins fixtures/primitives.pyi] [case testBadOverlapWithTypeVarsWithValues] diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 74a504c30fca..52e4c2be048f 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1277,7 +1277,7 @@ re.subn(bpat, b'', b'')[0] + b'' re.subn(bre, lambda m: b'', b'')[0] + b'' re.subn(bpat, lambda m: b'', b'')[0] + b'' [out] -_program.py:7: error: Type arguments of "search" have incompatible values with expected ('AnyStr', 'AnyStr', 'int') +_program.py:7: error: Type arguments of "search('AnyStr', 'AnyStr', 'int')" have incompatible values _program.py:7: note: "AnyStr" arguments must be all "str" or all "bytes" _program.py:9: error: Cannot infer type argument 1 of "search" @@ -1302,7 +1302,7 @@ re.subn(spat, '', '')[0] + '' re.subn(sre, lambda m: '', '')[0] + '' re.subn(spat, lambda m: '', '')[0] + '' [out] -_program.py:7: error: Type arguments of "search" have incompatible values with expected ('AnyStr', 'AnyStr', 'int') +_program.py:7: error: Type arguments of "search('AnyStr', 'AnyStr', 'int')" have incompatible values _program.py:7: note: "AnyStr" arguments must be all "str" or all "bytes" _program.py:9: error: Cannot infer type argument 1 of "search" From 4a133bd9d459d246bcbaf95c679326841a90efc5 Mon Sep 17 00:00:00 2001 From: quartox Date: Sun, 4 Jun 2017 12:55:03 -0700 Subject: [PATCH 09/27] Check for any constrained type --- 
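Note on the approach: a value-restricted TypeVar such as AnyStr must be solved to a single one of its listed types for the whole call, and mixing the allowed types makes the constraint solver fall back to "object"; that fallback is exactly the condition the new message reports. A minimal sketch of the situation (the name "concat" is illustrative and not part of this diff; the error wording shown is the one added by the tests in this patch):

    from typing import TypeVar

    AnyStr = TypeVar('AnyStr', str, bytes)  # a value-restricted ("constrained") type variable

    def concat(x: AnyStr, y: AnyStr) -> AnyStr:
        # AnyStr has to be solved as str or as bytes for the whole call.
        return x + y

    concat('a', 'b')    # ok: AnyStr is solved as str
    concat(b'a', b'b')  # ok: AnyStr is solved as bytes
    concat('a', b'b')   # mixed str/bytes: the solver falls back to object, which now
                        # reports: Type argument 1 of "concat" has incompatible value
                        #          "AnyStr" must be all one type: str or bytes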
mypy/applytype.py | 46 ++++++++++----- mypy/messages.py | 23 +++++--- test-data/unit/check-functions.test | 74 +++++++++++++++++++----- test-data/unit/check-inference.test | 8 +-- test-data/unit/check-overloading.test | 8 +-- test-data/unit/check-typevar-values.test | 9 ++- test-data/unit/pythoneval.test | 8 +-- 7 files changed, 124 insertions(+), 52 deletions(-) diff --git a/mypy/applytype.py b/mypy/applytype.py index 6e2cc61ed921..83149d03095e 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -1,9 +1,12 @@ -from typing import List, Dict, Sequence +from typing import List, Dict, Sequence, Tuple import mypy.subtypes from mypy.sametypes import is_same_type from mypy.expandtype import expand_type -from mypy.types import Type, TypeVarId, TypeVarType, CallableType, AnyType, PartialType, Instance +from mypy.types import ( + Type, TypeVarId, TypeVarType, TypeVisitor, CallableType, AnyType, PartialType, + Instance, UnionType +) from mypy.messages import MessageBuilder from mypy.nodes import Context @@ -38,10 +41,9 @@ def apply_generic_arguments(callable: CallableType, types: List[Type], types[i] = value break else: - arg_strings = tuple(msg.format(arg_type).replace('"', '') - for arg_type in callable.arg_types) - if has_anystr_incompatible_args(arg_strings, type): - msg.incompatible_anystr_arguments(callable, arg_strings, context) + constraints = get_incompatible_arg_constraints(callable.arg_types, type, i + 1) + if constraints: + msg.incompatible_constrained_arguments(callable, i + 1, constraints, context) else: msg.incompatible_typevar_value(callable, i + 1, type, context) upper_bound = callable.variables[i].upper_bound @@ -68,14 +70,30 @@ def apply_generic_arguments(callable: CallableType, types: List[Type], ) -def has_anystr_incompatible_args(arg_strings: Sequence[str], type: Type) -> bool: - """Determines if function has a problem with AnyStr arguments. +def get_incompatible_arg_constraints(arg_types: Sequence[Type], type: Type, + index: int) -> Dict[str, Tuple[str]]: + """Gets incompatible function arguments with the constrained types. - If the function has more than one AnyStr argument and the solver returns the object type, - then the function was passed both an "str" and "bytes" argument type. + An example of a constrained type is AnyStr which must be all str or all byte. 
""" + constraints = {} # type: Dict[str, Tuple[str]] if isinstance(type, Instance) and type.type.name() == 'object': - for string in arg_strings: - if 'AnyStr' in string: - return True - return False + if index == len(arg_types): + # Index is off by one for '*' arguments + constraints = add_arg_constraints(constraints, arg_types[index - 1]) + else: + constraints = add_arg_constraints(constraints, arg_types[index]) + return constraints + + +def add_arg_constraints(constraints: Dict[str, Tuple[str]], + arg_type: Type) -> Dict[str, Tuple[str]]: + if (isinstance(arg_type, TypeVarType) and + arg_type.values and + len(arg_type.values) > 1 and + arg_type.name not in constraints.keys()): + constraints[arg_type.name] = tuple(vals.type.name() for vals in arg_type.values) + elif isinstance(arg_type, UnionType): + for item in arg_type.items: + constraints = add_arg_constraints(constraints, item) + return constraints diff --git a/mypy/messages.py b/mypy/messages.py index 019784812beb..28c17c1a60f7 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -6,7 +6,7 @@ import re import difflib -from typing import cast, List, Dict, Any, Sequence, Iterable, Tuple +from typing import cast, List, Dict, Any, Sequence, Iterable, Tuple, Mapping from mypy.erasetype import erase_type from mypy.errors import Errors @@ -819,13 +819,20 @@ def incompatible_typevar_value(self, callee: CallableType, index: int, self.fail('Type argument {} of {} has incompatible value {}'.format( index, callable_name(callee), self.format(type)), context) - def incompatible_anystr_arguments(self, callee: CallableType, arg_strings: Sequence[str], - context: Context) -> None: - if len(arg_strings) == 1: - arg_strings = str(arg_strings).replace(',)', ')') - call_with_types = '"{}{}"'.format(callable_name(callee).replace('"', ''), arg_strings) - self.fail('Type arguments of {} have incompatible values'.format(call_with_types), context) - self.note('"AnyStr" arguments must be all "str" or all "bytes"', context) + def incompatible_constrained_arguments(self, + callee: CallableType, + index: int, + constraints: Mapping[str, Sequence[str]], + context: Context) -> None: + for key, values in constraints.items(): + self.fail('Type argument {} of {} has incompatible value'.format( + index, callable_name(callee)), context) + if len(values) == 2: + constraint_str = '{} or {}'.format(values[0], values[1]) + elif len(values) > 3: + constraint_str = ', '.join(values[:-1]) + ', or ' + values[-1] + self.note('"{}" must be all one type: {}'.format( + key, constraint_str), context) def overloaded_signatures_overlap(self, index1: int, index2: int, context: Context) -> None: diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index 5daf6aa3f1fd..e2ddade9887f 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -2128,35 +2128,79 @@ def i() -> List[Union[str, int]]: [case testAnyStrIncompatibleArguments] from typing import TypeVar -AnyStr = TypeVar('AnyStr', bytes, str) +AnyStr = TypeVar('AnyStr', str, bytes) def f(x: AnyStr, y: AnyStr) -> None: pass -def g(x: AnyStr, y: AnyStr, z: int) -> None: pass +def g(x: AnyStr, y: AnyStr, z: int) -> AnyStr: pass f('a', 'b') f(b'a', b'b') -f('a', b'b') # E: Type arguments of "f('AnyStr', 'AnyStr')" have incompatible values \ -# N: "AnyStr" arguments must be all "str" or all "bytes" +f('a', b'b') # E: Type argument 1 of "f" has incompatible value \ +# N: "AnyStr" must be all one type: str or bytes g('a', 'b', 1) -g('a', b'b', 1) # E: Type arguments of 
"g('AnyStr', 'AnyStr', 'int')" have incompatible values \ -# N: "AnyStr" arguments must be all "str" or all "bytes" -g('a', b'b', 'c') # E: Type arguments of "g('AnyStr', 'AnyStr', 'int')" have incompatible values \ -# N: "AnyStr" arguments must be all "str" or all "bytes" \ +g(b'a', b'b', 1) +g('a', b'b', 1) # E: Type argument 1 of "g" has incompatible value \ +# N: "AnyStr" must be all one type: str or bytes +g('a', b'b', 'c') # E: Type argument 1 of "g" has incompatible value \ +# N: "AnyStr" must be all one type: str or bytes \ # E: Argument 3 to "g" has incompatible type "str"; expected "int" [case testUnionAnyStrIncompatibleArguments] from typing import TypeVar, Union -AnyStr = TypeVar('AnyStr', bytes, str) +AnyStr = TypeVar('AnyStr', str, bytes) def f(x: Union[AnyStr, int], y: AnyStr) -> None: pass f('a', 'b') f(1, 'b') -f('a', b'b') # E: Type arguments of "f('Union[AnyStr, int]', 'AnyStr')" have incompatible values \ -# N: "AnyStr" arguments must be all "str" or all "bytes" +f('a', b'b') # E: Type argument 1 of "f" has incompatible value \ +# N: "AnyStr" must be all one type: str or bytes [case testStarAnyStrIncompatibleArguments] from typing import TypeVar, Union -AnyStr = TypeVar('AnyStr', bytes, str) +AnyStr = TypeVar('AnyStr', str, bytes) def f(*x: AnyStr) -> None: pass +def g(x: int, *y: AnyStr) -> None: pass +def h(*x: AnyStr, y: int) -> None: pass f('a') f('a', 'b') -f('a', b'b') # E: Type arguments of "f('AnyStr')" have incompatible values \ -# N: "AnyStr" arguments must be all "str" or all "bytes" - +f('a', b'b') # E: Type argument 1 of "f" has incompatible value \ +# N: "AnyStr" must be all one type: str or bytes +g(1, 'a') +g(1, 'a', b'b') # E: Type argument 1 of "g" has incompatible value \ +# N: "AnyStr" must be all one type: str or bytes +h('a', y=1) +h('a', 'b', y=1) +h('a', b'b', y=1) # E: Type argument 1 of "h" has incompatible value "object" + +[case testConstrainedIncompatibleArguments] +from typing import TypeVar +S = TypeVar('S', int, str) +def f(x: S, y: S) -> S: return (x + y) +f('1', '2') +f('1', 2) # E: Type argument 1 of "f" has incompatible value \ +# N: "S" must be all one type: int or str +f(1, '2') # E: Type argument 1 of "f" has incompatible value \ +# N: "S" must be all one type: int or str + +[case testMultipleConstrainedIncompatibleArguments] +from typing import TypeVar +S = TypeVar('S', int, str) +AnyStr = TypeVar('AnyStr', str, bytes) +def f(a: S, b: S, c: AnyStr, d: AnyStr) -> S: return (a + b) +f('1', '2', '3', '4') +f('1', '2', b'3', b'4') +f(1, 2, '3', '4') +f(1, 2, b'3', b'4') +f(1, '2', '3', '4') # E: Type argument 1 of "f" has incompatible value \ +# N: "S" must be all one type: int or str +f('1', 2, '3', '4') # E: Type argument 1 of "f" has incompatible value \ +# N: "S" must be all one type: int or str +f('1', '2', b'3', '4') # E: Type argument 2 of "f" has incompatible value \ +# N: "AnyStr" must be all one type: str or bytes +f('1', '2', '3', b'4') # E: Type argument 2 of "f" has incompatible value \ +# N: "AnyStr" must be all one type: str or bytes +f('1', 2, b'3', '4') # E: Type argument 1 of "f" has incompatible value \ +# N: "S" must be all one type: int or str \ +# E: Type argument 2 of "f" has incompatible value \ +# N: "AnyStr" must be all one type: str or bytes +f(1, '2', '3', b'4') # E: Type argument 1 of "f" has incompatible value \ +# N: "S" must be all one type: int or str \ +# E: Type argument 2 of "f" has incompatible value \ +# N: "AnyStr" must be all one type: str or bytes diff --git 
a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index d0f9e12ebdb0..6a3d4c054fc9 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -750,12 +750,12 @@ AnyStr = TypeVar('AnyStr', bytes, str) def f(x: Union[AnyStr, int], *a: AnyStr) -> None: pass f('foo') f('foo', 'bar') -f('foo', b'bar') # E: Type arguments of "f('Union[AnyStr, int]', 'AnyStr')" have incompatible values \ -# N: "AnyStr" arguments must be all "str" or all "bytes" +f('foo', b'bar') # E: Type argument 1 of "f" has incompatible value \ +# N: "AnyStr" must be all one type: bytes or str f(1) f(1, 'foo') -f(1, 'foo', b'bar') # E: Type arguments of "f('Union[AnyStr, int]', 'AnyStr')" have incompatible values \ -# N: "AnyStr" arguments must be all "str" or all "bytes" +f(1, 'foo', b'bar') # E: Type argument 1 of "f" has incompatible value \ +# N: "AnyStr" must be all one type: bytes or str [builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index e33948c072ea..3b3ed75615d4 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -998,12 +998,12 @@ def g(x: int, *a: AnyStr) -> None: pass g('foo') g('foo', 'bar') -g('foo', b'bar') # E: Type arguments of "g('AnyStr', 'AnyStr')" have incompatible values \ -# N: "AnyStr" arguments must be all "str" or all "bytes" +g('foo', b'bar') # E: Type argument 1 of "g" has incompatible value \ +# N: "AnyStr" must be all one type: bytes or str g(1) g(1, 'foo') -g(1, 'foo', b'bar') # E: Type arguments of "g('int', 'AnyStr')" have incompatible values \ -# N: "AnyStr" arguments must be all "str" or all "bytes" +g(1, 'foo', b'bar') # E: Type argument 1 of "g" has incompatible value \ +# N: "AnyStr" must be all one type: bytes or str [builtins fixtures/primitives.pyi] [case testBadOverlapWithTypeVarsWithValues] diff --git a/test-data/unit/check-typevar-values.test b/test-data/unit/check-typevar-values.test index 36df2235a209..e8d432268fd0 100644 --- a/test-data/unit/check-typevar-values.test +++ b/test-data/unit/check-typevar-values.test @@ -7,7 +7,8 @@ T = TypeVar('T', int, str) def f(x: T) -> None: pass f(1) f('x') -f(object()) # E: Type argument 1 of "f" has incompatible value "object" +f(object()) # E: Type argument 1 of "f" has incompatible value\ +# N: "T" must be all one type: int or str [case testCallGenericFunctionWithTypeVarValueRestrictionUsingContext] from typing import TypeVar, List @@ -18,7 +19,8 @@ s = ['x'] o = [object()] i = f(1) s = f('') -o = f(1) # E: Type argument 1 of "f" has incompatible value "object" +o = f(1) # E: Type argument 1 of "f" has incompatible value\ +# N: "T" must be all one type: int or str [builtins fixtures/list.pyi] [case testCallGenericFunctionWithTypeVarValueRestrictionAndAnyArgs] @@ -239,7 +241,8 @@ class A(Generic[X]): A(1) A('x') A(cast(Any, object())) -A(object()) # E: Type argument 1 of "A" has incompatible value "object" +A(object()) # E: Type argument 1 of "A" has incompatible value\ +# N: "X" must be all one type: int or str [case testGenericTypeWithTypevarValuesAndTypevarArgument] from typing import TypeVar, Generic diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 52e4c2be048f..c38bc5cff9b8 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1277,8 +1277,8 @@ re.subn(bpat, b'', b'')[0] + b'' re.subn(bre, lambda m: b'', b'')[0] + b'' re.subn(bpat, lambda m: b'', b'')[0] + b'' [out] -_program.py:7: error: 
Type arguments of "search('AnyStr', 'AnyStr', 'int')" have incompatible values -_program.py:7: note: "AnyStr" arguments must be all "str" or all "bytes" +_program.py:7: error: Type argument 1 of "search" has incompatible value +_program.py:7: note: "AnyStr" must be all one type: str or bytes _program.py:9: error: Cannot infer type argument 1 of "search" [case testReModuleString] @@ -1302,8 +1302,8 @@ re.subn(spat, '', '')[0] + '' re.subn(sre, lambda m: '', '')[0] + '' re.subn(spat, lambda m: '', '')[0] + '' [out] -_program.py:7: error: Type arguments of "search('AnyStr', 'AnyStr', 'int')" have incompatible values -_program.py:7: note: "AnyStr" arguments must be all "str" or all "bytes" +_program.py:7: error: Type argument 1 of "search" has incompatible value +_program.py:7: note: "AnyStr" must be all one type: str or bytes _program.py:9: error: Cannot infer type argument 1 of "search" [case testListSetitemTuple] From 982f4b7249e9af894133630007f7425f9b4a9ee1 Mon Sep 17 00:00:00 2001 From: quartox Date: Sun, 4 Jun 2017 14:12:33 -0700 Subject: [PATCH 10/27] Revert "Check for any constrained type" This reverts commit 508ffe8a01c06dac73272c01f7812d24cc3b1c11. Conflicts: test-data/unit/check-functions.test --- mypy/applytype.py | 46 ++++++++---------------- mypy/messages.py | 23 +++++------- test-data/unit/check-functions.test | 27 +++++++------- test-data/unit/check-inference.test | 8 ++--- test-data/unit/check-overloading.test | 8 ++--- test-data/unit/check-typevar-values.test | 9 ++--- test-data/unit/pythoneval.test | 8 ++--- 7 files changed, 49 insertions(+), 80 deletions(-) diff --git a/mypy/applytype.py b/mypy/applytype.py index 83149d03095e..6e2cc61ed921 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -1,12 +1,9 @@ -from typing import List, Dict, Sequence, Tuple +from typing import List, Dict, Sequence import mypy.subtypes from mypy.sametypes import is_same_type from mypy.expandtype import expand_type -from mypy.types import ( - Type, TypeVarId, TypeVarType, TypeVisitor, CallableType, AnyType, PartialType, - Instance, UnionType -) +from mypy.types import Type, TypeVarId, TypeVarType, CallableType, AnyType, PartialType, Instance from mypy.messages import MessageBuilder from mypy.nodes import Context @@ -41,9 +38,10 @@ def apply_generic_arguments(callable: CallableType, types: List[Type], types[i] = value break else: - constraints = get_incompatible_arg_constraints(callable.arg_types, type, i + 1) - if constraints: - msg.incompatible_constrained_arguments(callable, i + 1, constraints, context) + arg_strings = tuple(msg.format(arg_type).replace('"', '') + for arg_type in callable.arg_types) + if has_anystr_incompatible_args(arg_strings, type): + msg.incompatible_anystr_arguments(callable, arg_strings, context) else: msg.incompatible_typevar_value(callable, i + 1, type, context) upper_bound = callable.variables[i].upper_bound @@ -70,30 +68,14 @@ def apply_generic_arguments(callable: CallableType, types: List[Type], ) -def get_incompatible_arg_constraints(arg_types: Sequence[Type], type: Type, - index: int) -> Dict[str, Tuple[str]]: - """Gets incompatible function arguments with the constrained types. +def has_anystr_incompatible_args(arg_strings: Sequence[str], type: Type) -> bool: + """Determines if function has a problem with AnyStr arguments. - An example of a constrained type is AnyStr which must be all str or all byte. 
+ If the function has more than one AnyStr argument and the solver returns the object type, + then the function was passed both an "str" and "bytes" argument type. """ - constraints = {} # type: Dict[str, Tuple[str]] if isinstance(type, Instance) and type.type.name() == 'object': - if index == len(arg_types): - # Index is off by one for '*' arguments - constraints = add_arg_constraints(constraints, arg_types[index - 1]) - else: - constraints = add_arg_constraints(constraints, arg_types[index]) - return constraints - - -def add_arg_constraints(constraints: Dict[str, Tuple[str]], - arg_type: Type) -> Dict[str, Tuple[str]]: - if (isinstance(arg_type, TypeVarType) and - arg_type.values and - len(arg_type.values) > 1 and - arg_type.name not in constraints.keys()): - constraints[arg_type.name] = tuple(vals.type.name() for vals in arg_type.values) - elif isinstance(arg_type, UnionType): - for item in arg_type.items: - constraints = add_arg_constraints(constraints, item) - return constraints + for string in arg_strings: + if 'AnyStr' in string: + return True + return False diff --git a/mypy/messages.py b/mypy/messages.py index 28c17c1a60f7..019784812beb 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -6,7 +6,7 @@ import re import difflib -from typing import cast, List, Dict, Any, Sequence, Iterable, Tuple, Mapping +from typing import cast, List, Dict, Any, Sequence, Iterable, Tuple from mypy.erasetype import erase_type from mypy.errors import Errors @@ -819,20 +819,13 @@ def incompatible_typevar_value(self, callee: CallableType, index: int, self.fail('Type argument {} of {} has incompatible value {}'.format( index, callable_name(callee), self.format(type)), context) - def incompatible_constrained_arguments(self, - callee: CallableType, - index: int, - constraints: Mapping[str, Sequence[str]], - context: Context) -> None: - for key, values in constraints.items(): - self.fail('Type argument {} of {} has incompatible value'.format( - index, callable_name(callee)), context) - if len(values) == 2: - constraint_str = '{} or {}'.format(values[0], values[1]) - elif len(values) > 3: - constraint_str = ', '.join(values[:-1]) + ', or ' + values[-1] - self.note('"{}" must be all one type: {}'.format( - key, constraint_str), context) + def incompatible_anystr_arguments(self, callee: CallableType, arg_strings: Sequence[str], + context: Context) -> None: + if len(arg_strings) == 1: + arg_strings = str(arg_strings).replace(',)', ')') + call_with_types = '"{}{}"'.format(callable_name(callee).replace('"', ''), arg_strings) + self.fail('Type arguments of {} have incompatible values'.format(call_with_types), context) + self.note('"AnyStr" arguments must be all "str" or all "bytes"', context) def overloaded_signatures_overlap(self, index1: int, index2: int, context: Context) -> None: diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index e2ddade9887f..f398e979c59b 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -2128,36 +2128,33 @@ def i() -> List[Union[str, int]]: [case testAnyStrIncompatibleArguments] from typing import TypeVar -AnyStr = TypeVar('AnyStr', str, bytes) +AnyStr = TypeVar('AnyStr', bytes, str) def f(x: AnyStr, y: AnyStr) -> None: pass -def g(x: AnyStr, y: AnyStr, z: int) -> AnyStr: pass +def g(x: AnyStr, y: AnyStr, z: int) -> None: pass f('a', 'b') f(b'a', b'b') -f('a', b'b') # E: Type argument 1 of "f" has incompatible value \ -# N: "AnyStr" must be all one type: str or bytes +f('a', b'b') # E: Type arguments of 
"f('AnyStr', 'AnyStr')" have incompatible values \ +# N: "AnyStr" arguments must be all "str" or all "bytes" g('a', 'b', 1) -g(b'a', b'b', 1) -g('a', b'b', 1) # E: Type argument 1 of "g" has incompatible value \ -# N: "AnyStr" must be all one type: str or bytes -g('a', b'b', 'c') # E: Type argument 1 of "g" has incompatible value \ -# N: "AnyStr" must be all one type: str or bytes \ +g('a', b'b', 1) # E: Type arguments of "g('AnyStr', 'AnyStr', 'int')" have incompatible values \ +# N: "AnyStr" arguments must be all "str" or all "bytes" +g('a', b'b', 'c') # E: Type arguments of "g('AnyStr', 'AnyStr', 'int')" have incompatible values \ +# N: "AnyStr" arguments must be all "str" or all "bytes" \ # E: Argument 3 to "g" has incompatible type "str"; expected "int" [case testUnionAnyStrIncompatibleArguments] from typing import TypeVar, Union -AnyStr = TypeVar('AnyStr', str, bytes) +AnyStr = TypeVar('AnyStr', bytes, str) def f(x: Union[AnyStr, int], y: AnyStr) -> None: pass f('a', 'b') f(1, 'b') -f('a', b'b') # E: Type argument 1 of "f" has incompatible value \ -# N: "AnyStr" must be all one type: str or bytes +f('a', b'b') # E: Type arguments of "f('Union[AnyStr, int]', 'AnyStr')" have incompatible values \ +# N: "AnyStr" arguments must be all "str" or all "bytes" [case testStarAnyStrIncompatibleArguments] from typing import TypeVar, Union -AnyStr = TypeVar('AnyStr', str, bytes) +AnyStr = TypeVar('AnyStr', bytes, str) def f(*x: AnyStr) -> None: pass -def g(x: int, *y: AnyStr) -> None: pass -def h(*x: AnyStr, y: int) -> None: pass f('a') f('a', 'b') f('a', b'b') # E: Type argument 1 of "f" has incompatible value \ diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 6a3d4c054fc9..d0f9e12ebdb0 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -750,12 +750,12 @@ AnyStr = TypeVar('AnyStr', bytes, str) def f(x: Union[AnyStr, int], *a: AnyStr) -> None: pass f('foo') f('foo', 'bar') -f('foo', b'bar') # E: Type argument 1 of "f" has incompatible value \ -# N: "AnyStr" must be all one type: bytes or str +f('foo', b'bar') # E: Type arguments of "f('Union[AnyStr, int]', 'AnyStr')" have incompatible values \ +# N: "AnyStr" arguments must be all "str" or all "bytes" f(1) f(1, 'foo') -f(1, 'foo', b'bar') # E: Type argument 1 of "f" has incompatible value \ -# N: "AnyStr" must be all one type: bytes or str +f(1, 'foo', b'bar') # E: Type arguments of "f('Union[AnyStr, int]', 'AnyStr')" have incompatible values \ +# N: "AnyStr" arguments must be all "str" or all "bytes" [builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index 3b3ed75615d4..e33948c072ea 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -998,12 +998,12 @@ def g(x: int, *a: AnyStr) -> None: pass g('foo') g('foo', 'bar') -g('foo', b'bar') # E: Type argument 1 of "g" has incompatible value \ -# N: "AnyStr" must be all one type: bytes or str +g('foo', b'bar') # E: Type arguments of "g('AnyStr', 'AnyStr')" have incompatible values \ +# N: "AnyStr" arguments must be all "str" or all "bytes" g(1) g(1, 'foo') -g(1, 'foo', b'bar') # E: Type argument 1 of "g" has incompatible value \ -# N: "AnyStr" must be all one type: bytes or str +g(1, 'foo', b'bar') # E: Type arguments of "g('int', 'AnyStr')" have incompatible values \ +# N: "AnyStr" arguments must be all "str" or all "bytes" [builtins fixtures/primitives.pyi] [case 
testBadOverlapWithTypeVarsWithValues] diff --git a/test-data/unit/check-typevar-values.test b/test-data/unit/check-typevar-values.test index e8d432268fd0..36df2235a209 100644 --- a/test-data/unit/check-typevar-values.test +++ b/test-data/unit/check-typevar-values.test @@ -7,8 +7,7 @@ T = TypeVar('T', int, str) def f(x: T) -> None: pass f(1) f('x') -f(object()) # E: Type argument 1 of "f" has incompatible value\ -# N: "T" must be all one type: int or str +f(object()) # E: Type argument 1 of "f" has incompatible value "object" [case testCallGenericFunctionWithTypeVarValueRestrictionUsingContext] from typing import TypeVar, List @@ -19,8 +18,7 @@ s = ['x'] o = [object()] i = f(1) s = f('') -o = f(1) # E: Type argument 1 of "f" has incompatible value\ -# N: "T" must be all one type: int or str +o = f(1) # E: Type argument 1 of "f" has incompatible value "object" [builtins fixtures/list.pyi] [case testCallGenericFunctionWithTypeVarValueRestrictionAndAnyArgs] @@ -241,8 +239,7 @@ class A(Generic[X]): A(1) A('x') A(cast(Any, object())) -A(object()) # E: Type argument 1 of "A" has incompatible value\ -# N: "X" must be all one type: int or str +A(object()) # E: Type argument 1 of "A" has incompatible value "object" [case testGenericTypeWithTypevarValuesAndTypevarArgument] from typing import TypeVar, Generic diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index c38bc5cff9b8..52e4c2be048f 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1277,8 +1277,8 @@ re.subn(bpat, b'', b'')[0] + b'' re.subn(bre, lambda m: b'', b'')[0] + b'' re.subn(bpat, lambda m: b'', b'')[0] + b'' [out] -_program.py:7: error: Type argument 1 of "search" has incompatible value -_program.py:7: note: "AnyStr" must be all one type: str or bytes +_program.py:7: error: Type arguments of "search('AnyStr', 'AnyStr', 'int')" have incompatible values +_program.py:7: note: "AnyStr" arguments must be all "str" or all "bytes" _program.py:9: error: Cannot infer type argument 1 of "search" [case testReModuleString] @@ -1302,8 +1302,8 @@ re.subn(spat, '', '')[0] + '' re.subn(sre, lambda m: '', '')[0] + '' re.subn(spat, lambda m: '', '')[0] + '' [out] -_program.py:7: error: Type argument 1 of "search" has incompatible value -_program.py:7: note: "AnyStr" must be all one type: str or bytes +_program.py:7: error: Type arguments of "search('AnyStr', 'AnyStr', 'int')" have incompatible values +_program.py:7: note: "AnyStr" arguments must be all "str" or all "bytes" _program.py:9: error: Cannot infer type argument 1 of "search" [case testListSetitemTuple] From 436925ec399cde546bf5738c7a0662b51d448690 Mon Sep 17 00:00:00 2001 From: quartox Date: Sun, 4 Jun 2017 14:22:34 -0700 Subject: [PATCH 11/27] Revert "Revert "Check for any constrained type"" This reverts commit 94985e213afd31b92fb0a0ad71e8ce04e334d573. 
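For reference, a standalone sketch of the note formatting restored later in this patch by incompatible_constrained_arguments in mypy/messages.py. The restored code joins two values with "or" and more-than-three values with commas; a TypeVar with exactly three values appears to satisfy neither branch, so the ">= 3" guard below is an assumed variant for the sketch, not what the diff itself contains:

    from typing import Sequence

    def format_constraint_values(values: Sequence[str]) -> str:
        # Builds the tail of the note, e.g. '"AnyStr" must be all one type: str or bytes'.
        if len(values) == 2:
            return '{} or {}'.format(values[0], values[1])
        elif len(values) >= 3:  # the diff uses "> 3", which would skip exactly three values
            return ', '.join(values[:-1]) + ', or ' + values[-1]
        return values[0]

    format_constraint_values(('str', 'bytes'))         # -> 'str or bytes'
    format_constraint_values(('int', 'str', 'bytes'))  # -> 'int, str, or bytes'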
--- mypy/applytype.py | 46 ++++++++++++++++-------- mypy/messages.py | 23 +++++++----- test-data/unit/check-functions.test | 27 +++++++------- test-data/unit/check-inference.test | 8 ++--- test-data/unit/check-overloading.test | 8 ++--- test-data/unit/check-typevar-values.test | 9 +++-- test-data/unit/pythoneval.test | 8 ++--- 7 files changed, 80 insertions(+), 49 deletions(-) diff --git a/mypy/applytype.py b/mypy/applytype.py index 6e2cc61ed921..83149d03095e 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -1,9 +1,12 @@ -from typing import List, Dict, Sequence +from typing import List, Dict, Sequence, Tuple import mypy.subtypes from mypy.sametypes import is_same_type from mypy.expandtype import expand_type -from mypy.types import Type, TypeVarId, TypeVarType, CallableType, AnyType, PartialType, Instance +from mypy.types import ( + Type, TypeVarId, TypeVarType, TypeVisitor, CallableType, AnyType, PartialType, + Instance, UnionType +) from mypy.messages import MessageBuilder from mypy.nodes import Context @@ -38,10 +41,9 @@ def apply_generic_arguments(callable: CallableType, types: List[Type], types[i] = value break else: - arg_strings = tuple(msg.format(arg_type).replace('"', '') - for arg_type in callable.arg_types) - if has_anystr_incompatible_args(arg_strings, type): - msg.incompatible_anystr_arguments(callable, arg_strings, context) + constraints = get_incompatible_arg_constraints(callable.arg_types, type, i + 1) + if constraints: + msg.incompatible_constrained_arguments(callable, i + 1, constraints, context) else: msg.incompatible_typevar_value(callable, i + 1, type, context) upper_bound = callable.variables[i].upper_bound @@ -68,14 +70,30 @@ def apply_generic_arguments(callable: CallableType, types: List[Type], ) -def has_anystr_incompatible_args(arg_strings: Sequence[str], type: Type) -> bool: - """Determines if function has a problem with AnyStr arguments. +def get_incompatible_arg_constraints(arg_types: Sequence[Type], type: Type, + index: int) -> Dict[str, Tuple[str]]: + """Gets incompatible function arguments with the constrained types. - If the function has more than one AnyStr argument and the solver returns the object type, - then the function was passed both an "str" and "bytes" argument type. + An example of a constrained type is AnyStr which must be all str or all byte. 
""" + constraints = {} # type: Dict[str, Tuple[str]] if isinstance(type, Instance) and type.type.name() == 'object': - for string in arg_strings: - if 'AnyStr' in string: - return True - return False + if index == len(arg_types): + # Index is off by one for '*' arguments + constraints = add_arg_constraints(constraints, arg_types[index - 1]) + else: + constraints = add_arg_constraints(constraints, arg_types[index]) + return constraints + + +def add_arg_constraints(constraints: Dict[str, Tuple[str]], + arg_type: Type) -> Dict[str, Tuple[str]]: + if (isinstance(arg_type, TypeVarType) and + arg_type.values and + len(arg_type.values) > 1 and + arg_type.name not in constraints.keys()): + constraints[arg_type.name] = tuple(vals.type.name() for vals in arg_type.values) + elif isinstance(arg_type, UnionType): + for item in arg_type.items: + constraints = add_arg_constraints(constraints, item) + return constraints diff --git a/mypy/messages.py b/mypy/messages.py index 019784812beb..28c17c1a60f7 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -6,7 +6,7 @@ import re import difflib -from typing import cast, List, Dict, Any, Sequence, Iterable, Tuple +from typing import cast, List, Dict, Any, Sequence, Iterable, Tuple, Mapping from mypy.erasetype import erase_type from mypy.errors import Errors @@ -819,13 +819,20 @@ def incompatible_typevar_value(self, callee: CallableType, index: int, self.fail('Type argument {} of {} has incompatible value {}'.format( index, callable_name(callee), self.format(type)), context) - def incompatible_anystr_arguments(self, callee: CallableType, arg_strings: Sequence[str], - context: Context) -> None: - if len(arg_strings) == 1: - arg_strings = str(arg_strings).replace(',)', ')') - call_with_types = '"{}{}"'.format(callable_name(callee).replace('"', ''), arg_strings) - self.fail('Type arguments of {} have incompatible values'.format(call_with_types), context) - self.note('"AnyStr" arguments must be all "str" or all "bytes"', context) + def incompatible_constrained_arguments(self, + callee: CallableType, + index: int, + constraints: Mapping[str, Sequence[str]], + context: Context) -> None: + for key, values in constraints.items(): + self.fail('Type argument {} of {} has incompatible value'.format( + index, callable_name(callee)), context) + if len(values) == 2: + constraint_str = '{} or {}'.format(values[0], values[1]) + elif len(values) > 3: + constraint_str = ', '.join(values[:-1]) + ', or ' + values[-1] + self.note('"{}" must be all one type: {}'.format( + key, constraint_str), context) def overloaded_signatures_overlap(self, index1: int, index2: int, context: Context) -> None: diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index f398e979c59b..e2ddade9887f 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -2128,33 +2128,36 @@ def i() -> List[Union[str, int]]: [case testAnyStrIncompatibleArguments] from typing import TypeVar -AnyStr = TypeVar('AnyStr', bytes, str) +AnyStr = TypeVar('AnyStr', str, bytes) def f(x: AnyStr, y: AnyStr) -> None: pass -def g(x: AnyStr, y: AnyStr, z: int) -> None: pass +def g(x: AnyStr, y: AnyStr, z: int) -> AnyStr: pass f('a', 'b') f(b'a', b'b') -f('a', b'b') # E: Type arguments of "f('AnyStr', 'AnyStr')" have incompatible values \ -# N: "AnyStr" arguments must be all "str" or all "bytes" +f('a', b'b') # E: Type argument 1 of "f" has incompatible value \ +# N: "AnyStr" must be all one type: str or bytes g('a', 'b', 1) -g('a', b'b', 1) # E: Type arguments of 
"g('AnyStr', 'AnyStr', 'int')" have incompatible values \ -# N: "AnyStr" arguments must be all "str" or all "bytes" -g('a', b'b', 'c') # E: Type arguments of "g('AnyStr', 'AnyStr', 'int')" have incompatible values \ -# N: "AnyStr" arguments must be all "str" or all "bytes" \ +g(b'a', b'b', 1) +g('a', b'b', 1) # E: Type argument 1 of "g" has incompatible value \ +# N: "AnyStr" must be all one type: str or bytes +g('a', b'b', 'c') # E: Type argument 1 of "g" has incompatible value \ +# N: "AnyStr" must be all one type: str or bytes \ # E: Argument 3 to "g" has incompatible type "str"; expected "int" [case testUnionAnyStrIncompatibleArguments] from typing import TypeVar, Union -AnyStr = TypeVar('AnyStr', bytes, str) +AnyStr = TypeVar('AnyStr', str, bytes) def f(x: Union[AnyStr, int], y: AnyStr) -> None: pass f('a', 'b') f(1, 'b') -f('a', b'b') # E: Type arguments of "f('Union[AnyStr, int]', 'AnyStr')" have incompatible values \ -# N: "AnyStr" arguments must be all "str" or all "bytes" +f('a', b'b') # E: Type argument 1 of "f" has incompatible value \ +# N: "AnyStr" must be all one type: str or bytes [case testStarAnyStrIncompatibleArguments] from typing import TypeVar, Union -AnyStr = TypeVar('AnyStr', bytes, str) +AnyStr = TypeVar('AnyStr', str, bytes) def f(*x: AnyStr) -> None: pass +def g(x: int, *y: AnyStr) -> None: pass +def h(*x: AnyStr, y: int) -> None: pass f('a') f('a', 'b') f('a', b'b') # E: Type argument 1 of "f" has incompatible value \ diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index d0f9e12ebdb0..6a3d4c054fc9 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -750,12 +750,12 @@ AnyStr = TypeVar('AnyStr', bytes, str) def f(x: Union[AnyStr, int], *a: AnyStr) -> None: pass f('foo') f('foo', 'bar') -f('foo', b'bar') # E: Type arguments of "f('Union[AnyStr, int]', 'AnyStr')" have incompatible values \ -# N: "AnyStr" arguments must be all "str" or all "bytes" +f('foo', b'bar') # E: Type argument 1 of "f" has incompatible value \ +# N: "AnyStr" must be all one type: bytes or str f(1) f(1, 'foo') -f(1, 'foo', b'bar') # E: Type arguments of "f('Union[AnyStr, int]', 'AnyStr')" have incompatible values \ -# N: "AnyStr" arguments must be all "str" or all "bytes" +f(1, 'foo', b'bar') # E: Type argument 1 of "f" has incompatible value \ +# N: "AnyStr" must be all one type: bytes or str [builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index e33948c072ea..3b3ed75615d4 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -998,12 +998,12 @@ def g(x: int, *a: AnyStr) -> None: pass g('foo') g('foo', 'bar') -g('foo', b'bar') # E: Type arguments of "g('AnyStr', 'AnyStr')" have incompatible values \ -# N: "AnyStr" arguments must be all "str" or all "bytes" +g('foo', b'bar') # E: Type argument 1 of "g" has incompatible value \ +# N: "AnyStr" must be all one type: bytes or str g(1) g(1, 'foo') -g(1, 'foo', b'bar') # E: Type arguments of "g('int', 'AnyStr')" have incompatible values \ -# N: "AnyStr" arguments must be all "str" or all "bytes" +g(1, 'foo', b'bar') # E: Type argument 1 of "g" has incompatible value \ +# N: "AnyStr" must be all one type: bytes or str [builtins fixtures/primitives.pyi] [case testBadOverlapWithTypeVarsWithValues] diff --git a/test-data/unit/check-typevar-values.test b/test-data/unit/check-typevar-values.test index 36df2235a209..e8d432268fd0 100644 --- 
a/test-data/unit/check-typevar-values.test +++ b/test-data/unit/check-typevar-values.test @@ -7,7 +7,8 @@ T = TypeVar('T', int, str) def f(x: T) -> None: pass f(1) f('x') -f(object()) # E: Type argument 1 of "f" has incompatible value "object" +f(object()) # E: Type argument 1 of "f" has incompatible value\ +# N: "T" must be all one type: int or str [case testCallGenericFunctionWithTypeVarValueRestrictionUsingContext] from typing import TypeVar, List @@ -18,7 +19,8 @@ s = ['x'] o = [object()] i = f(1) s = f('') -o = f(1) # E: Type argument 1 of "f" has incompatible value "object" +o = f(1) # E: Type argument 1 of "f" has incompatible value\ +# N: "T" must be all one type: int or str [builtins fixtures/list.pyi] [case testCallGenericFunctionWithTypeVarValueRestrictionAndAnyArgs] @@ -239,7 +241,8 @@ class A(Generic[X]): A(1) A('x') A(cast(Any, object())) -A(object()) # E: Type argument 1 of "A" has incompatible value "object" +A(object()) # E: Type argument 1 of "A" has incompatible value\ +# N: "X" must be all one type: int or str [case testGenericTypeWithTypevarValuesAndTypevarArgument] from typing import TypeVar, Generic diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 52e4c2be048f..c38bc5cff9b8 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1277,8 +1277,8 @@ re.subn(bpat, b'', b'')[0] + b'' re.subn(bre, lambda m: b'', b'')[0] + b'' re.subn(bpat, lambda m: b'', b'')[0] + b'' [out] -_program.py:7: error: Type arguments of "search('AnyStr', 'AnyStr', 'int')" have incompatible values -_program.py:7: note: "AnyStr" arguments must be all "str" or all "bytes" +_program.py:7: error: Type argument 1 of "search" has incompatible value +_program.py:7: note: "AnyStr" must be all one type: str or bytes _program.py:9: error: Cannot infer type argument 1 of "search" [case testReModuleString] @@ -1302,8 +1302,8 @@ re.subn(spat, '', '')[0] + '' re.subn(sre, lambda m: '', '')[0] + '' re.subn(spat, lambda m: '', '')[0] + '' [out] -_program.py:7: error: Type arguments of "search('AnyStr', 'AnyStr', 'int')" have incompatible values -_program.py:7: note: "AnyStr" arguments must be all "str" or all "bytes" +_program.py:7: error: Type argument 1 of "search" has incompatible value +_program.py:7: note: "AnyStr" must be all one type: str or bytes _program.py:9: error: Cannot infer type argument 1 of "search" [case testListSetitemTuple] From bcf86dd1b5ff0b04f7fc8b00af9185e7d6dc16d9 Mon Sep 17 00:00:00 2001 From: quartox Date: Sun, 4 Jun 2017 14:34:53 -0700 Subject: [PATCH 12/27] Revert "Merge with mypy master" This reverts commit d49292d886dd70d927e30072227d7051a5df2cd0. 
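The pythoneval cases updated just above show the user-visible effect of the series so far. Roughly, for a call shaped like the simplified sketch below (the exact program line flagged in testReModuleString is not reproduced here), the reported diagnostics change from the single object-valued error to an error-plus-note pair:

    # Simplified re-module example; spat mirrors the compiled str pattern in the test.
    import re

    spat = re.compile('x*')

    def bad_search() -> None:
        re.search(spat, b'x')  # a str pattern combined with bytes input
    # before this series:  error: Type argument 1 of "search" has incompatible value "object"
    # after this series:   error: Type argument 1 of "search" has incompatible value
    #                      note: "AnyStr" must be all one type: str or bytes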
--- .travis.yml | 1 - LICENSE | 3 +- ROADMAP.md | 96 - docs/source/cheat_sheet.rst | 12 - docs/source/cheat_sheet_py3.rst | 13 - docs/source/common_issues.rst | 2 +- docs/source/config_file.rst | 2 - docs/source/revision_history.rst | 2 +- extensions/setup.py | 2 +- lib-typing/2.7/mod_generics_cache.py | 14 + lib-typing/2.7/setup.py | 46 + lib-typing/2.7/test_typing.py | 1828 +++++++++++++ lib-typing/2.7/typing.py | 2140 +++++++++++++++ lib-typing/3.2/mod_generics_cache.py | 14 + lib-typing/3.2/test_typing.py | 2422 +++++++++++++++++ lib-typing/3.2/typing.py | 2335 ++++++++++++++++ mypy/__main__.py | 8 +- mypy/checker.py | 15 +- mypy/checkexpr.py | 12 +- mypy/checkmember.py | 9 - mypy/fastparse.py | 89 +- mypy/fastparse2.py | 37 +- mypy/funcplugins.py | 7 +- mypy/main.py | 20 +- mypy/options.py | 4 - mypy/parse.py | 6 +- mypy/report.py | 2 +- mypy/semanal.py | 152 +- mypy/subtypes.py | 40 +- mypy/test/data.py | 33 +- mypy/test/helpers.py | 30 +- mypy/test/testcheck.py | 10 +- mypy/test/testpythoneval.py | 10 +- mypy/waiter.py | 14 +- runtests.py | 49 +- scripts/stubgen | 0 setup.cfg | 2 + setup.py | 6 +- .../3.2/test/test_genericpath.py | 2 +- test-data/unit/README.md | 23 +- test-data/unit/check-async-await.test | 44 +- test-data/unit/check-class-namedtuple.test | 1 - test-data/unit/check-classes.test | 159 +- test-data/unit/check-expressions.test | 10 - test-data/unit/check-functions.test | 2 - test-data/unit/check-incomplete-fixture.test | 98 - test-data/unit/check-incremental.test | 480 +--- test-data/unit/check-modules.test | 224 -- test-data/unit/check-newsyntax.test | 16 +- test-data/unit/check-newtype.test | 32 +- test-data/unit/check-optional.test | 18 +- test-data/unit/check-unions.test | 6 +- test-data/unit/check-warnings.test | 17 - test-data/unit/cmdline.test | 8 - test-data/unit/fixtures/f_string.pyi | 36 - test-data/unit/fixtures/module.pyi | 2 - test-data/unit/fixtures/property.pyi | 2 +- test-data/unit/fixtures/typing-full.pyi | 110 - test-data/unit/lib-stub/builtins.pyi | 4 +- test-data/unit/lib-stub/types.pyi | 11 +- test-data/unit/lib-stub/typing.pyi | 40 +- test-data/unit/pythoneval.test | 14 +- test-data/unit/semanal-classvar.test | 2 +- test-data/unit/semanal-types.test | 4 +- typeshed | 2 +- 65 files changed, 9109 insertions(+), 1745 deletions(-) delete mode 100644 ROADMAP.md create mode 100644 lib-typing/2.7/mod_generics_cache.py create mode 100644 lib-typing/2.7/setup.py create mode 100644 lib-typing/2.7/test_typing.py create mode 100644 lib-typing/2.7/typing.py create mode 100644 lib-typing/3.2/mod_generics_cache.py create mode 100644 lib-typing/3.2/test_typing.py create mode 100644 lib-typing/3.2/typing.py mode change 100644 => 100755 scripts/stubgen delete mode 100644 test-data/unit/check-incomplete-fixture.test delete mode 100644 test-data/unit/fixtures/f_string.pyi delete mode 100644 test-data/unit/fixtures/typing-full.pyi diff --git a/.travis.yml b/.travis.yml index 2dffdb283666..56804c71bf50 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,7 +15,6 @@ python: install: - pip install -r test-requirements.txt - - python2 -m pip install --user typing - python setup.py install script: diff --git a/LICENSE b/LICENSE index afddd48c889e..8145cc386a7b 100644 --- a/LICENSE +++ b/LICENSE @@ -27,7 +27,8 @@ DEALINGS IN THE SOFTWARE. = = = = = Portions of mypy are licensed under different licenses. The files -under stdlib-samples are licensed under the PSF 2 License, reproduced below. 
+under stdlib-samples and lib-typing are licensed under the PSF 2 +License, reproduced below. = = = = = diff --git a/ROADMAP.md b/ROADMAP.md deleted file mode 100644 index 132d53c2c581..000000000000 --- a/ROADMAP.md +++ /dev/null @@ -1,96 +0,0 @@ -# Mypy Roadmap - -The goal of the roadmap is to document areas the mypy core team is -planning to work on in the future or is currently working on. PRs -targeting these areas are very welcome, but please check first with a -core team member that nobody else is working on the same thing. - -**Note:** This doesn’t include everything that the core team will work -on, and everything is subject to change. Near-term plans are likely -more accurate. - -## April-June 2017 - -- Add more comprehensive testing for `--incremental` and `--quick` - modes to improve reliability. At least write more unit tests with - focus on areas that have previously had bugs. - ([issue](https://github.com/python/mypy/issues/3455)) - -- Speed up `--quick` mode to better support million+ line codebases - through some of these: - - - Make it possible to use remote caching for incremental cache - files. This would speed up a cold run with no local cache data. - We need to update incremental cache to use hashes to determine - whether files have changes to allow - [sharing cache data](https://github.com/python/mypy/issues/3403). - - - See if we can speed up deserialization of incremental cache - files. Initial experiments aren’t very promising though so there - might not be any easy wins left. - ([issue](https://github.com/python/mypy/issues/3456)) - -- Improve support for complex signatures such as `open(fn, 'rb')` and - specific complex decorators such as `contextlib.contextmanager` - through type checker plugins/hooks. - ([issue](https://github.com/python/mypy/issues/1240)) - -- Document basic properties of all type operations used within mypy, - including compatibility, proper subtyping, joins and meets. - ([issue](https://github.com/python/mypy/issues/3454)) - -- Make TypedDict an officially supported mypy feature. This makes it - possible to give precise types for dictionaries that represent JSON - objects, such as `{"path": "/dir/fnam.ext", "size": 1234}`. - ([issue](https://github.com/python/mypy/issues/3453)) - -- Make error messages more useful and informative. - ([issue](https://github.com/python/mypy/labels/topic-usability)) - -- Resolve [#2008](https://github.com/python/mypy/issues/2008) (we are - converging on approach 4). - -## July-December 2017 - -- Invest some effort into systematically filling in missing - annotations and stubs in typeshed, with focus on features heavily - used at Dropbox. Better support for ORMs will be a separate - project. - -- Improve opt-in warnings about `Any` types to make it easier to keep - code free from unwanted `Any` types. For example, warn about using - `list` (instead of `List[x]`) and calling `open` if we can’t infer a - precise return type, or using types imported from ignored modules - (they are implicitly `Any`). - -- Add support for protocols and structural subtyping (PEP 544). - -- Switch completely to pytest and remove the custom testing framework. - ([issue](https://github.com/python/mypy/issues/1673)) - -- Make it possible to run mypy as a daemon to avoid reprocessing the - entire program on each run. This will improve performance - significantly. Even when using the incremental mode, processing a - large number of files is not cheap. 
- -- Refactor and simplify specific tricky parts of mypy internals, such - as the [conditional type binder](https://github.com/python/mypy/issues/3457), - [symbol tables](https://github.com/python/mypy/issues/3458) or - the various [semantic analysis passes](https://github.com/python/mypy/issues/3459). - -- Implement a general type system plugin architecture. It should be - able to support some typical ORM features at least, such as - metaclasses that add methods with automatically inferred signatures - and complex descriptors such as those used by Django models. - ([issue](https://github.com/python/mypy/issues/1240)) - -- Add support for statically typed - [protobufs](https://developers.google.com/protocol-buffers/). - -- Provide much faster, reliable interactive feedback through - fine-grained incremental type checking, built on top the daemon - mode. - -- Start work on editor plugins and support for selected IDE features. - -- Turn on `--strict-optional` by default. diff --git a/docs/source/cheat_sheet.rst b/docs/source/cheat_sheet.rst index f8e7146c65f4..49919a56831c 100644 --- a/docs/source/cheat_sheet.rst +++ b/docs/source/cheat_sheet.rst @@ -149,18 +149,6 @@ When you're puzzled or when things are complicated reveal_type(c) # -> error: Revealed type is 'builtins.list[builtins.str]' print(c) # -> [4] the object is not cast - # if you want dynamic attributes on your class, have it override __setattr__ or __getattr__ - # in a stub or in your source code. - # __setattr__ allows for dynamic assignment to names - # __getattr__ allows for dynamic access to names - class A: - # this will allow assignment to any A.x, if x is the same type as `value` - def __setattr__(self, name, value): - # type: (str, int) -> None - ... - a.foo = 42 # works - a.bar = 'Ex-parrot' # fails type checking - # TODO: explain "Need type annotation for variable" when # initializing with None or an empty container diff --git a/docs/source/cheat_sheet_py3.rst b/docs/source/cheat_sheet_py3.rst index 5ef62b28134f..adeab7d734d4 100644 --- a/docs/source/cheat_sheet_py3.rst +++ b/docs/source/cheat_sheet_py3.rst @@ -142,19 +142,6 @@ When you're puzzled or when things are complicated reveal_type(c) # -> error: Revealed type is 'builtins.list[builtins.str]' print(c) # -> [4] the object is not cast - # if you want dynamic attributes on your class, have it override __setattr__ or __getattr__ - # in a stub or in your source code. - # __setattr__ allows for dynamic assignment to names - # __getattr__ allows for dynamic access to names - class A: - # this will allow assignment to any A.x, if x is the same type as `value` - def __setattr__(self, name: str, value: int) -> None: ... - # this will allow access to any A.x, if x is compatible with the return type - def __getattr__(self, name: str) -> int: ... - a.foo = 42 # works - a.bar = 'Ex-parrot' # fails type checking - - # TODO: explain "Need type annotation for variable" when # initializing with None or an empty container diff --git a/docs/source/common_issues.rst b/docs/source/common_issues.rst index 0c8b500d8f06..2501acd28fc0 100644 --- a/docs/source/common_issues.rst +++ b/docs/source/common_issues.rst @@ -180,7 +180,7 @@ not support ``sort()``) as a list and sort it in-place: # Type of x is List[int] here. x.sort() # Okay! -.. _variance: +.. 
_invariance-vs-covariance: Invariance vs covariance ------------------------ diff --git a/docs/source/config_file.rst b/docs/source/config_file.rst index 6fe139b1bb8c..7e04286d6cd3 100644 --- a/docs/source/config_file.rst +++ b/docs/source/config_file.rst @@ -178,8 +178,6 @@ overridden by the pattern sections matching the module name. - ``strict_boolean`` (Boolean, default False) makes using non-boolean expressions in conditions an error. -- ``no_implicit_optional`` (Boolean, default false) changes the treatment of - arguments with a default value of None by not implicitly making their type Optional Example ******* diff --git a/docs/source/revision_history.rst b/docs/source/revision_history.rst index 98f6495dd188..dd74dadd244e 100644 --- a/docs/source/revision_history.rst +++ b/docs/source/revision_history.rst @@ -31,7 +31,7 @@ List of major changes: * Add :ref:`variance-of-generics`. - * Add :ref:`variance`. + * Add :ref:`invariance-vs-covariance`. * Updates to :ref:`python-36`. diff --git a/extensions/setup.py b/extensions/setup.py index 59d634debc3f..32741a0b67b6 100644 --- a/extensions/setup.py +++ b/extensions/setup.py @@ -4,7 +4,7 @@ from distutils.core import setup -version = '0.3.0-dev' +version = '0.2.0' description = 'Experimental type system extensions for programs checked with the mypy typechecker.' long_description = ''' Mypy Extensions diff --git a/lib-typing/2.7/mod_generics_cache.py b/lib-typing/2.7/mod_generics_cache.py new file mode 100644 index 000000000000..d9a60b4b28c3 --- /dev/null +++ b/lib-typing/2.7/mod_generics_cache.py @@ -0,0 +1,14 @@ +"""Module for testing the behavior of generics across different modules.""" + +from typing import TypeVar, Generic + +T = TypeVar('T') + + +class A(Generic[T]): + pass + + +class B(Generic[T]): + class A(Generic[T]): + pass diff --git a/lib-typing/2.7/setup.py b/lib-typing/2.7/setup.py new file mode 100644 index 000000000000..18c34d84be64 --- /dev/null +++ b/lib-typing/2.7/setup.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +"""setup.py for Python 2.x typing module""" + +import glob +import os +import os.path +import sys + +from distutils.core import setup + +if sys.version_info >= (3, 0, 0): + sys.stderr.write("ERROR: You need Python 2.x to install this module.\n") + exit(1) + +version = '0.0.1.dev1' +description = 'typing (Python 2.x)' +long_description = ''' +typing (Python 2.x) +=================== + +This module is part of mypy, a static type checker for Python. 
+'''.lstrip() + +classifiers = [ + 'Development Status :: 2 - Pre-Alpha', + 'Environment :: Console', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: MIT License', + 'Operating System :: POSIX', + 'Programming Language :: Python :: 2.7', + 'Topic :: Software Development', +] + +setup(name='typing', + version=version, + description=description, + long_description=long_description, + author='Jukka Lehtosalo', + author_email='jukka.lehtosalo@iki.fi', + url='http://www.mypy-lang.org/', + license='MIT License', + platforms=['POSIX'], + py_modules=['typing'], + classifiers=classifiers, + ) diff --git a/lib-typing/2.7/test_typing.py b/lib-typing/2.7/test_typing.py new file mode 100644 index 000000000000..2ea954fe30f4 --- /dev/null +++ b/lib-typing/2.7/test_typing.py @@ -0,0 +1,1828 @@ +from __future__ import absolute_import, unicode_literals + +import collections +import pickle +import re +import sys +from unittest import TestCase, main, SkipTest +from copy import copy, deepcopy + +from typing import Any +from typing import TypeVar, AnyStr +from typing import T, KT, VT # Not in __all__. +from typing import Union, Optional +from typing import Tuple, List, MutableMapping +from typing import Callable +from typing import Generic, ClassVar, GenericMeta +from typing import cast +from typing import Type +from typing import NewType +from typing import NamedTuple +from typing import Pattern, Match +import typing +import weakref +try: + import collections.abc as collections_abc +except ImportError: + import collections as collections_abc # Fallback for PY3.2. + + +class BaseTestCase(TestCase): + + def assertIsSubclass(self, cls, class_or_tuple, msg=None): + if not issubclass(cls, class_or_tuple): + message = '%r is not a subclass of %r' % (cls, class_or_tuple) + if msg is not None: + message += ' : %s' % msg + raise self.failureException(message) + + def assertNotIsSubclass(self, cls, class_or_tuple, msg=None): + if issubclass(cls, class_or_tuple): + message = '%r is a subclass of %r' % (cls, class_or_tuple) + if msg is not None: + message += ' : %s' % msg + raise self.failureException(message) + + def clear_caches(self): + for f in typing._cleanups: + f() + + +class Employee(object): + pass + + +class Manager(Employee): + pass + + +class Founder(Employee): + pass + + +class ManagingFounder(Manager, Founder): + pass + + +class AnyTests(BaseTestCase): + + def test_any_instance_type_error(self): + with self.assertRaises(TypeError): + isinstance(42, Any) + + def test_any_subclass_type_error(self): + with self.assertRaises(TypeError): + issubclass(Employee, Any) + with self.assertRaises(TypeError): + issubclass(Any, Employee) + + def test_repr(self): + self.assertEqual(repr(Any), 'typing.Any') + + def test_errors(self): + with self.assertRaises(TypeError): + issubclass(42, Any) + with self.assertRaises(TypeError): + Any[int] # Any is not a generic type. + + def test_cannot_subclass(self): + with self.assertRaises(TypeError): + class A(Any): + pass + with self.assertRaises(TypeError): + class A(type(Any)): + pass + + def test_cannot_instantiate(self): + with self.assertRaises(TypeError): + Any() + with self.assertRaises(TypeError): + type(Any)() + + def test_any_is_subclass(self): + # These expressions must simply not fail. + typing.Match[Any] + typing.Pattern[Any] + typing.IO[Any] + + +class TypeVarTests(BaseTestCase): + + def test_basic_plain(self): + T = TypeVar('T') + # T equals itself. 
+ self.assertEqual(T, T) + # T is an instance of TypeVar + self.assertIsInstance(T, TypeVar) + + def test_typevar_instance_type_error(self): + T = TypeVar('T') + with self.assertRaises(TypeError): + isinstance(42, T) + + def test_typevar_subclass_type_error(self): + T = TypeVar('T') + with self.assertRaises(TypeError): + issubclass(int, T) + with self.assertRaises(TypeError): + issubclass(T, int) + + def test_constrained_error(self): + with self.assertRaises(TypeError): + X = TypeVar('X', int) + X + + def test_union_unique(self): + X = TypeVar('X') + Y = TypeVar('Y') + self.assertNotEqual(X, Y) + self.assertEqual(Union[X], X) + self.assertNotEqual(Union[X], Union[X, Y]) + self.assertEqual(Union[X, X], X) + self.assertNotEqual(Union[X, int], Union[X]) + self.assertNotEqual(Union[X, int], Union[int]) + self.assertEqual(Union[X, int].__args__, (X, int)) + self.assertEqual(Union[X, int].__parameters__, (X,)) + self.assertIs(Union[X, int].__origin__, Union) + + def test_union_constrained(self): + A = TypeVar('A', str, bytes) + self.assertNotEqual(Union[A, str], Union[A]) + + def test_repr(self): + self.assertEqual(repr(T), '~T') + self.assertEqual(repr(KT), '~KT') + self.assertEqual(repr(VT), '~VT') + self.assertEqual(repr(AnyStr), '~AnyStr') + T_co = TypeVar('T_co', covariant=True) + self.assertEqual(repr(T_co), '+T_co') + T_contra = TypeVar('T_contra', contravariant=True) + self.assertEqual(repr(T_contra), '-T_contra') + + def test_no_redefinition(self): + self.assertNotEqual(TypeVar('T'), TypeVar('T')) + self.assertNotEqual(TypeVar('T', int, str), TypeVar('T', int, str)) + + def test_cannot_subclass_vars(self): + with self.assertRaises(TypeError): + class V(TypeVar('T')): + pass + + def test_cannot_subclass_var_itself(self): + with self.assertRaises(TypeError): + class V(TypeVar): + pass + + def test_cannot_instantiate_vars(self): + with self.assertRaises(TypeError): + TypeVar('A')() + + def test_bound_errors(self): + with self.assertRaises(TypeError): + TypeVar('X', bound=42) + with self.assertRaises(TypeError): + TypeVar('X', str, float, bound=Employee) + + def test_no_bivariant(self): + with self.assertRaises(ValueError): + TypeVar('T', covariant=True, contravariant=True) + + +class UnionTests(BaseTestCase): + + def test_basics(self): + u = Union[int, float] + self.assertNotEqual(u, Union) + + def test_subclass_error(self): + with self.assertRaises(TypeError): + issubclass(int, Union) + with self.assertRaises(TypeError): + issubclass(Union, int) + with self.assertRaises(TypeError): + issubclass(int, Union[int, str]) + with self.assertRaises(TypeError): + issubclass(Union[int, str], int) + + def test_union_any(self): + u = Union[Any] + self.assertEqual(u, Any) + u1 = Union[int, Any] + u2 = Union[Any, int] + u3 = Union[Any, object] + self.assertEqual(u1, u2) + self.assertNotEqual(u1, Any) + self.assertNotEqual(u2, Any) + self.assertNotEqual(u3, Any) + + def test_union_object(self): + u = Union[object] + self.assertEqual(u, object) + u = Union[int, object] + self.assertEqual(u, object) + u = Union[object, int] + self.assertEqual(u, object) + + def test_unordered(self): + u1 = Union[int, float] + u2 = Union[float, int] + self.assertEqual(u1, u2) + + def test_single_class_disappears(self): + t = Union[Employee] + self.assertIs(t, Employee) + + def test_base_class_disappears(self): + u = Union[Employee, Manager, int] + self.assertEqual(u, Union[int, Employee]) + u = Union[Manager, int, Employee] + self.assertEqual(u, Union[int, Employee]) + u = Union[Employee, Manager] + self.assertIs(u, 
Employee) + + def test_union_union(self): + u = Union[int, float] + v = Union[u, Employee] + self.assertEqual(v, Union[int, float, Employee]) + + def test_repr(self): + self.assertEqual(repr(Union), 'typing.Union') + u = Union[Employee, int] + self.assertEqual(repr(u), 'typing.Union[%s.Employee, int]' % __name__) + u = Union[int, Employee] + self.assertEqual(repr(u), 'typing.Union[int, %s.Employee]' % __name__) + T = TypeVar('T') + u = Union[T, int][int] + self.assertEqual(repr(u), repr(int)) + u = Union[List[int], int] + self.assertEqual(repr(u), 'typing.Union[typing.List[int], int]') + + def test_cannot_subclass(self): + with self.assertRaises(TypeError): + class C(Union): + pass + with self.assertRaises(TypeError): + class C(type(Union)): + pass + with self.assertRaises(TypeError): + class C(Union[int, str]): + pass + + def test_cannot_instantiate(self): + with self.assertRaises(TypeError): + Union() + u = Union[int, float] + with self.assertRaises(TypeError): + u() + with self.assertRaises(TypeError): + type(u)() + + def test_union_generalization(self): + self.assertFalse(Union[str, typing.Iterable[int]] == str) + self.assertFalse(Union[str, typing.Iterable[int]] == typing.Iterable[int]) + self.assertTrue(Union[str, typing.Iterable] == typing.Iterable) + + def test_union_compare_other(self): + self.assertNotEqual(Union, object) + self.assertNotEqual(Union, Any) + self.assertNotEqual(ClassVar, Union) + self.assertNotEqual(Optional, Union) + self.assertNotEqual([None], Optional) + self.assertNotEqual(Optional, typing.Mapping) + self.assertNotEqual(Optional[typing.MutableMapping], Union) + + def test_optional(self): + o = Optional[int] + u = Union[int, None] + self.assertEqual(o, u) + + def test_empty(self): + with self.assertRaises(TypeError): + Union[()] + + def test_union_instance_type_error(self): + with self.assertRaises(TypeError): + isinstance(42, Union[int, str]) + + def test_no_eval_union(self): + u = Union[int, str] + self.assertIs(u._eval_type({}, {}), u) + + def test_function_repr_union(self): + def fun(): pass + self.assertEqual(repr(Union[fun, int]), 'typing.Union[fun, int]') + + def test_union_str_pattern(self): + # Shouldn't crash; see http://bugs.python.org/issue25390 + A = Union[str, Pattern] + A + + def test_etree(self): + # See https://github.com/python/typing/issues/229 + # (Only relevant for Python 2.) 
+ try: + from xml.etree.cElementTree import Element + except ImportError: + raise SkipTest("cElementTree not found") + Union[Element, str] # Shouldn't crash + + def Elem(*args): + return Element(*args) + + Union[Elem, str] # Nor should this + + +class TupleTests(BaseTestCase): + + def test_basics(self): + with self.assertRaises(TypeError): + issubclass(Tuple, Tuple[int, str]) + with self.assertRaises(TypeError): + issubclass(tuple, Tuple[int, str]) + + class TP(tuple): pass + self.assertTrue(issubclass(tuple, Tuple)) + self.assertTrue(issubclass(TP, Tuple)) + + def test_equality(self): + self.assertEqual(Tuple[int], Tuple[int]) + self.assertEqual(Tuple[int, ...], Tuple[int, ...]) + self.assertNotEqual(Tuple[int], Tuple[int, int]) + self.assertNotEqual(Tuple[int], Tuple[int, ...]) + + def test_tuple_subclass(self): + class MyTuple(tuple): + pass + self.assertTrue(issubclass(MyTuple, Tuple)) + + def test_tuple_instance_type_error(self): + with self.assertRaises(TypeError): + isinstance((0, 0), Tuple[int, int]) + isinstance((0, 0), Tuple) + + def test_repr(self): + self.assertEqual(repr(Tuple), 'typing.Tuple') + self.assertEqual(repr(Tuple[()]), 'typing.Tuple[()]') + self.assertEqual(repr(Tuple[int, float]), 'typing.Tuple[int, float]') + self.assertEqual(repr(Tuple[int, ...]), 'typing.Tuple[int, ...]') + + def test_errors(self): + with self.assertRaises(TypeError): + issubclass(42, Tuple) + with self.assertRaises(TypeError): + issubclass(42, Tuple[int]) + + +class CallableTests(BaseTestCase): + + def test_self_subclass(self): + with self.assertRaises(TypeError): + self.assertTrue(issubclass(type(lambda x: x), Callable[[int], int])) + self.assertTrue(issubclass(type(lambda x: x), Callable)) + + def test_eq_hash(self): + self.assertEqual(Callable[[int], int], Callable[[int], int]) + self.assertEqual(len({Callable[[int], int], Callable[[int], int]}), 1) + self.assertNotEqual(Callable[[int], int], Callable[[int], str]) + self.assertNotEqual(Callable[[int], int], Callable[[str], int]) + self.assertNotEqual(Callable[[int], int], Callable[[int, int], int]) + self.assertNotEqual(Callable[[int], int], Callable[[], int]) + self.assertNotEqual(Callable[[int], int], Callable) + + def test_cannot_instantiate(self): + with self.assertRaises(TypeError): + Callable() + with self.assertRaises(TypeError): + type(Callable)() + c = Callable[[int], str] + with self.assertRaises(TypeError): + c() + with self.assertRaises(TypeError): + type(c)() + + def test_callable_wrong_forms(self): + with self.assertRaises(TypeError): + Callable[(), int] + with self.assertRaises(TypeError): + Callable[[()], int] + with self.assertRaises(TypeError): + Callable[[int, 1], 2] + with self.assertRaises(TypeError): + Callable[int] + + def test_callable_instance_works(self): + def f(): + pass + self.assertIsInstance(f, Callable) + self.assertNotIsInstance(None, Callable) + + def test_callable_instance_type_error(self): + def f(): + pass + with self.assertRaises(TypeError): + self.assertIsInstance(f, Callable[[], None]) + with self.assertRaises(TypeError): + self.assertIsInstance(f, Callable[[], Any]) + with self.assertRaises(TypeError): + self.assertNotIsInstance(None, Callable[[], None]) + with self.assertRaises(TypeError): + self.assertNotIsInstance(None, Callable[[], Any]) + + def test_repr(self): + ct0 = Callable[[], bool] + self.assertEqual(repr(ct0), 'typing.Callable[[], bool]') + ct2 = Callable[[str, float], int] + self.assertEqual(repr(ct2), 'typing.Callable[[str, float], int]') + ctv = Callable[..., str] + 
self.assertEqual(repr(ctv), 'typing.Callable[..., str]') + + def test_ellipsis_in_generic(self): + # Shouldn't crash; see https://github.com/python/typing/issues/259 + typing.List[Callable[..., str]] + + +XK = TypeVar('XK', unicode, bytes) +XV = TypeVar('XV') + + +class SimpleMapping(Generic[XK, XV]): + + def __getitem__(self, key): + pass + + def __setitem__(self, key, value): + pass + + def get(self, key, default=None): + pass + + +class MySimpleMapping(SimpleMapping[XK, XV]): + + def __init__(self): + self.store = {} + + def __getitem__(self, key): + return self.store[key] + + def __setitem__(self, key, value): + self.store[key] = value + + def get(self, key, default=None): + try: + return self.store[key] + except KeyError: + return default + + +class ProtocolTests(BaseTestCase): + + def test_supports_int(self): + self.assertIsSubclass(int, typing.SupportsInt) + self.assertNotIsSubclass(str, typing.SupportsInt) + + def test_supports_float(self): + self.assertIsSubclass(float, typing.SupportsFloat) + self.assertNotIsSubclass(str, typing.SupportsFloat) + + def test_supports_complex(self): + + # Note: complex itself doesn't have __complex__. + class C(object): + def __complex__(self): + return 0j + + self.assertIsSubclass(C, typing.SupportsComplex) + self.assertNotIsSubclass(str, typing.SupportsComplex) + + def test_supports_abs(self): + self.assertIsSubclass(float, typing.SupportsAbs) + self.assertIsSubclass(int, typing.SupportsAbs) + self.assertNotIsSubclass(str, typing.SupportsAbs) + + def test_reversible(self): + self.assertIsSubclass(list, typing.Reversible) + self.assertNotIsSubclass(int, typing.Reversible) + + def test_protocol_instance_type_error(self): + with self.assertRaises(TypeError): + isinstance(0, typing.SupportsAbs) + class C1(typing.SupportsInt): + def __int__(self): + return 42 + class C2(C1): + pass + c = C2() + self.assertIsInstance(c, C1) + + +class GenericTests(BaseTestCase): + + def test_basics(self): + X = SimpleMapping[str, Any] + self.assertEqual(X.__parameters__, ()) + with self.assertRaises(TypeError): + X[unicode] + with self.assertRaises(TypeError): + X[unicode, unicode] + Y = SimpleMapping[XK, unicode] + self.assertEqual(Y.__parameters__, (XK,)) + Y[unicode] + with self.assertRaises(TypeError): + Y[unicode, unicode] + self.assertIsSubclass(SimpleMapping[str, int], SimpleMapping) + + def test_generic_errors(self): + T = TypeVar('T') + S = TypeVar('S') + with self.assertRaises(TypeError): + Generic[T]() + with self.assertRaises(TypeError): + Generic[T][T] + with self.assertRaises(TypeError): + Generic[T][S] + with self.assertRaises(TypeError): + isinstance([], List[int]) + with self.assertRaises(TypeError): + issubclass(list, List[int]) + with self.assertRaises(TypeError): + class NewGeneric(Generic): pass + with self.assertRaises(TypeError): + class MyGeneric(Generic[T], Generic[S]): pass + with self.assertRaises(TypeError): + class MyGeneric(List[T], Generic[S]): pass + + def test_init(self): + T = TypeVar('T') + S = TypeVar('S') + with self.assertRaises(TypeError): + Generic[T, T] + with self.assertRaises(TypeError): + Generic[T, S, T] + + def test_repr(self): + self.assertEqual(repr(SimpleMapping), + __name__ + '.' + 'SimpleMapping') + self.assertEqual(repr(MySimpleMapping), + __name__ + '.' 
+ 'MySimpleMapping') + + def test_chain_repr(self): + T = TypeVar('T') + S = TypeVar('S') + + class C(Generic[T]): + pass + + X = C[Tuple[S, T]] + self.assertEqual(X, C[Tuple[S, T]]) + self.assertNotEqual(X, C[Tuple[T, S]]) + + Y = X[T, int] + self.assertEqual(Y, X[T, int]) + self.assertNotEqual(Y, X[S, int]) + self.assertNotEqual(Y, X[T, str]) + + Z = Y[str] + self.assertEqual(Z, Y[str]) + self.assertNotEqual(Z, Y[int]) + self.assertNotEqual(Z, Y[T]) + + self.assertTrue(str(Z).endswith( + '.C[typing.Tuple[str, int]]')) + + def test_new_repr(self): + T = TypeVar('T') + U = TypeVar('U', covariant=True) + S = TypeVar('S') + + self.assertEqual(repr(List), 'typing.List') + self.assertEqual(repr(List[T]), 'typing.List[~T]') + self.assertEqual(repr(List[U]), 'typing.List[+U]') + self.assertEqual(repr(List[S][T][int]), 'typing.List[int]') + self.assertEqual(repr(List[int]), 'typing.List[int]') + + def test_new_repr_complex(self): + T = TypeVar('T') + TS = TypeVar('TS') + + self.assertEqual(repr(typing.Mapping[T, TS][TS, T]), 'typing.Mapping[~TS, ~T]') + self.assertEqual(repr(List[Tuple[T, TS]][int, T]), + 'typing.List[typing.Tuple[int, ~T]]') + self.assertEqual( + repr(List[Tuple[T, T]][List[int]]), + 'typing.List[typing.Tuple[typing.List[int], typing.List[int]]]' + ) + + def test_new_repr_bare(self): + T = TypeVar('T') + self.assertEqual(repr(Generic[T]), 'typing.Generic[~T]') + self.assertEqual(repr(typing._Protocol[T]), 'typing.Protocol[~T]') + class C(typing.Dict[Any, Any]): pass + # this line should just work + repr(C.__mro__) + + def test_dict(self): + T = TypeVar('T') + + class B(Generic[T]): + pass + + b = B() + b.foo = 42 + self.assertEqual(b.__dict__, {'foo': 42}) + + class C(B[int]): + pass + + c = C() + c.bar = 'abc' + self.assertEqual(c.__dict__, {'bar': 'abc'}) + + def test_subscripted_generics_as_proxies(self): + T = TypeVar('T') + class C(Generic[T]): + x = 'def' + self.assertEqual(C[int].x, 'def') + self.assertEqual(C[C[int]].x, 'def') + C[C[int]].x = 'changed' + self.assertEqual(C.x, 'changed') + self.assertEqual(C[str].x, 'changed') + C[List[str]].z = 'new' + self.assertEqual(C.z, 'new') + self.assertEqual(C[Tuple[int]].z, 'new') + + self.assertEqual(C().x, 'changed') + self.assertEqual(C[Tuple[str]]().z, 'new') + + class D(C[T]): + pass + self.assertEqual(D[int].x, 'changed') + self.assertEqual(D.z, 'new') + D.z = 'from derived z' + D[int].x = 'from derived x' + self.assertEqual(C.x, 'changed') + self.assertEqual(C[int].z, 'new') + self.assertEqual(D.x, 'from derived x') + self.assertEqual(D[str].z, 'from derived z') + + def test_abc_registry_kept(self): + T = TypeVar('T') + class C(Generic[T]): pass + C.register(int) + self.assertIsInstance(1, C) + C[int] + self.assertIsInstance(1, C) + + def test_false_subclasses(self): + class MyMapping(MutableMapping[str, str]): pass + self.assertNotIsInstance({}, MyMapping) + self.assertNotIsSubclass(dict, MyMapping) + + def test_abc_bases(self): + class MM(MutableMapping[str, str]): + def __getitem__(self, k): + return None + def __setitem__(self, k, v): + pass + def __delitem__(self, k): + pass + def __iter__(self): + return iter(()) + def __len__(self): + return 0 + # this should just work + MM().update() + self.assertIsInstance(MM(), collections_abc.MutableMapping) + self.assertIsInstance(MM(), MutableMapping) + self.assertNotIsInstance(MM(), List) + self.assertNotIsInstance({}, MM) + + def test_multiple_bases(self): + class MM1(MutableMapping[str, str], collections_abc.MutableMapping): + pass + with self.assertRaises(TypeError): + # 
consistent MRO not possible + class MM2(collections_abc.MutableMapping, MutableMapping[str, str]): + pass + + def test_orig_bases(self): + T = TypeVar('T') + class C(typing.Dict[str, T]): pass + self.assertEqual(C.__orig_bases__, (typing.Dict[str, T],)) + + def test_naive_runtime_checks(self): + def naive_dict_check(obj, tp): + # Check if a dictionary conforms to Dict type + if len(tp.__parameters__) > 0: + raise NotImplementedError + if tp.__args__: + KT, VT = tp.__args__ + return all( + isinstance(k, KT) and isinstance(v, VT) + for k, v in obj.items() + ) + self.assertTrue(naive_dict_check({'x': 1}, typing.Dict[typing.Text, int])) + self.assertFalse(naive_dict_check({1: 'x'}, typing.Dict[typing.Text, int])) + with self.assertRaises(NotImplementedError): + naive_dict_check({1: 'x'}, typing.Dict[typing.Text, T]) + + def naive_generic_check(obj, tp): + # Check if an instance conforms to the generic class + if not hasattr(obj, '__orig_class__'): + raise NotImplementedError + return obj.__orig_class__ == tp + class Node(Generic[T]): pass + self.assertTrue(naive_generic_check(Node[int](), Node[int])) + self.assertFalse(naive_generic_check(Node[str](), Node[int])) + self.assertFalse(naive_generic_check(Node[str](), List)) + with self.assertRaises(NotImplementedError): + naive_generic_check([1, 2, 3], Node[int]) + + def naive_list_base_check(obj, tp): + # Check if list conforms to a List subclass + return all(isinstance(x, tp.__orig_bases__[0].__args__[0]) + for x in obj) + class C(List[int]): pass + self.assertTrue(naive_list_base_check([1, 2, 3], C)) + self.assertFalse(naive_list_base_check(['a', 'b'], C)) + + def test_multi_subscr_base(self): + T = TypeVar('T') + U = TypeVar('U') + V = TypeVar('V') + class C(List[T][U][V]): pass + class D(C, List[T][U][V]): pass + self.assertEqual(C.__parameters__, (V,)) + self.assertEqual(D.__parameters__, (V,)) + self.assertEqual(C[int].__parameters__, ()) + self.assertEqual(D[int].__parameters__, ()) + self.assertEqual(C[int].__args__, (int,)) + self.assertEqual(D[int].__args__, (int,)) + self.assertEqual(C.__bases__, (List,)) + self.assertEqual(D.__bases__, (C, List)) + self.assertEqual(C.__orig_bases__, (List[T][U][V],)) + self.assertEqual(D.__orig_bases__, (C, List[T][U][V])) + + def test_subscript_meta(self): + T = TypeVar('T') + self.assertEqual(Type[GenericMeta], Type[GenericMeta]) + self.assertEqual(Union[T, int][GenericMeta], Union[GenericMeta, int]) + self.assertEqual(Callable[..., GenericMeta].__args__, (Ellipsis, GenericMeta)) + + def test_generic_hashes(self): + import mod_generics_cache + class A(Generic[T]): + __module__ = 'test_typing' + + class B(Generic[T]): + class A(Generic[T]): + pass + + self.assertEqual(A, A) + self.assertEqual(mod_generics_cache.A[str], mod_generics_cache.A[str]) + self.assertEqual(B.A, B.A) + self.assertEqual(mod_generics_cache.B.A[B.A[str]], + mod_generics_cache.B.A[B.A[str]]) + + self.assertNotEqual(A, B.A) + self.assertNotEqual(A, mod_generics_cache.A) + self.assertNotEqual(A, mod_generics_cache.B.A) + self.assertNotEqual(B.A, mod_generics_cache.A) + self.assertNotEqual(B.A, mod_generics_cache.B.A) + + self.assertNotEqual(A[str], B.A[str]) + self.assertNotEqual(A[List[Any]], B.A[List[Any]]) + self.assertNotEqual(A[str], mod_generics_cache.A[str]) + self.assertNotEqual(A[str], mod_generics_cache.B.A[str]) + self.assertNotEqual(B.A[int], mod_generics_cache.A[int]) + self.assertNotEqual(B.A[List[Any]], mod_generics_cache.B.A[List[Any]]) + + self.assertNotEqual(Tuple[A[str]], Tuple[B.A[str]]) + 
self.assertNotEqual(Tuple[A[List[Any]]], Tuple[B.A[List[Any]]]) + self.assertNotEqual(Union[str, A[str]], Union[str, mod_generics_cache.A[str]]) + self.assertNotEqual(Union[A[str], A[str]], + Union[A[str], mod_generics_cache.A[str]]) + self.assertNotEqual(typing.FrozenSet[A[str]], + typing.FrozenSet[mod_generics_cache.B.A[str]]) + + self.assertTrue(repr(Tuple[A[str]]).endswith('test_typing.A[str]]')) + self.assertTrue(repr(Tuple[mod_generics_cache.A[str]]) + .endswith('mod_generics_cache.A[str]]')) + + def test_extended_generic_rules_eq(self): + T = TypeVar('T') + U = TypeVar('U') + self.assertEqual(Tuple[T, T][int], Tuple[int, int]) + self.assertEqual(typing.Iterable[Tuple[T, T]][T], typing.Iterable[Tuple[T, T]]) + with self.assertRaises(TypeError): + Tuple[T, int][()] + with self.assertRaises(TypeError): + Tuple[T, U][T, ...] + + self.assertEqual(Union[T, int][int], int) + self.assertEqual(Union[T, U][int, Union[int, str]], Union[int, str]) + class Base(object): pass + class Derived(Base): pass + self.assertEqual(Union[T, Base][Derived], Base) + with self.assertRaises(TypeError): + Union[T, int][1] + + self.assertEqual(Callable[[T], T][KT], Callable[[KT], KT]) + self.assertEqual(Callable[..., List[T]][int], Callable[..., List[int]]) + with self.assertRaises(TypeError): + Callable[[T], U][..., int] + with self.assertRaises(TypeError): + Callable[[T], U][[], int] + + def test_extended_generic_rules_repr(self): + T = TypeVar('T') + self.assertEqual(repr(Union[Tuple, Callable]).replace('typing.', ''), + 'Union[Tuple, Callable]') + self.assertEqual(repr(Union[Tuple, Tuple[int]]).replace('typing.', ''), + 'Tuple') + self.assertEqual(repr(Callable[..., Optional[T]][int]).replace('typing.', ''), + 'Callable[..., Union[int, NoneType]]') + self.assertEqual(repr(Callable[[], List[T]][int]).replace('typing.', ''), + 'Callable[[], List[int]]') + + def test_generic_forvard_ref(self): + LLT = List[List['CC']] + class CC: pass + self.assertEqual(typing._eval_type(LLT, globals(), locals()), List[List[CC]]) + T = TypeVar('T') + AT = Tuple[T, ...] 
+ self.assertIs(typing._eval_type(AT, globals(), locals()), AT) + CT = Callable[..., List[T]] + self.assertIs(typing._eval_type(CT, globals(), locals()), CT) + + def test_extended_generic_rules_subclassing(self): + class T1(Tuple[T, KT]): pass + class T2(Tuple[T, ...]): pass + class C1(Callable[[T], T]): pass + class C2(Callable[..., int]): + def __call__(self): + return None + + self.assertEqual(T1.__parameters__, (T, KT)) + self.assertEqual(T1[int, str].__args__, (int, str)) + self.assertEqual(T1[int, T].__origin__, T1) + + self.assertEqual(T2.__parameters__, (T,)) + with self.assertRaises(TypeError): + T1[int] + with self.assertRaises(TypeError): + T2[int, str] + + self.assertEqual(repr(C1[int]).split('.')[-1], 'C1[int]') + self.assertEqual(C2.__parameters__, ()) + self.assertIsInstance(C2(), collections_abc.Callable) + self.assertIsSubclass(C2, collections_abc.Callable) + self.assertIsSubclass(C1, collections_abc.Callable) + self.assertIsInstance(T1(), tuple) + self.assertIsSubclass(T2, tuple) + self.assertIsSubclass(Tuple[int, ...], typing.Sequence) + self.assertIsSubclass(Tuple[int, ...], typing.Iterable) + + def test_fail_with_bare_union(self): + with self.assertRaises(TypeError): + List[Union] + with self.assertRaises(TypeError): + Tuple[Optional] + with self.assertRaises(TypeError): + ClassVar[ClassVar] + with self.assertRaises(TypeError): + List[ClassVar[int]] + + def test_fail_with_bare_generic(self): + T = TypeVar('T') + with self.assertRaises(TypeError): + List[Generic] + with self.assertRaises(TypeError): + Tuple[Generic[T]] + with self.assertRaises(TypeError): + List[typing._Protocol] + with self.assertRaises(TypeError): + isinstance(1, Generic) + + def test_type_erasure_special(self): + T = TypeVar('T') + # this is the only test that checks type caching + self.clear_caches() + class MyTup(Tuple[T, T]): pass + self.assertIs(MyTup[int]().__class__, MyTup) + self.assertIs(MyTup[int]().__orig_class__, MyTup[int]) + class MyCall(Callable[..., T]): + def __call__(self): return None + self.assertIs(MyCall[T]().__class__, MyCall) + self.assertIs(MyCall[T]().__orig_class__, MyCall[T]) + class MyDict(typing.Dict[T, T]): pass + self.assertIs(MyDict[int]().__class__, MyDict) + self.assertIs(MyDict[int]().__orig_class__, MyDict[int]) + class MyDef(typing.DefaultDict[str, T]): pass + self.assertIs(MyDef[int]().__class__, MyDef) + self.assertIs(MyDef[int]().__orig_class__, MyDef[int]) + + def test_all_repr_eq_any(self): + objs = (getattr(typing, el) for el in typing.__all__) + for obj in objs: + self.assertNotEqual(repr(obj), '') + self.assertEqual(obj, obj) + if getattr(obj, '__parameters__', None) and len(obj.__parameters__) == 1: + self.assertEqual(obj[Any].__args__, (Any,)) + if isinstance(obj, type): + for base in obj.__mro__: + self.assertNotEqual(repr(base), '') + self.assertEqual(base, base) + + def test_pickle(self): + global C # pickle wants to reference the class by name + T = TypeVar('T') + + class B(Generic[T]): + pass + + class C(B[int]): + pass + + c = C() + c.foo = 42 + c.bar = 'abc' + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + z = pickle.dumps(c, proto) + x = pickle.loads(z) + self.assertEqual(x.foo, 42) + self.assertEqual(x.bar, 'abc') + self.assertEqual(x.__dict__, {'foo': 42, 'bar': 'abc'}) + simples = [Any, Union, Tuple, Callable, ClassVar, List, typing.Iterable] + for s in simples: + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + z = pickle.dumps(s, proto) + x = pickle.loads(z) + self.assertEqual(s, x) + + def test_copy_and_deepcopy(self): + T = 
TypeVar('T') + class Node(Generic[T]): pass + things = [ + Any, + Callable[..., T], + Callable[[int], int], + ClassVar[List[T]], + ClassVar[int], + List['T'], + Node[Any], + Node[T], + Node[int], + Tuple['T', 'T'], + Tuple[Any, Any], + Tuple[T, int], + Union['T', int], + Union[T, int], + typing.Dict[T, Any], + typing.Dict[int, str], + typing.Iterable[Any], + typing.Iterable[T], + typing.Iterable[int], + typing.Mapping['T', int] + ] + for t in things: + self.assertEqual(t, deepcopy(t)) + self.assertEqual(t, copy(t)) + + def test_weakref_all(self): + T = TypeVar('T') + things = [Any, Union[T, int], Callable[..., T], Tuple[Any, Any], + Optional[List[int]], typing.Mapping[int, str], + typing.re.Match[bytes], typing.Iterable['whatever']] + for t in things: + self.assertEqual(weakref.ref(t)(), t) + + def test_parameterized_slots(self): + T = TypeVar('T') + class C(Generic[T]): + __slots__ = ('potato',) + + c = C() + c_int = C[int]() + self.assertEqual(C.__slots__, C[str].__slots__) + + c.potato = 0 + c_int.potato = 0 + with self.assertRaises(AttributeError): + c.tomato = 0 + with self.assertRaises(AttributeError): + c_int.tomato = 0 + + self.assertEqual(typing._eval_type(C['C'], globals(), locals()), C[C]) + self.assertEqual(typing._eval_type(C['C'], globals(), locals()).__slots__, + C.__slots__) + self.assertEqual(copy(C[int]), deepcopy(C[int])) + + def test_parameterized_slots_dict(self): + T = TypeVar('T') + class D(Generic[T]): + __slots__ = {'banana': 42} + + d = D() + d_int = D[int]() + self.assertEqual(D.__slots__, D[str].__slots__) + + d.banana = 'yes' + d_int.banana = 'yes' + with self.assertRaises(AttributeError): + d.foobar = 'no' + with self.assertRaises(AttributeError): + d_int.foobar = 'no' + + def test_errors(self): + with self.assertRaises(TypeError): + B = SimpleMapping[XK, Any] + + class C(Generic[B]): + pass + + def test_repr_2(self): + PY32 = sys.version_info[:2] < (3, 3) + + class C(Generic[T]): + pass + + self.assertEqual(C.__module__, __name__) + if not PY32: + self.assertEqual(C.__qualname__, + 'GenericTests.test_repr_2..C') + self.assertEqual(repr(C).split('.')[-1], 'C') + X = C[int] + self.assertEqual(X.__module__, __name__) + if not PY32: + self.assertTrue(X.__qualname__.endswith('..C')) + self.assertEqual(repr(X).split('.')[-1], 'C[int]') + + class Y(C[int]): + pass + + self.assertEqual(Y.__module__, __name__) + if not PY32: + self.assertEqual(Y.__qualname__, + 'GenericTests.test_repr_2..Y') + self.assertEqual(repr(Y).split('.')[-1], 'Y') + + def test_eq_1(self): + self.assertEqual(Generic, Generic) + self.assertEqual(Generic[T], Generic[T]) + self.assertNotEqual(Generic[KT], Generic[VT]) + + def test_eq_2(self): + + class A(Generic[T]): + pass + + class B(Generic[T]): + pass + + self.assertEqual(A, A) + self.assertNotEqual(A, B) + self.assertEqual(A[T], A[T]) + self.assertNotEqual(A[T], B[T]) + + def test_multiple_inheritance(self): + + class A(Generic[T, VT]): + pass + + class B(Generic[KT, T]): + pass + + class C(A[T, VT], Generic[VT, T, KT], B[KT, T]): + pass + + self.assertEqual(C.__parameters__, (VT, T, KT)) + + def test_nested(self): + + G = Generic + + class Visitor(G[T]): + + a = None + + def set(self, a): + self.a = a + + def get(self): + return self.a + + def visit(self): + return self.a + + V = Visitor[typing.List[int]] + + class IntListVisitor(V): + + def append(self, x): + self.a.append(x) + + a = IntListVisitor() + a.set([]) + a.append(1) + a.append(42) + self.assertEqual(a.get(), [1, 42]) + + def test_type_erasure(self): + T = TypeVar('T') + + class 
Node(Generic[T]): + def __init__(self, label, + left=None, + right=None): + self.label = label # type: T + self.left = left # type: Optional[Node[T]] + self.right = right # type: Optional[Node[T]] + + def foo(x): + a = Node(x) + b = Node[T](x) + c = Node[Any](x) + self.assertIs(type(a), Node) + self.assertIs(type(b), Node) + self.assertIs(type(c), Node) + self.assertEqual(a.label, x) + self.assertEqual(b.label, x) + self.assertEqual(c.label, x) + + foo(42) + + def test_implicit_any(self): + T = TypeVar('T') + + class C(Generic[T]): + pass + + class D(C): + pass + + self.assertEqual(D.__parameters__, ()) + + with self.assertRaises(Exception): + D[int] + with self.assertRaises(Exception): + D[Any] + with self.assertRaises(Exception): + D[T] + + +class ClassVarTests(BaseTestCase): + + def test_basics(self): + with self.assertRaises(TypeError): + ClassVar[1] + with self.assertRaises(TypeError): + ClassVar[int, str] + with self.assertRaises(TypeError): + ClassVar[int][str] + + def test_repr(self): + self.assertEqual(repr(ClassVar), 'typing.ClassVar') + cv = ClassVar[int] + self.assertEqual(repr(cv), 'typing.ClassVar[int]') + cv = ClassVar[Employee] + self.assertEqual(repr(cv), 'typing.ClassVar[%s.Employee]' % __name__) + + def test_cannot_subclass(self): + with self.assertRaises(TypeError): + class C(type(ClassVar)): + pass + with self.assertRaises(TypeError): + class C(type(ClassVar[int])): + pass + + def test_cannot_init(self): + with self.assertRaises(TypeError): + ClassVar() + with self.assertRaises(TypeError): + type(ClassVar)() + with self.assertRaises(TypeError): + type(ClassVar[Optional[int]])() + + def test_no_isinstance(self): + with self.assertRaises(TypeError): + isinstance(1, ClassVar[int]) + with self.assertRaises(TypeError): + issubclass(int, ClassVar) + + +class CastTests(BaseTestCase): + + def test_basics(self): + self.assertEqual(cast(int, 42), 42) + self.assertEqual(cast(float, 42), 42) + self.assertIs(type(cast(float, 42)), int) + self.assertEqual(cast(Any, 42), 42) + self.assertEqual(cast(list, 42), 42) + self.assertEqual(cast(Union[str, float], 42), 42) + self.assertEqual(cast(AnyStr, 42), 42) + self.assertEqual(cast(None, 42), 42) + + def test_errors(self): + # Bogus calls are not expected to fail. 
+ cast(42, 42) + cast('hello', 42) + + +class ForwardRefTests(BaseTestCase): + + def test_forwardref_instance_type_error(self): + fr = typing._ForwardRef('int') + with self.assertRaises(TypeError): + isinstance(42, fr) + + def test_syntax_error(self): + + with self.assertRaises(SyntaxError): + Generic['/T'] + + def test_forwardref_subclass_type_error(self): + fr = typing._ForwardRef('int') + with self.assertRaises(TypeError): + issubclass(int, fr) + + def test_forward_equality(self): + fr = typing._ForwardRef('int') + self.assertEqual(fr, typing._ForwardRef('int')) + self.assertNotEqual(List['int'], List[int]) + + def test_forward_repr(self): + self.assertEqual(repr(List['int']), "typing.List[_ForwardRef(%r)]" % 'int') + + +class OverloadTests(BaseTestCase): + + def test_overload_fails(self): + from typing import overload + + with self.assertRaises(RuntimeError): + + @overload + def blah(): + pass + + blah() + + def test_overload_succeeds(self): + from typing import overload + + @overload + def blah(): + pass + + def blah(): + pass + + blah() + + +class CollectionsAbcTests(BaseTestCase): + + def test_hashable(self): + self.assertIsInstance(42, typing.Hashable) + self.assertNotIsInstance([], typing.Hashable) + + def test_iterable(self): + self.assertIsInstance([], typing.Iterable) + # Due to ABC caching, the second time takes a separate code + # path and could fail. So call this a few times. + self.assertIsInstance([], typing.Iterable) + self.assertIsInstance([], typing.Iterable) + self.assertNotIsInstance(42, typing.Iterable) + # Just in case, also test issubclass() a few times. + self.assertIsSubclass(list, typing.Iterable) + self.assertIsSubclass(list, typing.Iterable) + + def test_iterator(self): + it = iter([]) + self.assertIsInstance(it, typing.Iterator) + self.assertNotIsInstance(42, typing.Iterator) + + def test_sized(self): + self.assertIsInstance([], typing.Sized) + self.assertNotIsInstance(42, typing.Sized) + + def test_container(self): + self.assertIsInstance([], typing.Container) + self.assertNotIsInstance(42, typing.Container) + + def test_abstractset(self): + self.assertIsInstance(set(), typing.AbstractSet) + self.assertNotIsInstance(42, typing.AbstractSet) + + def test_mutableset(self): + self.assertIsInstance(set(), typing.MutableSet) + self.assertNotIsInstance(frozenset(), typing.MutableSet) + + def test_mapping(self): + self.assertIsInstance({}, typing.Mapping) + self.assertNotIsInstance(42, typing.Mapping) + + def test_mutablemapping(self): + self.assertIsInstance({}, typing.MutableMapping) + self.assertNotIsInstance(42, typing.MutableMapping) + + def test_sequence(self): + self.assertIsInstance([], typing.Sequence) + self.assertNotIsInstance(42, typing.Sequence) + + def test_mutablesequence(self): + self.assertIsInstance([], typing.MutableSequence) + self.assertNotIsInstance((), typing.MutableSequence) + + def test_bytestring(self): + self.assertIsInstance(b'', typing.ByteString) + self.assertIsInstance(bytearray(b''), typing.ByteString) + + def test_list(self): + self.assertIsSubclass(list, typing.List) + + def test_deque(self): + self.assertIsSubclass(collections.deque, typing.Deque) + class MyDeque(typing.Deque[int]): pass + self.assertIsInstance(MyDeque(), collections.deque) + + def test_counter(self): + self.assertIsSubclass(collections.Counter, typing.Counter) + + def test_set(self): + self.assertIsSubclass(set, typing.Set) + self.assertNotIsSubclass(frozenset, typing.Set) + + def test_frozenset(self): + self.assertIsSubclass(frozenset, typing.FrozenSet) + 
self.assertNotIsSubclass(set, typing.FrozenSet) + + def test_dict(self): + self.assertIsSubclass(dict, typing.Dict) + + def test_no_list_instantiation(self): + with self.assertRaises(TypeError): + typing.List() + with self.assertRaises(TypeError): + typing.List[T]() + with self.assertRaises(TypeError): + typing.List[int]() + + def test_list_subclass(self): + + class MyList(typing.List[int]): + pass + + a = MyList() + self.assertIsInstance(a, MyList) + self.assertIsInstance(a, typing.Sequence) + + self.assertIsSubclass(MyList, list) + self.assertNotIsSubclass(list, MyList) + + def test_no_dict_instantiation(self): + with self.assertRaises(TypeError): + typing.Dict() + with self.assertRaises(TypeError): + typing.Dict[KT, VT]() + with self.assertRaises(TypeError): + typing.Dict[str, int]() + + def test_dict_subclass(self): + + class MyDict(typing.Dict[str, int]): + pass + + d = MyDict() + self.assertIsInstance(d, MyDict) + self.assertIsInstance(d, typing.MutableMapping) + + self.assertIsSubclass(MyDict, dict) + self.assertNotIsSubclass(dict, MyDict) + + def test_defaultdict_instantiation(self): + self.assertIs(type(typing.DefaultDict()), collections.defaultdict) + self.assertIs(type(typing.DefaultDict[KT, VT]()), collections.defaultdict) + self.assertIs(type(typing.DefaultDict[str, int]()), collections.defaultdict) + + def test_defaultdict_subclass(self): + + class MyDefDict(typing.DefaultDict[str, int]): + pass + + dd = MyDefDict() + self.assertIsInstance(dd, MyDefDict) + + self.assertIsSubclass(MyDefDict, collections.defaultdict) + self.assertNotIsSubclass(collections.defaultdict, MyDefDict) + + def test_deque_instantiation(self): + self.assertIs(type(typing.Deque()), collections.deque) + self.assertIs(type(typing.Deque[T]()), collections.deque) + self.assertIs(type(typing.Deque[int]()), collections.deque) + class D(typing.Deque[T]): pass + self.assertIs(type(D[int]()), D) + + def test_counter_instantiation(self): + self.assertIs(type(typing.Counter()), collections.Counter) + self.assertIs(type(typing.Counter[T]()), collections.Counter) + self.assertIs(type(typing.Counter[int]()), collections.Counter) + class C(typing.Counter[T]): pass + self.assertIs(type(C[int]()), C) + + def test_counter_subclass_instantiation(self): + + class MyCounter(typing.Counter[int]): + pass + + d = MyCounter() + self.assertIsInstance(d, MyCounter) + self.assertIsInstance(d, typing.Counter) + self.assertIsInstance(d, collections.Counter) + + def test_no_set_instantiation(self): + with self.assertRaises(TypeError): + typing.Set() + with self.assertRaises(TypeError): + typing.Set[T]() + with self.assertRaises(TypeError): + typing.Set[int]() + + def test_set_subclass_instantiation(self): + + class MySet(typing.Set[int]): + pass + + d = MySet() + self.assertIsInstance(d, MySet) + + def test_no_frozenset_instantiation(self): + with self.assertRaises(TypeError): + typing.FrozenSet() + with self.assertRaises(TypeError): + typing.FrozenSet[T]() + with self.assertRaises(TypeError): + typing.FrozenSet[int]() + + def test_frozenset_subclass_instantiation(self): + + class MyFrozenSet(typing.FrozenSet[int]): + pass + + d = MyFrozenSet() + self.assertIsInstance(d, MyFrozenSet) + + def test_no_tuple_instantiation(self): + with self.assertRaises(TypeError): + Tuple() + with self.assertRaises(TypeError): + Tuple[T]() + with self.assertRaises(TypeError): + Tuple[int]() + + def test_generator(self): + def foo(): + yield 42 + g = foo() + self.assertIsSubclass(type(g), typing.Generator) + + def test_no_generator_instantiation(self): + 
with self.assertRaises(TypeError): + typing.Generator() + with self.assertRaises(TypeError): + typing.Generator[T, T, T]() + with self.assertRaises(TypeError): + typing.Generator[int, int, int]() + + def test_subclassing(self): + + class MMA(typing.MutableMapping): + pass + + with self.assertRaises(TypeError): # It's abstract + MMA() + + class MMC(MMA): + def __getitem__(self, k): + return None + def __setitem__(self, k, v): + pass + def __delitem__(self, k): + pass + def __iter__(self): + return iter(()) + def __len__(self): + return 0 + + self.assertEqual(len(MMC()), 0) + assert callable(MMC.update) + self.assertIsInstance(MMC(), typing.Mapping) + + class MMB(typing.MutableMapping[KT, VT]): + def __getitem__(self, k): + return None + def __setitem__(self, k, v): + pass + def __delitem__(self, k): + pass + def __iter__(self): + return iter(()) + def __len__(self): + return 0 + + self.assertEqual(len(MMB()), 0) + self.assertEqual(len(MMB[str, str]()), 0) + self.assertEqual(len(MMB[KT, VT]()), 0) + + self.assertNotIsSubclass(dict, MMA) + self.assertNotIsSubclass(dict, MMB) + + self.assertIsSubclass(MMA, typing.Mapping) + self.assertIsSubclass(MMB, typing.Mapping) + self.assertIsSubclass(MMC, typing.Mapping) + + self.assertIsInstance(MMB[KT, VT](), typing.Mapping) + self.assertIsInstance(MMB[KT, VT](), collections.Mapping) + + self.assertIsSubclass(MMA, collections.Mapping) + self.assertIsSubclass(MMB, collections.Mapping) + self.assertIsSubclass(MMC, collections.Mapping) + + self.assertIsSubclass(MMB[str, str], typing.Mapping) + self.assertIsSubclass(MMC, MMA) + + class I(typing.Iterable): pass + self.assertNotIsSubclass(list, I) + + class G(typing.Generator[int, int, int]): pass + def g(): yield 0 + self.assertIsSubclass(G, typing.Generator) + self.assertIsSubclass(G, typing.Iterable) + if hasattr(collections, 'Generator'): + self.assertIsSubclass(G, collections.Generator) + self.assertIsSubclass(G, collections.Iterable) + self.assertNotIsSubclass(type(g), G) + + def test_subclassing_subclasshook(self): + + class Base(typing.Iterable): + @classmethod + def __subclasshook__(cls, other): + if other.__name__ == 'Foo': + return True + else: + return False + + class C(Base): pass + class Foo: pass + class Bar: pass + self.assertIsSubclass(Foo, Base) + self.assertIsSubclass(Foo, C) + self.assertNotIsSubclass(Bar, C) + + def test_subclassing_register(self): + + class A(typing.Container): pass + class B(A): pass + + class C: pass + A.register(C) + self.assertIsSubclass(C, A) + self.assertNotIsSubclass(C, B) + + class D: pass + B.register(D) + self.assertIsSubclass(D, A) + self.assertIsSubclass(D, B) + + class M(): pass + collections.MutableMapping.register(M) + self.assertIsSubclass(M, typing.Mapping) + + def test_collections_as_base(self): + + class M(collections.Mapping): pass + self.assertIsSubclass(M, typing.Mapping) + self.assertIsSubclass(M, typing.Iterable) + + class S(collections.MutableSequence): pass + self.assertIsSubclass(S, typing.MutableSequence) + self.assertIsSubclass(S, typing.Iterable) + + class I(collections.Iterable): pass + self.assertIsSubclass(I, typing.Iterable) + + class A(collections.Mapping): pass + class B: pass + A.register(B) + self.assertIsSubclass(B, typing.Mapping) + + +class TypeTests(BaseTestCase): + + def test_type_basic(self): + + class User(object): pass + class BasicUser(User): pass + class ProUser(User): pass + + def new_user(user_class): + # type: (Type[User]) -> User + return user_class() + + new_user(BasicUser) + + def test_type_typevar(self): + + class 
User(object): pass + class BasicUser(User): pass + class ProUser(User): pass + + global U + U = TypeVar('U', bound=User) + + def new_user(user_class): + # type: (Type[U]) -> U + return user_class() + + new_user(BasicUser) + + def test_type_optional(self): + A = Optional[Type[BaseException]] # noqa + + def foo(a): + # type: (A) -> Optional[BaseException] + if a is None: + return None + else: + return a() + + assert isinstance(foo(KeyboardInterrupt), KeyboardInterrupt) + assert foo(None) is None + + +class NewTypeTests(BaseTestCase): + + def test_basic(self): + UserId = NewType('UserId', int) + UserName = NewType('UserName', str) + self.assertIsInstance(UserId(5), int) + self.assertIsInstance(UserName('Joe'), type('Joe')) + self.assertEqual(UserId(5) + 1, 6) + + def test_errors(self): + UserId = NewType('UserId', int) + UserName = NewType('UserName', str) + with self.assertRaises(TypeError): + issubclass(UserId, int) + with self.assertRaises(TypeError): + class D(UserName): + pass + + +class NamedTupleTests(BaseTestCase): + + def test_basics(self): + Emp = NamedTuple('Emp', [('name', str), ('id', int)]) + self.assertIsSubclass(Emp, tuple) + joe = Emp('Joe', 42) + jim = Emp(name='Jim', id=1) + self.assertIsInstance(joe, Emp) + self.assertIsInstance(joe, tuple) + self.assertEqual(joe.name, 'Joe') + self.assertEqual(joe.id, 42) + self.assertEqual(jim.name, 'Jim') + self.assertEqual(jim.id, 1) + self.assertEqual(Emp.__name__, 'Emp') + self.assertEqual(Emp._fields, ('name', 'id')) + self.assertEqual(Emp._field_types, dict(name=str, id=int)) + + def test_pickle(self): + global Emp # pickle wants to reference the class by name + Emp = NamedTuple('Emp', [('name', str), ('id', int)]) + jane = Emp('jane', 37) + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + z = pickle.dumps(jane, proto) + jane2 = pickle.loads(z) + self.assertEqual(jane2, jane) + + +class IOTests(BaseTestCase): + + def test_io_submodule(self): + from typing.io import IO, TextIO, BinaryIO, __all__, __name__ + self.assertIs(IO, typing.IO) + self.assertIs(TextIO, typing.TextIO) + self.assertIs(BinaryIO, typing.BinaryIO) + self.assertEqual(set(__all__), set(['IO', 'TextIO', 'BinaryIO'])) + self.assertEqual(__name__, 'typing.io') + + +class RETests(BaseTestCase): + # Much of this is really testing _TypeAlias. + + def test_basics(self): + pat = re.compile('[a-z]+', re.I) + self.assertIsSubclass(pat.__class__, Pattern) + self.assertIsSubclass(type(pat), Pattern) + self.assertIsInstance(pat, Pattern) + + mat = pat.search('12345abcde.....') + self.assertIsSubclass(mat.__class__, Match) + self.assertIsSubclass(type(mat), Match) + self.assertIsInstance(mat, Match) + + # these should just work + Pattern[Union[str, bytes]] + Match[Union[bytes, str]] + + def test_alias_equality(self): + self.assertEqual(Pattern[str], Pattern[str]) + self.assertNotEqual(Pattern[str], Pattern[bytes]) + self.assertNotEqual(Pattern[str], Match[str]) + self.assertNotEqual(Pattern[str], str) + + def test_errors(self): + with self.assertRaises(TypeError): + # Doesn't fit AnyStr. + Pattern[int] + with self.assertRaises(TypeError): + # Can't change type vars? + Match[T] + m = Match[Union[str, bytes]] + with self.assertRaises(TypeError): + # Too complicated? + m[str] + with self.assertRaises(TypeError): + # We don't support isinstance(). + isinstance(42, Pattern[str]) + with self.assertRaises(TypeError): + # We don't support issubclass(). 
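+            # (Pattern[str] is a parameterized _TypeAlias, so this raises
+            # TypeError rather than returning False, just like isinstance().)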
+ issubclass(Pattern[bytes], Pattern[str]) + + def test_repr(self): + self.assertEqual(repr(Pattern), 'Pattern[~AnyStr]') + self.assertEqual(repr(Pattern[unicode]), 'Pattern[unicode]') + self.assertEqual(repr(Pattern[str]), 'Pattern[str]') + self.assertEqual(repr(Match), 'Match[~AnyStr]') + self.assertEqual(repr(Match[unicode]), 'Match[unicode]') + self.assertEqual(repr(Match[str]), 'Match[str]') + + def test_re_submodule(self): + from typing.re import Match, Pattern, __all__, __name__ + self.assertIs(Match, typing.Match) + self.assertIs(Pattern, typing.Pattern) + self.assertEqual(set(__all__), set(['Match', 'Pattern'])) + self.assertEqual(__name__, 'typing.re') + + def test_cannot_subclass(self): + with self.assertRaises(TypeError) as ex: + + class A(typing.Match): + pass + + self.assertEqual(str(ex.exception), + "Cannot subclass typing._TypeAlias") + + +class AllTests(BaseTestCase): + """Tests for __all__.""" + + def test_all(self): + from typing import __all__ as a + # Just spot-check the first and last of every category. + self.assertIn('AbstractSet', a) + self.assertIn('ValuesView', a) + self.assertIn('cast', a) + self.assertIn('overload', a) + # Check that io and re are not exported. + self.assertNotIn('io', a) + self.assertNotIn('re', a) + # Spot-check that stdlib modules aren't exported. + self.assertNotIn('os', a) + self.assertNotIn('sys', a) + # Check that Text is defined. + self.assertIn('Text', a) + + def test_respect_no_type_check(self): + @typing.no_type_check + class NoTpCheck(object): + class Inn(object): + def __init__(self, x): + # type: (this is not actually a type) -> None + pass + self.assertTrue(NoTpCheck.__no_type_check__) + self.assertTrue(NoTpCheck.Inn.__init__.__no_type_check__) + + def test_get_type_hints_dummy(self): + + def foo(x): + # type: (int) -> int + return x + 1 + + self.assertIsNone(typing.get_type_hints(foo)) + + +if __name__ == '__main__': + main() diff --git a/lib-typing/2.7/typing.py b/lib-typing/2.7/typing.py new file mode 100644 index 000000000000..0d67e4c3e150 --- /dev/null +++ b/lib-typing/2.7/typing.py @@ -0,0 +1,2140 @@ +from __future__ import absolute_import, unicode_literals + +import abc +from abc import abstractmethod, abstractproperty +import collections +import functools +import re as stdlib_re # Avoid confusion with the re we export. +import sys +import types +try: + import collections.abc as collections_abc +except ImportError: + import collections as collections_abc # Fallback for PY3.2. + + +# Please keep __all__ alphabetized within each category. +__all__ = [ + # Super-special typing primitives. + 'Any', + 'Callable', + 'ClassVar', + 'Generic', + 'Optional', + 'Tuple', + 'Type', + 'TypeVar', + 'Union', + + # ABCs (from collections.abc). + 'AbstractSet', # collections.abc.Set. + 'GenericMeta', # subclass of abc.ABCMeta and a metaclass + # for 'Generic' and ABCs below. + 'ByteString', + 'Container', + 'Hashable', + 'ItemsView', + 'Iterable', + 'Iterator', + 'KeysView', + 'Mapping', + 'MappingView', + 'MutableMapping', + 'MutableSequence', + 'MutableSet', + 'Sequence', + 'Sized', + 'ValuesView', + + # Structural checks, a.k.a. protocols. + 'Reversible', + 'SupportsAbs', + 'SupportsFloat', + 'SupportsInt', + + # Concrete collection types. + 'Counter', + 'Deque', + 'Dict', + 'DefaultDict', + 'List', + 'Set', + 'FrozenSet', + 'NamedTuple', # Not really a type. + 'Generator', + + # One-off things. 
+ 'AnyStr', + 'cast', + 'get_type_hints', + 'NewType', + 'no_type_check', + 'no_type_check_decorator', + 'overload', + 'Text', + 'TYPE_CHECKING', +] + +# The pseudo-submodules 're' and 'io' are part of the public +# namespace, but excluded from __all__ because they might stomp on +# legitimate imports of those modules. + + +def _qualname(x): + if sys.version_info[:2] >= (3, 3): + return x.__qualname__ + else: + # Fall back to just name. + return x.__name__ + + +def _trim_name(nm): + whitelist = ('_TypeAlias', '_ForwardRef', '_TypingBase', '_FinalTypingBase') + if nm.startswith('_') and nm not in whitelist: + nm = nm[1:] + return nm + + +class TypingMeta(type): + """Metaclass for most types defined in typing module + (not a part of public API). + + This also defines a dummy constructor (all the work for most typing + constructs is done in __new__) and a nicer repr(). + """ + + _is_protocol = False + + def __new__(cls, name, bases, namespace): + return super(TypingMeta, cls).__new__(cls, str(name), bases, namespace) + + @classmethod + def assert_no_subclassing(cls, bases): + for base in bases: + if isinstance(base, cls): + raise TypeError("Cannot subclass %s" % + (', '.join(map(_type_repr, bases)) or '()')) + + def __init__(self, *args, **kwds): + pass + + def _eval_type(self, globalns, localns): + """Override this in subclasses to interpret forward references. + + For example, List['C'] is internally stored as + List[_ForwardRef('C')], which should evaluate to List[C], + where C is an object found in globalns or localns (searching + localns first, of course). + """ + return self + + def _get_type_vars(self, tvars): + pass + + def __repr__(self): + qname = _trim_name(_qualname(self)) + return '%s.%s' % (self.__module__, qname) + + +class _TypingBase(object): + """Internal indicator of special typing constructs.""" + __metaclass__ = TypingMeta + __slots__ = ('__weakref__',) + + def __init__(self, *args, **kwds): + pass + + def __new__(cls, *args, **kwds): + """Constructor. + + This only exists to give a better error message in case + someone tries to subclass a special typing object (not a good idea). + """ + if (len(args) == 3 and + isinstance(args[0], str) and + isinstance(args[1], tuple)): + # Close enough. + raise TypeError("Cannot subclass %r" % cls) + return super(_TypingBase, cls).__new__(cls) + + # Things that are not classes also need these. + def _eval_type(self, globalns, localns): + return self + + def _get_type_vars(self, tvars): + pass + + def __repr__(self): + cls = type(self) + qname = _trim_name(_qualname(cls)) + return '%s.%s' % (cls.__module__, qname) + + def __call__(self, *args, **kwds): + raise TypeError("Cannot instantiate %r" % type(self)) + + +class _FinalTypingBase(_TypingBase): + """Internal mix-in class to prevent instantiation. + + Prevents instantiation unless _root=True is given in class call. + It is used to create pseudo-singleton instances Any, Union, Optional, etc. 
+ """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + self = super(_FinalTypingBase, cls).__new__(cls, *args, **kwds) + if '_root' in kwds and kwds['_root'] is True: + return self + raise TypeError("Cannot instantiate %r" % cls) + + def __reduce__(self): + return _trim_name(type(self).__name__) + + +class _ForwardRef(_TypingBase): + """Internal wrapper to hold a forward reference.""" + + __slots__ = ('__forward_arg__', '__forward_code__', + '__forward_evaluated__', '__forward_value__') + + def __init__(self, arg): + super(_ForwardRef, self).__init__(arg) + if not isinstance(arg, basestring): + raise TypeError('Forward reference must be a string -- got %r' % (arg,)) + try: + code = compile(arg, '', 'eval') + except SyntaxError: + raise SyntaxError('Forward reference must be an expression -- got %r' % + (arg,)) + self.__forward_arg__ = arg + self.__forward_code__ = code + self.__forward_evaluated__ = False + self.__forward_value__ = None + + def _eval_type(self, globalns, localns): + if not self.__forward_evaluated__ or localns is not globalns: + if globalns is None and localns is None: + globalns = localns = {} + elif globalns is None: + globalns = localns + elif localns is None: + localns = globalns + self.__forward_value__ = _type_check( + eval(self.__forward_code__, globalns, localns), + "Forward references must evaluate to types.") + self.__forward_evaluated__ = True + return self.__forward_value__ + + def __eq__(self, other): + if not isinstance(other, _ForwardRef): + return NotImplemented + return (self.__forward_arg__ == other.__forward_arg__ and + self.__forward_value__ == other.__forward_value__) + + def __hash__(self): + return hash((self.__forward_arg__, self.__forward_value__)) + + def __instancecheck__(self, obj): + raise TypeError("Forward references cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Forward references cannot be used with issubclass().") + + def __repr__(self): + return '_ForwardRef(%r)' % (self.__forward_arg__,) + + +class _TypeAlias(_TypingBase): + """Internal helper class for defining generic variants of concrete types. + + Note that this is not a type; let's call it a pseudo-type. It cannot + be used in instance and subclass checks in parameterized form, i.e. + ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning + ``False``. + """ + + __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') + + def __init__(self, name, type_var, impl_type, type_checker): + """Initializer. + + Args: + name: The name, e.g. 'Pattern'. + type_var: The type parameter, e.g. AnyStr, or the + specific type, e.g. str. + impl_type: The implementation type. + type_checker: Function that takes an impl_type instance. + and returns a value that should be a type_var instance. + """ + assert isinstance(name, basestring), repr(name) + assert isinstance(impl_type, type), repr(impl_type) + assert not isinstance(impl_type, TypingMeta), repr(impl_type) + assert isinstance(type_var, (type, _TypingBase)), repr(type_var) + self.name = name + self.type_var = type_var + self.impl_type = impl_type + self.type_checker = type_checker + + def __repr__(self): + return "%s[%s]" % (self.name, _type_repr(self.type_var)) + + def __getitem__(self, parameter): + if not isinstance(self.type_var, TypeVar): + raise TypeError("%s cannot be further parameterized." 
% self) + if self.type_var.__constraints__ and isinstance(parameter, type): + if not issubclass(parameter, self.type_var.__constraints__): + raise TypeError("%s is not a valid substitution for %s." % + (parameter, self.type_var)) + if isinstance(parameter, TypeVar) and parameter is not self.type_var: + raise TypeError("%s cannot be re-parameterized." % self) + return self.__class__(self.name, parameter, + self.impl_type, self.type_checker) + + def __eq__(self, other): + if not isinstance(other, _TypeAlias): + return NotImplemented + return self.name == other.name and self.type_var == other.type_var + + def __hash__(self): + return hash((self.name, self.type_var)) + + def __instancecheck__(self, obj): + if not isinstance(self.type_var, TypeVar): + raise TypeError("Parameterized type aliases cannot be used " + "with isinstance().") + return isinstance(obj, self.impl_type) + + def __subclasscheck__(self, cls): + if not isinstance(self.type_var, TypeVar): + raise TypeError("Parameterized type aliases cannot be used " + "with issubclass().") + return issubclass(cls, self.impl_type) + + +def _get_type_vars(types, tvars): + for t in types: + if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): + t._get_type_vars(tvars) + + +def _type_vars(types): + tvars = [] + _get_type_vars(types, tvars) + return tuple(tvars) + + +def _eval_type(t, globalns, localns): + if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): + return t._eval_type(globalns, localns) + return t + + +def _type_check(arg, msg): + """Check that the argument is a type, and return it (internal helper). + + As a special case, accept None and return type(None) instead. + Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. + + The msg argument is a human-readable error message, e.g. + + "Union[arg, ...]: arg should be a type." + + We append the repr() of the actual value (truncated to 100 chars). + """ + if arg is None: + return type(None) + if isinstance(arg, basestring): + arg = _ForwardRef(arg) + if ( + isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or + not isinstance(arg, (type, _TypingBase)) and not callable(arg) + ): + raise TypeError(msg + " Got %.100r." % (arg,)) + # Bare Union etc. are not valid as type arguments + if ( + type(arg).__name__ in ('_Union', '_Optional') and + not getattr(arg, '__origin__', None) or + isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol) + ): + raise TypeError("Plain %s is not valid as type argument" % arg) + return arg + + +def _type_repr(obj): + """Return the repr() of an object, special-casing types (internal helper). + + If obj is a type, we return a shorter version than the default + type.__repr__, based on the module and qualified name, which is + typically enough to uniquely identify a type. For everything + else, we fall back on repr(obj). + """ + if isinstance(obj, type) and not isinstance(obj, TypingMeta): + if obj.__module__ == '__builtin__': + return _qualname(obj) + return '%s.%s' % (obj.__module__, _qualname(obj)) + if obj is Ellipsis: + return('...') + if isinstance(obj, types.FunctionType): + return obj.__name__ + return repr(obj) + + +class ClassVarMeta(TypingMeta): + """Metaclass for _ClassVar""" + + def __new__(cls, name, bases, namespace): + cls.assert_no_subclassing(bases) + self = super(ClassVarMeta, cls).__new__(cls, name, bases, namespace) + return self + + +class _ClassVar(_FinalTypingBase): + """Special type construct to mark class variables. 
+ + An annotation wrapped in ClassVar indicates that a given + attribute is intended to be used as a class variable and + should not be set on instances of that class. Usage:: + + class Starship: + stats = {} # type: ClassVar[Dict[str, int]] # class variable + damage = 10 # type: int # instance variable + + ClassVar accepts only types and cannot be further subscribed. + + Note that ClassVar is not a class itself, and should not + be used with isinstance() or issubclass(). + """ + + __metaclass__ = ClassVarMeta + __slots__ = ('__type__',) + + def __init__(self, tp=None, _root=False): + self.__type__ = tp + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is None: + return cls(_type_check(item, + '{} accepts only types.'.format(cls.__name__[1:])), + _root=True) + raise TypeError('{} cannot be further subscripted' + .format(cls.__name__[1:])) + + def _eval_type(self, globalns, localns): + return type(self)(_eval_type(self.__type__, globalns, localns), + _root=True) + + def __repr__(self): + r = super(_ClassVar, self).__repr__() + if self.__type__ is not None: + r += '[{}]'.format(_type_repr(self.__type__)) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, _ClassVar): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + +ClassVar = _ClassVar(_root=True) + + +class AnyMeta(TypingMeta): + """Metaclass for Any.""" + + def __new__(cls, name, bases, namespace): + cls.assert_no_subclassing(bases) + self = super(AnyMeta, cls).__new__(cls, name, bases, namespace) + return self + + +class _Any(_FinalTypingBase): + """Special type indicating an unconstrained type. + + - Any is compatible with every type. + - Any assumed to have all methods. + - All values assumed to be instances of Any. + + Note that all the above statements are true from the point of view of + static type checkers. At runtime, Any should not be used with instance + or class checks. + """ + __metaclass__ = AnyMeta + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError("Any cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Any cannot be used with issubclass().") + + +Any = _Any(_root=True) + + +class TypeVarMeta(TypingMeta): + def __new__(cls, name, bases, namespace): + cls.assert_no_subclassing(bases) + return super(TypeVarMeta, cls).__new__(cls, name, bases, namespace) + + +class TypeVar(_TypingBase): + """Type variable. + + Usage:: + + T = TypeVar('T') # Can be anything + A = TypeVar('A', str, bytes) # Must be str or bytes + + Type variables exist primarily for the benefit of static type + checkers. They serve as the parameters for generic types as well + as for generic function definitions. See class Generic for more + information on generic types. Generic functions work as follows: + + def repeat(x: T, n: int) -> List[T]: + '''Return a list containing n references to x.''' + return [x]*n + + def longest(x: A, y: A) -> A: + '''Return the longest of two strings.''' + return x if len(x) >= len(y) else y + + The latter example's signature is essentially the overloading + of (str, str) -> str and (bytes, bytes) -> bytes. Also note + that if the arguments are instances of some subclass of str, + the return type is still plain str. + + At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError. 
+ + Type variables defined with covariant=True or contravariant=True + can be used do declare covariant or contravariant generic types. + See PEP 484 for more details. By default generic types are invariant + in all type variables. + + Type variables can be introspected. e.g.: + + T.__name__ == 'T' + T.__constraints__ == () + T.__covariant__ == False + T.__contravariant__ = False + A.__constraints__ == (str, bytes) + """ + + __metaclass__ = TypeVarMeta + __slots__ = ('__name__', '__bound__', '__constraints__', + '__covariant__', '__contravariant__') + + def __init__(self, name, *constraints, **kwargs): + super(TypeVar, self).__init__(name, *constraints, **kwargs) + bound = kwargs.get('bound', None) + covariant = kwargs.get('covariant', False) + contravariant = kwargs.get('contravariant', False) + self.__name__ = name + if covariant and contravariant: + raise ValueError("Bivariant types are not supported.") + self.__covariant__ = bool(covariant) + self.__contravariant__ = bool(contravariant) + if constraints and bound is not None: + raise TypeError("Constraints cannot be combined with bound=...") + if constraints and len(constraints) == 1: + raise TypeError("A single constraint is not allowed") + msg = "TypeVar(name, constraint, ...): constraints must be types." + self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) + if bound: + self.__bound__ = _type_check(bound, "Bound must be a type.") + else: + self.__bound__ = None + + def _get_type_vars(self, tvars): + if self not in tvars: + tvars.append(self) + + def __repr__(self): + if self.__covariant__: + prefix = '+' + elif self.__contravariant__: + prefix = '-' + else: + prefix = '~' + return prefix + self.__name__ + + def __instancecheck__(self, instance): + raise TypeError("Type variables cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Type variables cannot be used with issubclass().") + + +# Some unconstrained type variables. These are used by the container types. +# (These are not for export.) +T = TypeVar('T') # Any type. +KT = TypeVar('KT') # Key type. +VT = TypeVar('VT') # Value type. +T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. +V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. +VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. +T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. + +# A useful type variable with constraints. This represents string types. +# (This one *is* for export!) +AnyStr = TypeVar('AnyStr', bytes, unicode) + + +def _replace_arg(arg, tvars, args): + """An internal helper function: replace arg if it is a type variable + found in tvars with corresponding substitution from args or + with corresponding substitution sub-tree if arg is a generic type. 
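+
+    For example, if arg is the type variable T, tvars is (T,) and args is
+    (int,), the result is int.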
+ """ + + if tvars is None: + tvars = [] + if hasattr(arg, '_subs_tree') and isinstance(arg, (GenericMeta, _TypingBase)): + return arg._subs_tree(tvars, args) + if isinstance(arg, TypeVar): + for i, tvar in enumerate(tvars): + if arg == tvar: + return args[i] + return arg + + +# Special typing constructs Union, Optional, Generic, Callable and Tuple +# use three special attributes for internal bookkeeping of generic types: +# * __parameters__ is a tuple of unique free type parameters of a generic +# type, for example, Dict[T, T].__parameters__ == (T,); +# * __origin__ keeps a reference to a type that was subscripted, +# e.g., Union[T, int].__origin__ == Union; +# * __args__ is a tuple of all arguments used in subscripting, +# e.g., Dict[T, int].__args__ == (T, int). + + +def _subs_tree(cls, tvars=None, args=None): + """An internal helper function: calculate substitution tree + for generic cls after replacing its type parameters with + substitutions in tvars -> args (if any). + Repeat the same following __origin__'s. + + Return a list of arguments with all possible substitutions + performed. Arguments that are generic classes themselves are represented + as tuples (so that no new classes are created by this function). + For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)] + """ + + if cls.__origin__ is None: + return cls + # Make of chain of origins (i.e. cls -> cls.__origin__) + current = cls.__origin__ + orig_chain = [] + while current.__origin__ is not None: + orig_chain.append(current) + current = current.__origin__ + # Replace type variables in __args__ if asked ... + tree_args = [] + for arg in cls.__args__: + tree_args.append(_replace_arg(arg, tvars, args)) + # ... then continue replacing down the origin chain. + for ocls in orig_chain: + new_tree_args = [] + for arg in ocls.__args__: + new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args)) + tree_args = new_tree_args + return tree_args + + +def _remove_dups_flatten(parameters): + """An internal helper for Union creation and substitution: flatten Union's + among parameters, then remove duplicates and strict subclasses. + """ + + # Flatten out Union[Union[...], ...]. + params = [] + for p in parameters: + if isinstance(p, _Union) and p.__origin__ is Union: + params.extend(p.__args__) + elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union: + params.extend(p[1:]) + else: + params.append(p) + # Weed out strict duplicates, preserving the first of each occurrence. + all_params = set(params) + if len(all_params) < len(params): + new_params = [] + for t in params: + if t in all_params: + new_params.append(t) + all_params.remove(t) + params = new_params + assert not all_params, all_params + # Weed out subclasses. + # E.g. Union[int, Employee, Manager] == Union[int, Employee]. + # If object is present it will be sole survivor among proper classes. + # Never discard type variables. + # (In particular, Union[str, AnyStr] != AnyStr.) + all_params = set(params) + for t1 in params: + if not isinstance(t1, type): + continue + if any(isinstance(t2, type) and issubclass(t1, t2) + for t2 in all_params - {t1} + if not (isinstance(t2, GenericMeta) and + t2.__origin__ is not None)): + all_params.remove(t1) + return tuple(t for t in params if t in all_params) + + +def _check_generic(cls, parameters): + # Check correct count for parameters of a generic cls (internal helper). 
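+    # For example, Dict[int] fails here with "Too few parameters ...":
+    # Dict declares two parameters (KT, VT) but only one was given.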
+ if not cls.__parameters__: + raise TypeError("%s is not a generic class" % repr(cls)) + alen = len(parameters) + elen = len(cls.__parameters__) + if alen != elen: + raise TypeError("Too %s parameters for %s; actual %s, expected %s" % + ("many" if alen > elen else "few", repr(cls), alen, elen)) + + +_cleanups = [] + + +def _tp_cache(func): + maxsize = 128 + cache = {} + _cleanups.append(cache.clear) + + @functools.wraps(func) + def inner(*args): + key = args + try: + return cache[key] + except TypeError: + # Assume it's an unhashable argument. + return func(*args) + except KeyError: + value = func(*args) + if len(cache) >= maxsize: + # If the cache grows too much, just start over. + cache.clear() + cache[key] = value + return value + + return inner + + +class UnionMeta(TypingMeta): + """Metaclass for Union.""" + + def __new__(cls, name, bases, namespace): + cls.assert_no_subclassing(bases) + return super(UnionMeta, cls).__new__(cls, name, bases, namespace) + + +class _Union(_FinalTypingBase): + """Union type; Union[X, Y] means either X or Y. + + To define a union, use e.g. Union[int, str]. Details: + + - The arguments must be types and there must be at least one. + + - None as an argument is a special case and is replaced by + type(None). + + - Unions of unions are flattened, e.g.:: + + Union[Union[int, str], float] == Union[int, str, float] + + - Unions of a single argument vanish, e.g.:: + + Union[int] == int # The constructor actually returns int + + - Redundant arguments are skipped, e.g.:: + + Union[int, str, int] == Union[int, str] + + - When comparing unions, the argument order is ignored, e.g.:: + + Union[int, str] == Union[str, int] + + - When two arguments have a subclass relationship, the least + derived argument is kept, e.g.:: + + class Employee: pass + class Manager(Employee): pass + Union[int, Employee, Manager] == Union[int, Employee] + Union[Manager, int, Employee] == Union[int, Employee] + Union[Employee, Manager] == Employee + + - Similar for object:: + + Union[int, object] == object + + - You cannot subclass or instantiate a union. + + - You can use Optional[X] as a shorthand for Union[X, None]. + """ + + __metaclass__ = UnionMeta + __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__') + + def __new__(cls, parameters=None, origin=None, *args, **kwds): + self = super(_Union, cls).__new__(cls, parameters, origin, *args, **kwds) + if origin is None: + self.__parameters__ = None + self.__args__ = None + self.__origin__ = None + self.__tree_hash__ = hash(frozenset(('Union',))) + return self + if not isinstance(parameters, tuple): + raise TypeError("Expected parameters=") + if origin is Union: + parameters = _remove_dups_flatten(parameters) + # It's not a union if there's only one type left. + if len(parameters) == 1: + return parameters[0] + self.__parameters__ = _type_vars(parameters) + self.__args__ = parameters + self.__origin__ = origin + # Pre-calculate the __hash__ on instantiation. + # This improves speed for complex substitutions. + subs_tree = self._subs_tree() + if isinstance(subs_tree, tuple): + self.__tree_hash__ = hash(frozenset(subs_tree)) + else: + self.__tree_hash__ = hash(subs_tree) + return self + + def _eval_type(self, globalns, localns): + if self.__args__ is None: + return self + ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__) + ev_origin = _eval_type(self.__origin__, globalns, localns) + if ev_args == self.__args__ and ev_origin == self.__origin__: + # Everything is already evaluated. 
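+            # Returning self also preserves the __tree_hash__ precomputed in __new__.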
+ return self + return self.__class__(ev_args, ev_origin, _root=True) + + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + _get_type_vars(self.__parameters__, tvars) + + def __repr__(self): + if self.__origin__ is None: + return super(_Union, self).__repr__() + tree = self._subs_tree() + if not isinstance(tree, tuple): + return repr(tree) + return tree[0]._tree_repr(tree) + + def _tree_repr(self, tree): + arg_list = [] + for arg in tree[1:]: + if not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + return super(_Union, self).__repr__() + '[%s]' % ', '.join(arg_list) + + @_tp_cache + def __getitem__(self, parameters): + if parameters == (): + raise TypeError("Cannot take a Union of no types.") + if not isinstance(parameters, tuple): + parameters = (parameters,) + if self.__origin__ is None: + msg = "Union[arg, ...]: each arg must be a type." + else: + msg = "Parameters to generic types must be types." + parameters = tuple(_type_check(p, msg) for p in parameters) + if self is not Union: + _check_generic(self, parameters) + return self.__class__(parameters, origin=self, _root=True) + + def _subs_tree(self, tvars=None, args=None): + if self is Union: + return Union # Nothing to substitute + tree_args = _subs_tree(self, tvars, args) + tree_args = _remove_dups_flatten(tree_args) + if len(tree_args) == 1: + return tree_args[0] # Union of a single type is that type + return (Union,) + tree_args + + def __eq__(self, other): + if isinstance(other, _Union): + return self.__tree_hash__ == other.__tree_hash__ + elif self is not Union: + return self._subs_tree() == other + else: + return self is other + + def __hash__(self): + return self.__tree_hash__ + + def __instancecheck__(self, obj): + raise TypeError("Unions cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Unions cannot be used with issubclass().") + + +Union = _Union(_root=True) + + +class OptionalMeta(TypingMeta): + """Metaclass for Optional.""" + + def __new__(cls, name, bases, namespace): + cls.assert_no_subclassing(bases) + return super(OptionalMeta, cls).__new__(cls, name, bases, namespace) + + +class _Optional(_FinalTypingBase): + """Optional type. + + Optional[X] is equivalent to Union[X, None]. + """ + + __metaclass__ = OptionalMeta + __slots__ = () + + @_tp_cache + def __getitem__(self, arg): + arg = _type_check(arg, "Optional[t] requires a single type.") + return Union[arg, type(None)] + + +Optional = _Optional(_root=True) + + +def _gorg(a): + """Return the farthest origin of a generic class (internal helper).""" + assert isinstance(a, GenericMeta) + while a.__origin__ is not None: + a = a.__origin__ + return a + + +def _geqv(a, b): + """Return whether two generic classes are equivalent (internal helper). + + The intention is to consider generic class X and any of its + parameterized forms (X[T], X[int], etc.) as equivalent. + + However, X is not equivalent to a subclass of X. + + The relation is reflexive, symmetric and transitive. + """ + assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta) + # Reduce each to its origin. + return _gorg(a) is _gorg(b) + + +def _next_in_mro(cls): + """Helper for Generic.__new__. + + Returns the class after the last occurrence of Generic or + Generic[...] in cls.__mro__. + """ + next_in_mro = object + # Look for the last occurrence of Generic or Generic[...]. 
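+    # __mro__ always ends with object, so slicing off the last entry keeps
+    # the i + 1 lookup below in range.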
+ for i, c in enumerate(cls.__mro__[:-1]): + if isinstance(c, GenericMeta) and _gorg(c) is Generic: + next_in_mro = cls.__mro__[i + 1] + return next_in_mro + + +def _make_subclasshook(cls): + """Construct a __subclasshook__ callable that incorporates + the associated __extra__ class in subclass checks performed + against cls. + """ + if isinstance(cls.__extra__, abc.ABCMeta): + # The logic mirrors that of ABCMeta.__subclasscheck__. + # Registered classes need not be checked here because + # cls and its extra share the same _abc_registry. + def __extrahook__(cls, subclass): + res = cls.__extra__.__subclasshook__(subclass) + if res is not NotImplemented: + return res + if cls.__extra__ in getattr(subclass, '__mro__', ()): + return True + for scls in cls.__extra__.__subclasses__(): + if isinstance(scls, GenericMeta): + continue + if issubclass(subclass, scls): + return True + return NotImplemented + else: + # For non-ABC extras we'll just call issubclass(). + def __extrahook__(cls, subclass): + if cls.__extra__ and issubclass(subclass, cls.__extra__): + return True + return NotImplemented + return classmethod(__extrahook__) + + +class GenericMeta(TypingMeta, abc.ABCMeta): + """Metaclass for generic types. + + This is a metaclass for typing.Generic and generic ABCs defined in + typing module. User defined subclasses of GenericMeta can override + __new__ and invoke super().__new__. Note that GenericMeta.__new__ + has strict rules on what is allowed in its bases argument: + * plain Generic is disallowed in bases; + * Generic[...] should appear in bases at most once; + * if Generic[...] is present, then it should list all type variables + that appear in other bases. + In addition, type of all generic bases is erased, e.g., C[int] is + stripped to plain C. + """ + + def __new__(cls, name, bases, namespace, + tvars=None, args=None, origin=None, extra=None, orig_bases=None): + """Create a new generic class. GenericMeta.__new__ accepts + keyword arguments that are used for internal bookkeeping, therefore + an override should pass unused keyword arguments to super(). + """ + if tvars is not None: + # Called from __getitem__() below. + assert origin is not None + assert all(isinstance(t, TypeVar) for t in tvars), tvars + else: + # Called from class statement. + assert tvars is None, tvars + assert args is None, args + assert origin is None, origin + + # Get the full set of tvars from the bases. + tvars = _type_vars(bases) + # Look for Generic[T1, ..., Tn]. + # If found, tvars must be a subset of it. + # If not found, tvars is it. + # Also check for and reject plain Generic, + # and reject multiple Generic[...]. + gvars = None + for base in bases: + if base is Generic: + raise TypeError("Cannot inherit from plain Generic") + if (isinstance(base, GenericMeta) and + base.__origin__ is Generic): + if gvars is not None: + raise TypeError( + "Cannot inherit from Generic[...] 
multiple types.") + gvars = base.__parameters__ + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + raise TypeError( + "Some type variables (%s) " + "are not listed in Generic[%s]" % + (", ".join(str(t) for t in tvars if t not in gvarset), + ", ".join(str(g) for g in gvars))) + tvars = gvars + + initial_bases = bases + if extra is None: + extra = namespace.get('__extra__') + if extra is not None and type(extra) is abc.ABCMeta and extra not in bases: + bases = (extra,) + bases + bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases) + + # remove bare Generic from bases if there are other generic bases + if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): + bases = tuple(b for b in bases if b is not Generic) + namespace.update({'__origin__': origin, '__extra__': extra}) + self = super(GenericMeta, cls).__new__(cls, name, bases, namespace) + + self.__parameters__ = tvars + # Be prepared that GenericMeta will be subclassed by TupleMeta + # and CallableMeta, those two allow ..., (), or [] in __args___. + self.__args__ = tuple(Ellipsis if a is _TypingEllipsis else + () if a is _TypingEmpty else + a for a in args) if args else None + # Speed hack (https://github.com/python/typing/issues/196). + self.__next_in_mro__ = _next_in_mro(self) + # Preserve base classes on subclassing (__bases__ are type erased now). + if orig_bases is None: + self.__orig_bases__ = initial_bases + + # This allows unparameterized generic collections to be used + # with issubclass() and isinstance() in the same way as their + # collections.abc counterparts (e.g., isinstance([], Iterable)). + if ( + '__subclasshook__' not in namespace and extra or + # allow overriding + getattr(self.__subclasshook__, '__name__', '') == '__extrahook__' + ): + self.__subclasshook__ = _make_subclasshook(self) + + if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2. + self.__qualname__ = origin.__qualname__ + self.__tree_hash__ = (hash(self._subs_tree()) if origin else + super(GenericMeta, self).__hash__()) + return self + + def __init__(self, *args, **kwargs): + super(GenericMeta, self).__init__(*args, **kwargs) + if isinstance(self.__extra__, abc.ABCMeta): + self._abc_registry = self.__extra__._abc_registry + self._abc_cache = self.__extra__._abc_cache + elif self.__origin__ is not None: + self._abc_registry = self.__origin__._abc_registry + self._abc_cache = self.__origin__._abc_cache + + # _abc_negative_cache and _abc_negative_cache_version + # realised as descriptors, since GenClass[t1, t2, ...] always + # share subclass info with GenClass. + # This is an important memory optimization. 
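+    # Reads go to __extra__ (when it is an ABC) or to the unsubscripted
+    # origin class; the setters below silently ignore writes on
+    # subscripted forms.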
+ @property + def _abc_negative_cache(self): + if isinstance(self.__extra__, abc.ABCMeta): + return self.__extra__._abc_negative_cache + return _gorg(self)._abc_generic_negative_cache + + @_abc_negative_cache.setter + def _abc_negative_cache(self, value): + if self.__origin__ is None: + if isinstance(self.__extra__, abc.ABCMeta): + self.__extra__._abc_negative_cache = value + else: + self._abc_generic_negative_cache = value + + @property + def _abc_negative_cache_version(self): + if isinstance(self.__extra__, abc.ABCMeta): + return self.__extra__._abc_negative_cache_version + return _gorg(self)._abc_generic_negative_cache_version + + @_abc_negative_cache_version.setter + def _abc_negative_cache_version(self, value): + if self.__origin__ is None: + if isinstance(self.__extra__, abc.ABCMeta): + self.__extra__._abc_negative_cache_version = value + else: + self._abc_generic_negative_cache_version = value + + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + _get_type_vars(self.__parameters__, tvars) + + def _eval_type(self, globalns, localns): + ev_origin = (self.__origin__._eval_type(globalns, localns) + if self.__origin__ else None) + ev_args = tuple(_eval_type(a, globalns, localns) for a + in self.__args__) if self.__args__ else None + if ev_origin == self.__origin__ and ev_args == self.__args__: + return self + return self.__class__(self.__name__, + self.__bases__, + dict(self.__dict__), + tvars=_type_vars(ev_args) if ev_args else None, + args=ev_args, + origin=ev_origin, + extra=self.__extra__, + orig_bases=self.__orig_bases__) + + def __repr__(self): + if self.__origin__ is None: + return super(GenericMeta, self).__repr__() + return self._tree_repr(self._subs_tree()) + + def _tree_repr(self, tree): + arg_list = [] + for arg in tree[1:]: + if arg == (): + arg_list.append('()') + elif not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + return super(GenericMeta, self).__repr__() + '[%s]' % ', '.join(arg_list) + + def _subs_tree(self, tvars=None, args=None): + if self.__origin__ is None: + return self + tree_args = _subs_tree(self, tvars, args) + return (_gorg(self),) + tuple(tree_args) + + def __eq__(self, other): + if not isinstance(other, GenericMeta): + return NotImplemented + if self.__origin__ is None or other.__origin__ is None: + return self is other + return self.__tree_hash__ == other.__tree_hash__ + + def __hash__(self): + return self.__tree_hash__ + + @_tp_cache + def __getitem__(self, params): + if not isinstance(params, tuple): + params = (params,) + if not params and not _gorg(self) is Tuple: + raise TypeError( + "Parameter list to %s[...] cannot be empty" % _qualname(self)) + msg = "Parameters to generic types must be types." + params = tuple(_type_check(p, msg) for p in params) + if self is Generic: + # Generic can only be subscripted with unique type variables. + if not all(isinstance(p, TypeVar) for p in params): + raise TypeError( + "Parameters to Generic[...] must all be type variables") + if len(set(params)) != len(params): + raise TypeError( + "Parameters to Generic[...] must all be unique") + tvars = params + args = params + elif self in (Tuple, Callable): + tvars = _type_vars(params) + args = params + elif self is _Protocol: + # _Protocol is internal, don't check anything. + tvars = params + args = params + elif self.__origin__ in (Generic, _Protocol): + # Can't subscript Generic[...] or _Protocol[...]. 
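+            # Reached for e.g. Generic[T][int].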
+ raise TypeError("Cannot subscript already-subscripted %s" % + repr(self)) + else: + # Subscripting a regular Generic subclass. + _check_generic(self, params) + tvars = _type_vars(params) + args = params + + prepend = (self,) if self.__origin__ is None else () + return self.__class__(self.__name__, + prepend + self.__bases__, + dict(self.__dict__), + tvars=tvars, + args=args, + origin=self, + extra=self.__extra__, + orig_bases=self.__orig_bases__) + + def __subclasscheck__(self, cls): + if self.__origin__ is not None: + if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: + raise TypeError("Parameterized generics cannot be used with class " + "or instance checks") + return False + if self is Generic: + raise TypeError("Class %r cannot be used with class " + "or instance checks" % self) + return super(GenericMeta, self).__subclasscheck__(cls) + + def __instancecheck__(self, instance): + # Since we extend ABC.__subclasscheck__ and + # ABC.__instancecheck__ inlines the cache checking done by the + # latter, we must extend __instancecheck__ too. For simplicity + # we just skip the cache check -- instance checks for generic + # classes are supposed to be rare anyways. + if not isinstance(instance, type): + return issubclass(instance.__class__, self) + return False + + def __copy__(self): + return self.__class__(self.__name__, self.__bases__, dict(self.__dict__), + self.__parameters__, self.__args__, self.__origin__, + self.__extra__, self.__orig_bases__) + + def __setattr__(self, attr, value): + # We consider all the subscripted genrics as proxies for original class + if ( + attr.startswith('__') and attr.endswith('__') or + attr.startswith('_abc_') + ): + super(GenericMeta, self).__setattr__(attr, value) + else: + super(GenericMeta, _gorg(self)).__setattr__(attr, value) + + +# Prevent checks for Generic to crash when defining Generic. +Generic = None + + +def _generic_new(base_cls, cls, *args, **kwds): + # Assure type is erased on instantiation, + # but attempt to store it in __orig_class__ + if cls.__origin__ is None: + return base_cls.__new__(cls) + else: + origin = _gorg(cls) + obj = base_cls.__new__(origin) + try: + obj.__orig_class__ = cls + except AttributeError: + pass + obj.__init__(*args, **kwds) + return obj + + +class Generic(object): + """Abstract base class for generic types. + + A generic type is typically declared by inheriting from + this class parameterized with one or more type variables. + For example, a generic mapping type might be defined as:: + + class Mapping(Generic[KT, VT]): + def __getitem__(self, key: KT) -> VT: + ... + # Etc. + + This class can then be used as follows:: + + def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT: + try: + return mapping[key] + except KeyError: + return default + """ + + __metaclass__ = GenericMeta + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Generic): + raise TypeError("Type Generic cannot be instantiated; " + "it can be used only as a base class") + return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) + + +class _TypingEmpty(object): + """Internal placeholder for () or []. Used by TupleMeta and CallableMeta + to allow empty list/tuple in specific places, without allowing them + to sneak in where prohibited. + """ + + +class _TypingEllipsis(object): + """Internal placeholder for ... 
(ellipsis).""" + + +class TupleMeta(GenericMeta): + """Metaclass for Tuple (internal).""" + + @_tp_cache + def __getitem__(self, parameters): + if self.__origin__ is not None or not _geqv(self, Tuple): + # Normal generic rules apply if this is not the first subscription + # or a subscription of a subclass. + return super(TupleMeta, self).__getitem__(parameters) + if parameters == (): + return super(TupleMeta, self).__getitem__((_TypingEmpty,)) + if not isinstance(parameters, tuple): + parameters = (parameters,) + if len(parameters) == 2 and parameters[1] is Ellipsis: + msg = "Tuple[t, ...]: t must be a type." + p = _type_check(parameters[0], msg) + return super(TupleMeta, self).__getitem__((p, _TypingEllipsis)) + msg = "Tuple[t0, t1, ...]: each t must be a type." + parameters = tuple(_type_check(p, msg) for p in parameters) + return super(TupleMeta, self).__getitem__(parameters) + + def __instancecheck__(self, obj): + if self.__args__ is None: + return isinstance(obj, tuple) + raise TypeError("Parameterized Tuple cannot be used " + "with isinstance().") + + def __subclasscheck__(self, cls): + if self.__args__ is None: + return issubclass(cls, tuple) + raise TypeError("Parameterized Tuple cannot be used " + "with issubclass().") + + +class Tuple(tuple): + """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. + + Example: Tuple[T1, T2] is a tuple of two elements corresponding + to type variables T1 and T2. Tuple[int, float, str] is a tuple + of an int, a float and a string. + + To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. + """ + + __metaclass__ = TupleMeta + __extra__ = tuple + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Tuple): + raise TypeError("Type Tuple cannot be instantiated; " + "use tuple() instead") + return _generic_new(tuple, cls, *args, **kwds) + + +class CallableMeta(GenericMeta): + """ Metaclass for Callable.""" + + def __repr__(self): + if self.__origin__ is None: + return super(CallableMeta, self).__repr__() + return self._tree_repr(self._subs_tree()) + + def _tree_repr(self, tree): + if _gorg(self) is not Callable: + return super(CallableMeta, self)._tree_repr(tree) + # For actual Callable (not its subclass) we override + # super(CallableMeta, self)._tree_repr() for nice formatting. + arg_list = [] + for arg in tree[1:]: + if not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + if arg_list[0] == '...': + return repr(tree[0]) + '[..., %s]' % arg_list[1] + return (repr(tree[0]) + + '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1])) + + def __getitem__(self, parameters): + """A thin wrapper around __getitem_inner__ to provide the latter + with hashable arguments to improve speed. + """ + + if self.__origin__ is not None or not _geqv(self, Callable): + return super(CallableMeta, self).__getitem__(parameters) + if not isinstance(parameters, tuple) or len(parameters) != 2: + raise TypeError("Callable must be used as " + "Callable[[arg, ...], result].") + args, result = parameters + if args is Ellipsis: + parameters = (Ellipsis, result) + else: + if not isinstance(args, list): + raise TypeError("Callable[args, result]: args must be a list." + " Got %.100r." % (args,)) + parameters = (tuple(args), result) + return self.__getitem_inner__(parameters) + + @_tp_cache + def __getitem_inner__(self, parameters): + args, result = parameters + msg = "Callable[args, result]: result must be a type." 
+ result = _type_check(result, msg) + if args is Ellipsis: + return super(CallableMeta, self).__getitem__((_TypingEllipsis, result)) + msg = "Callable[[arg, ...], result]: each arg must be a type." + args = tuple(_type_check(arg, msg) for arg in args) + parameters = args + (result,) + return super(CallableMeta, self).__getitem__(parameters) + + +class Callable(object): + """Callable type; Callable[[int], str] is a function of (int) -> str. + + The subscription syntax must always be used with exactly two + values: the argument list and the return type. The argument list + must be a list of types or ellipsis; the return type must be a single type. + + There is no syntax to indicate optional or keyword arguments, + such function types are rarely used as callback types. + """ + + __metaclass__ = CallableMeta + __extra__ = collections_abc.Callable + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Callable): + raise TypeError("Type Callable cannot be instantiated; " + "use a non-abstract subclass instead") + return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) + + +def cast(typ, val): + """Cast a value to a type. + + This returns the value unchanged. To the type checker this + signals that the return value has the designated type, but at + runtime we intentionally don't check anything (we want this + to be as fast as possible). + """ + return val + + +def _get_defaults(func): + """Internal helper to extract the default arguments, by name.""" + code = func.__code__ + pos_count = code.co_argcount + arg_names = code.co_varnames + arg_names = arg_names[:pos_count] + defaults = func.__defaults__ or () + kwdefaults = func.__kwdefaults__ + res = dict(kwdefaults) if kwdefaults else {} + pos_offset = pos_count - len(defaults) + for name, value in zip(arg_names[pos_offset:], defaults): + assert name not in res + res[name] = value + return res + + +def get_type_hints(obj, globalns=None, localns=None): + """In Python 2 this is not supported and always returns None.""" + return None + + +def no_type_check(arg): + """Decorator to indicate that annotations are not type hints. + + The argument must be a class or function; if it is a class, it + applies recursively to all methods and classes defined in that class + (but not to methods defined in its superclasses or subclasses). + + This mutates the function(s) or class(es) in place. + """ + if isinstance(arg, type): + arg_attrs = arg.__dict__.copy() + for attr, val in arg.__dict__.items(): + if val in arg.__bases__: + arg_attrs.pop(attr) + for obj in arg_attrs.values(): + if isinstance(obj, types.FunctionType): + obj.__no_type_check__ = True + if isinstance(obj, type): + no_type_check(obj) + try: + arg.__no_type_check__ = True + except TypeError: # built-in classes + pass + return arg + + +def no_type_check_decorator(decorator): + """Decorator to give another decorator the @no_type_check effect. + + This wraps the decorator with something that wraps the decorated + function in @no_type_check. + """ + + @functools.wraps(decorator) + def wrapped_decorator(*args, **kwds): + func = decorator(*args, **kwds) + func = no_type_check(func) + return func + + return wrapped_decorator + + +def _overload_dummy(*args, **kwds): + """Helper for @overload to raise when called.""" + raise NotImplementedError( + "You should not call an overloaded function. 
" + "A series of @overload-decorated functions " + "outside a stub module should always be followed " + "by an implementation that is not @overload-ed.") + + +def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + """ + return _overload_dummy + + +class _ProtocolMeta(GenericMeta): + """Internal metaclass for _Protocol. + + This exists so _Protocol classes can be generic without deriving + from Generic. + """ + + def __instancecheck__(self, obj): + if _Protocol not in self.__bases__: + return super(_ProtocolMeta, self).__instancecheck__(obj) + raise TypeError("Protocols cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if not self._is_protocol: + # No structural checks since this isn't a protocol. + return NotImplemented + + if self is _Protocol: + # Every class is a subclass of the empty protocol. + return True + + # Find all attributes defined in the protocol. + attrs = self._get_protocol_attrs() + + for attr in attrs: + if not any(attr in d.__dict__ for d in cls.__mro__): + return False + return True + + def _get_protocol_attrs(self): + # Get all Protocol base classes. + protocol_bases = [] + for c in self.__mro__: + if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': + protocol_bases.append(c) + + # Get attributes included in protocol. + attrs = set() + for base in protocol_bases: + for attr in base.__dict__.keys(): + # Include attributes not defined in any non-protocol bases. + for c in self.__mro__: + if (c is not base and attr in c.__dict__ and + not getattr(c, '_is_protocol', False)): + break + else: + if (not attr.startswith('_abc_') and + attr != '__abstractmethods__' and + attr != '_is_protocol' and + attr != '__dict__' and + attr != '__args__' and + attr != '__slots__' and + attr != '_get_protocol_attrs' and + attr != '__next_in_mro__' and + attr != '__parameters__' and + attr != '__origin__' and + attr != '__orig_bases__' and + attr != '__extra__' and + attr != '__tree_hash__' and + attr != '__module__'): + attrs.add(attr) + + return attrs + + +class _Protocol(object): + """Internal base class for protocol classes. + + This implements a simple-minded structural issubclass check + (similar but more general than the one-offs in collections.abc + such as Hashable). + """ + + __metaclass__ = _ProtocolMeta + __slots__ = () + + _is_protocol = True + + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. + +Hashable = collections_abc.Hashable # Not generic. 
+ + +class Iterable(Generic[T_co]): + __slots__ = () + __extra__ = collections_abc.Iterable + + +class Iterator(Iterable[T_co]): + __slots__ = () + __extra__ = collections_abc.Iterator + + +class SupportsInt(_Protocol): + __slots__ = () + + @abstractmethod + def __int__(self): + pass + + +class SupportsFloat(_Protocol): + __slots__ = () + + @abstractmethod + def __float__(self): + pass + + +class SupportsComplex(_Protocol): + __slots__ = () + + @abstractmethod + def __complex__(self): + pass + + +class SupportsAbs(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __abs__(self): + pass + + +if hasattr(collections_abc, 'Reversible'): + class Reversible(Iterable[T_co]): + __slots__ = () + __extra__ = collections_abc.Reversible +else: + class Reversible(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __reversed__(self): + pass + + +Sized = collections_abc.Sized # Not generic. + + +class Container(Generic[T_co]): + __slots__ = () + __extra__ = collections_abc.Container + + +# Callable was defined earlier. + + +class AbstractSet(Sized, Iterable[T_co], Container[T_co]): + __slots__ = () + __extra__ = collections_abc.Set + + +class MutableSet(AbstractSet[T]): + __slots__ = () + __extra__ = collections_abc.MutableSet + + +# NOTE: It is only covariant in the value type. +class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co]): + __slots__ = () + __extra__ = collections_abc.Mapping + + +class MutableMapping(Mapping[KT, VT]): + __slots__ = () + __extra__ = collections_abc.MutableMapping + + +if hasattr(collections_abc, 'Reversible'): + class Sequence(Sized, Reversible[T_co], Container[T_co]): + __slots__ = () + __extra__ = collections_abc.Sequence +else: + class Sequence(Sized, Iterable[T_co], Container[T_co]): + __slots__ = () + __extra__ = collections_abc.Sequence + + +class MutableSequence(Sequence[T]): + __slots__ = () + __extra__ = collections_abc.MutableSequence + + +class ByteString(Sequence[int]): + pass + + +ByteString.register(str) +ByteString.register(bytearray) + + +class List(list, MutableSequence[T]): + __slots__ = () + __extra__ = list + + def __new__(cls, *args, **kwds): + if _geqv(cls, List): + raise TypeError("Type List cannot be instantiated; " + "use list() instead") + return _generic_new(list, cls, *args, **kwds) + + +class Deque(collections.deque, MutableSequence[T]): + __slots__ = () + __extra__ = collections.deque + + def __new__(cls, *args, **kwds): + if _geqv(cls, Deque): + return collections.deque(*args, **kwds) + return _generic_new(collections.deque, cls, *args, **kwds) + + +class Set(set, MutableSet[T]): + __slots__ = () + __extra__ = set + + def __new__(cls, *args, **kwds): + if _geqv(cls, Set): + raise TypeError("Type Set cannot be instantiated; " + "use set() instead") + return _generic_new(set, cls, *args, **kwds) + + +class FrozenSet(frozenset, AbstractSet[T_co]): + __slots__ = () + __extra__ = frozenset + + def __new__(cls, *args, **kwds): + if _geqv(cls, FrozenSet): + raise TypeError("Type FrozenSet cannot be instantiated; " + "use frozenset() instead") + return _generic_new(frozenset, cls, *args, **kwds) + + +class MappingView(Sized, Iterable[T_co]): + __slots__ = () + __extra__ = collections_abc.MappingView + + +class KeysView(MappingView[KT], AbstractSet[KT]): + __slots__ = () + __extra__ = collections_abc.KeysView + + +class ItemsView(MappingView[Tuple[KT, VT_co]], + AbstractSet[Tuple[KT, VT_co]], + Generic[KT, VT_co]): + __slots__ = () + __extra__ = collections_abc.ItemsView + + +class ValuesView(MappingView[VT_co]): + 
__slots__ = () + __extra__ = collections_abc.ValuesView + + +class Dict(dict, MutableMapping[KT, VT]): + __slots__ = () + __extra__ = dict + + def __new__(cls, *args, **kwds): + if _geqv(cls, Dict): + raise TypeError("Type Dict cannot be instantiated; " + "use dict() instead") + return _generic_new(dict, cls, *args, **kwds) + + +class DefaultDict(collections.defaultdict, MutableMapping[KT, VT]): + __slots__ = () + __extra__ = collections.defaultdict + + def __new__(cls, *args, **kwds): + if _geqv(cls, DefaultDict): + return collections.defaultdict(*args, **kwds) + return _generic_new(collections.defaultdict, cls, *args, **kwds) + + +class Counter(collections.Counter, Dict[T, int]): + __slots__ = () + __extra__ = collections.Counter + + def __new__(cls, *args, **kwds): + if _geqv(cls, Counter): + return collections.Counter(*args, **kwds) + return _generic_new(collections.Counter, cls, *args, **kwds) + + +# Determine what base class to use for Generator. +if hasattr(collections_abc, 'Generator'): + # Sufficiently recent versions of 3.5 have a Generator ABC. + _G_base = collections_abc.Generator +else: + # Fall back on the exact type. + _G_base = types.GeneratorType + + +class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co]): + __slots__ = () + __extra__ = _G_base + + def __new__(cls, *args, **kwds): + if _geqv(cls, Generator): + raise TypeError("Type Generator cannot be instantiated; " + "create a subclass instead") + return _generic_new(_G_base, cls, *args, **kwds) + + +# Internal type variable used for Type[]. +CT_co = TypeVar('CT_co', covariant=True, bound=type) + + +# This is not a real generic class. Don't use outside annotations. +class Type(Generic[CT_co]): + """A special construct usable to annotate class objects. + + For example, suppose we have the following classes:: + + class User: ... # Abstract base for User classes + class BasicUser(User): ... + class ProUser(User): ... + class TeamUser(User): ... + + And a function that takes a class argument that's a subclass of + User and returns an instance of the corresponding class:: + + U = TypeVar('U', bound=User) + def new_user(user_class: Type[U]) -> U: + user = user_class() + # (Here we could write the user object to a database) + return user + + joe = new_user(BasicUser) + + At this point the type checker knows that joe has type BasicUser. + """ + __slots__ = () + __extra__ = type + + +def NamedTuple(typename, fields): + """Typed version of namedtuple. + + Usage:: + + Employee = typing.NamedTuple('Employee', [('name', str), ('id', int)]) + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has one extra attribute: _field_types, + giving a dict mapping field names to types. (The field names + are in the _fields attribute, which is part of the namedtuple + API.) + """ + fields = [(n, t) for n, t in fields] + cls = collections.namedtuple(typename, [n for n, t in fields]) + cls._field_types = dict(fields) + # Set the module to the caller's module (otherwise it'd be 'typing'). + try: + cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + return cls + + +def NewType(name, tp): + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy function that simply returns its argument. 
Usage:: + + UserId = NewType('UserId', int) + + def name_by_id(user_id): + # type: (UserId) -> str + ... + + UserId('user') # Fails type check + + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + + num = UserId(5) + 1 # type: int + """ + + def new_type(x): + return x + + # Some versions of Python 2 complain because of making all strings unicode + new_type.__name__ = str(name) + new_type.__supertype__ = tp + return new_type + + +# Python-version-specific alias (Python 2: unicode; Python 3: str) +Text = unicode + + +# Constant that's True when type checking, but False here. +TYPE_CHECKING = False + + +class IO(Generic[AnyStr]): + """Generic base class for TextIO and BinaryIO. + + This is an abstract, generic version of the return of open(). + + NOTE: This does not distinguish between the different possible + classes (text vs. binary, read vs. write vs. read/write, + append-only, unbuffered). The TextIO and BinaryIO subclasses + below capture the distinctions between text vs. binary, which is + pervasive in the interface; however we currently do not offer a + way to track the other distinctions in the type system. + """ + + __slots__ = () + + @abstractproperty + def mode(self): + pass + + @abstractproperty + def name(self): + pass + + @abstractmethod + def close(self): + pass + + @abstractmethod + def closed(self): + pass + + @abstractmethod + def fileno(self): + pass + + @abstractmethod + def flush(self): + pass + + @abstractmethod + def isatty(self): + pass + + @abstractmethod + def read(self, n=-1): + pass + + @abstractmethod + def readable(self): + pass + + @abstractmethod + def readline(self, limit=-1): + pass + + @abstractmethod + def readlines(self, hint=-1): + pass + + @abstractmethod + def seek(self, offset, whence=0): + pass + + @abstractmethod + def seekable(self): + pass + + @abstractmethod + def tell(self): + pass + + @abstractmethod + def truncate(self, size=None): + pass + + @abstractmethod + def writable(self): + pass + + @abstractmethod + def write(self, s): + pass + + @abstractmethod + def writelines(self, lines): + pass + + @abstractmethod + def __enter__(self): + pass + + @abstractmethod + def __exit__(self, type, value, traceback): + pass + + +class BinaryIO(IO[bytes]): + """Typed version of the return of open() in binary mode.""" + + __slots__ = () + + @abstractmethod + def write(self, s): + pass + + @abstractmethod + def __enter__(self): + pass + + +class TextIO(IO[unicode]): + """Typed version of the return of open() in text mode.""" + + __slots__ = () + + @abstractproperty + def buffer(self): + pass + + @abstractproperty + def encoding(self): + pass + + @abstractproperty + def errors(self): + pass + + @abstractproperty + def line_buffering(self): + pass + + @abstractproperty + def newlines(self): + pass + + @abstractmethod + def __enter__(self): + pass + + +class io(object): + """Wrapper namespace for IO generic classes.""" + + __all__ = ['IO', 'TextIO', 'BinaryIO'] + IO = IO + TextIO = TextIO + BinaryIO = BinaryIO + + +io.__name__ = __name__ + b'.io' +sys.modules[io.__name__] = io + + +Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), + lambda p: p.pattern) +Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), + lambda m: m.re.pattern) + + +class re(object): + """Wrapper namespace for re type aliases.""" + + __all__ = ['Pattern', 'Match'] + Pattern = Pattern + Match = Match + + +re.__name__ = __name__ + b'.re' +sys.modules[re.__name__] = re diff --git a/lib-typing/3.2/mod_generics_cache.py 
b/lib-typing/3.2/mod_generics_cache.py new file mode 100644 index 000000000000..d9a60b4b28c3 --- /dev/null +++ b/lib-typing/3.2/mod_generics_cache.py @@ -0,0 +1,14 @@ +"""Module for testing the behavior of generics across different modules.""" + +from typing import TypeVar, Generic + +T = TypeVar('T') + + +class A(Generic[T]): + pass + + +class B(Generic[T]): + class A(Generic[T]): + pass diff --git a/lib-typing/3.2/test_typing.py b/lib-typing/3.2/test_typing.py new file mode 100644 index 000000000000..586a7870013a --- /dev/null +++ b/lib-typing/3.2/test_typing.py @@ -0,0 +1,2422 @@ +import contextlib +import collections +import pickle +import re +import sys +from unittest import TestCase, main, skipUnless, SkipTest +from copy import copy, deepcopy + +from typing import Any +from typing import TypeVar, AnyStr +from typing import T, KT, VT # Not in __all__. +from typing import Union, Optional +from typing import Tuple, List, MutableMapping +from typing import Callable +from typing import Generic, ClassVar, GenericMeta +from typing import cast +from typing import get_type_hints +from typing import no_type_check, no_type_check_decorator +from typing import Type +from typing import NewType +from typing import NamedTuple +from typing import IO, TextIO, BinaryIO +from typing import Pattern, Match +import abc +import typing +import weakref +try: + import collections.abc as collections_abc +except ImportError: + import collections as collections_abc # Fallback for PY3.2. + + +class BaseTestCase(TestCase): + + def assertIsSubclass(self, cls, class_or_tuple, msg=None): + if not issubclass(cls, class_or_tuple): + message = '%r is not a subclass of %r' % (cls, class_or_tuple) + if msg is not None: + message += ' : %s' % msg + raise self.failureException(message) + + def assertNotIsSubclass(self, cls, class_or_tuple, msg=None): + if issubclass(cls, class_or_tuple): + message = '%r is a subclass of %r' % (cls, class_or_tuple) + if msg is not None: + message += ' : %s' % msg + raise self.failureException(message) + + def clear_caches(self): + for f in typing._cleanups: + f() + + +class Employee: + pass + + +class Manager(Employee): + pass + + +class Founder(Employee): + pass + + +class ManagingFounder(Manager, Founder): + pass + + +class AnyTests(BaseTestCase): + + def test_any_instance_type_error(self): + with self.assertRaises(TypeError): + isinstance(42, Any) + + def test_any_subclass_type_error(self): + with self.assertRaises(TypeError): + issubclass(Employee, Any) + with self.assertRaises(TypeError): + issubclass(Any, Employee) + + def test_repr(self): + self.assertEqual(repr(Any), 'typing.Any') + + def test_errors(self): + with self.assertRaises(TypeError): + issubclass(42, Any) + with self.assertRaises(TypeError): + Any[int] # Any is not a generic type. + + def test_cannot_subclass(self): + with self.assertRaises(TypeError): + class A(Any): + pass + with self.assertRaises(TypeError): + class A(type(Any)): + pass + + def test_cannot_instantiate(self): + with self.assertRaises(TypeError): + Any() + with self.assertRaises(TypeError): + type(Any)() + + def test_any_works_with_alias(self): + # These expressions must simply not fail. + typing.Match[Any] + typing.Pattern[Any] + typing.IO[Any] + + +class TypeVarTests(BaseTestCase): + + def test_basic_plain(self): + T = TypeVar('T') + # T equals itself. 
+ self.assertEqual(T, T) + # T is an instance of TypeVar + self.assertIsInstance(T, TypeVar) + + def test_typevar_instance_type_error(self): + T = TypeVar('T') + with self.assertRaises(TypeError): + isinstance(42, T) + + def test_typevar_subclass_type_error(self): + T = TypeVar('T') + with self.assertRaises(TypeError): + issubclass(int, T) + with self.assertRaises(TypeError): + issubclass(T, int) + + def test_constrained_error(self): + with self.assertRaises(TypeError): + X = TypeVar('X', int) + X + + def test_union_unique(self): + X = TypeVar('X') + Y = TypeVar('Y') + self.assertNotEqual(X, Y) + self.assertEqual(Union[X], X) + self.assertNotEqual(Union[X], Union[X, Y]) + self.assertEqual(Union[X, X], X) + self.assertNotEqual(Union[X, int], Union[X]) + self.assertNotEqual(Union[X, int], Union[int]) + self.assertEqual(Union[X, int].__args__, (X, int)) + self.assertEqual(Union[X, int].__parameters__, (X,)) + self.assertIs(Union[X, int].__origin__, Union) + + def test_union_constrained(self): + A = TypeVar('A', str, bytes) + self.assertNotEqual(Union[A, str], Union[A]) + + def test_repr(self): + self.assertEqual(repr(T), '~T') + self.assertEqual(repr(KT), '~KT') + self.assertEqual(repr(VT), '~VT') + self.assertEqual(repr(AnyStr), '~AnyStr') + T_co = TypeVar('T_co', covariant=True) + self.assertEqual(repr(T_co), '+T_co') + T_contra = TypeVar('T_contra', contravariant=True) + self.assertEqual(repr(T_contra), '-T_contra') + + def test_no_redefinition(self): + self.assertNotEqual(TypeVar('T'), TypeVar('T')) + self.assertNotEqual(TypeVar('T', int, str), TypeVar('T', int, str)) + + def test_cannot_subclass_vars(self): + with self.assertRaises(TypeError): + class V(TypeVar('T')): + pass + + def test_cannot_subclass_var_itself(self): + with self.assertRaises(TypeError): + class V(TypeVar): + pass + + def test_cannot_instantiate_vars(self): + with self.assertRaises(TypeError): + TypeVar('A')() + + def test_bound_errors(self): + with self.assertRaises(TypeError): + TypeVar('X', bound=42) + with self.assertRaises(TypeError): + TypeVar('X', str, float, bound=Employee) + + def test_no_bivariant(self): + with self.assertRaises(ValueError): + TypeVar('T', covariant=True, contravariant=True) + + +class UnionTests(BaseTestCase): + + def test_basics(self): + u = Union[int, float] + self.assertNotEqual(u, Union) + + def test_subclass_error(self): + with self.assertRaises(TypeError): + issubclass(int, Union) + with self.assertRaises(TypeError): + issubclass(Union, int) + with self.assertRaises(TypeError): + issubclass(int, Union[int, str]) + with self.assertRaises(TypeError): + issubclass(Union[int, str], int) + + def test_union_any(self): + u = Union[Any] + self.assertEqual(u, Any) + u1 = Union[int, Any] + u2 = Union[Any, int] + u3 = Union[Any, object] + self.assertEqual(u1, u2) + self.assertNotEqual(u1, Any) + self.assertNotEqual(u2, Any) + self.assertNotEqual(u3, Any) + + def test_union_object(self): + u = Union[object] + self.assertEqual(u, object) + u = Union[int, object] + self.assertEqual(u, object) + u = Union[object, int] + self.assertEqual(u, object) + + def test_unordered(self): + u1 = Union[int, float] + u2 = Union[float, int] + self.assertEqual(u1, u2) + + def test_single_class_disappears(self): + t = Union[Employee] + self.assertIs(t, Employee) + + def test_base_class_disappears(self): + u = Union[Employee, Manager, int] + self.assertEqual(u, Union[int, Employee]) + u = Union[Manager, int, Employee] + self.assertEqual(u, Union[int, Employee]) + u = Union[Employee, Manager] + self.assertIs(u, 
Employee) + + def test_union_union(self): + u = Union[int, float] + v = Union[u, Employee] + self.assertEqual(v, Union[int, float, Employee]) + + def test_repr(self): + self.assertEqual(repr(Union), 'typing.Union') + u = Union[Employee, int] + self.assertEqual(repr(u), 'typing.Union[%s.Employee, int]' % __name__) + u = Union[int, Employee] + self.assertEqual(repr(u), 'typing.Union[int, %s.Employee]' % __name__) + T = TypeVar('T') + u = Union[T, int][int] + self.assertEqual(repr(u), repr(int)) + u = Union[List[int], int] + self.assertEqual(repr(u), 'typing.Union[typing.List[int], int]') + + def test_cannot_subclass(self): + with self.assertRaises(TypeError): + class C(Union): + pass + with self.assertRaises(TypeError): + class C(type(Union)): + pass + with self.assertRaises(TypeError): + class C(Union[int, str]): + pass + + def test_cannot_instantiate(self): + with self.assertRaises(TypeError): + Union() + with self.assertRaises(TypeError): + type(Union)() + u = Union[int, float] + with self.assertRaises(TypeError): + u() + with self.assertRaises(TypeError): + type(u)() + + def test_union_generalization(self): + self.assertFalse(Union[str, typing.Iterable[int]] == str) + self.assertFalse(Union[str, typing.Iterable[int]] == typing.Iterable[int]) + self.assertTrue(Union[str, typing.Iterable] == typing.Iterable) + + def test_union_compare_other(self): + self.assertNotEqual(Union, object) + self.assertNotEqual(Union, Any) + self.assertNotEqual(ClassVar, Union) + self.assertNotEqual(Optional, Union) + self.assertNotEqual([None], Optional) + self.assertNotEqual(Optional, typing.Mapping) + self.assertNotEqual(Optional[typing.MutableMapping], Union) + + def test_optional(self): + o = Optional[int] + u = Union[int, None] + self.assertEqual(o, u) + + def test_empty(self): + with self.assertRaises(TypeError): + Union[()] + + def test_union_instance_type_error(self): + with self.assertRaises(TypeError): + isinstance(42, Union[int, str]) + + def test_no_eval_union(self): + u = Union[int, str] + def f(x: u): ... + self.assertIs(get_type_hints(f)['x'], u) + + def test_function_repr_union(self): + def fun() -> int: ... + self.assertEqual(repr(Union[fun, int]), 'typing.Union[fun, int]') + + def test_union_str_pattern(self): + # Shouldn't crash; see http://bugs.python.org/issue25390 + A = Union[str, Pattern] + A + + def test_etree(self): + # See https://github.com/python/typing/issues/229 + # (Only relevant for Python 2.) + try: + from xml.etree.cElementTree import Element + except ImportError: + raise SkipTest("cElementTree not found") + Union[Element, str] # Shouldn't crash + + def Elem(*args): + return Element(*args) + + Union[Elem, str] # Nor should this + + +class TupleTests(BaseTestCase): + + def test_basics(self): + with self.assertRaises(TypeError): + issubclass(Tuple, Tuple[int, str]) + with self.assertRaises(TypeError): + issubclass(tuple, Tuple[int, str]) + + class TP(tuple): ... 
+ self.assertTrue(issubclass(tuple, Tuple)) + self.assertTrue(issubclass(TP, Tuple)) + + def test_equality(self): + self.assertEqual(Tuple[int], Tuple[int]) + self.assertEqual(Tuple[int, ...], Tuple[int, ...]) + self.assertNotEqual(Tuple[int], Tuple[int, int]) + self.assertNotEqual(Tuple[int], Tuple[int, ...]) + + def test_tuple_subclass(self): + class MyTuple(tuple): + pass + self.assertTrue(issubclass(MyTuple, Tuple)) + + def test_tuple_instance_type_error(self): + with self.assertRaises(TypeError): + isinstance((0, 0), Tuple[int, int]) + self.assertIsInstance((0, 0), Tuple) + + def test_repr(self): + self.assertEqual(repr(Tuple), 'typing.Tuple') + self.assertEqual(repr(Tuple[()]), 'typing.Tuple[()]') + self.assertEqual(repr(Tuple[int, float]), 'typing.Tuple[int, float]') + self.assertEqual(repr(Tuple[int, ...]), 'typing.Tuple[int, ...]') + + def test_errors(self): + with self.assertRaises(TypeError): + issubclass(42, Tuple) + with self.assertRaises(TypeError): + issubclass(42, Tuple[int]) + + +class CallableTests(BaseTestCase): + + def test_self_subclass(self): + with self.assertRaises(TypeError): + self.assertTrue(issubclass(type(lambda x: x), Callable[[int], int])) + self.assertTrue(issubclass(type(lambda x: x), Callable)) + + def test_eq_hash(self): + self.assertEqual(Callable[[int], int], Callable[[int], int]) + self.assertEqual(len({Callable[[int], int], Callable[[int], int]}), 1) + self.assertNotEqual(Callable[[int], int], Callable[[int], str]) + self.assertNotEqual(Callable[[int], int], Callable[[str], int]) + self.assertNotEqual(Callable[[int], int], Callable[[int, int], int]) + self.assertNotEqual(Callable[[int], int], Callable[[], int]) + self.assertNotEqual(Callable[[int], int], Callable) + + def test_cannot_instantiate(self): + with self.assertRaises(TypeError): + Callable() + with self.assertRaises(TypeError): + type(Callable)() + c = Callable[[int], str] + with self.assertRaises(TypeError): + c() + with self.assertRaises(TypeError): + type(c)() + + def test_callable_wrong_forms(self): + with self.assertRaises(TypeError): + Callable[[...], int] + with self.assertRaises(TypeError): + Callable[(), int] + with self.assertRaises(TypeError): + Callable[[()], int] + with self.assertRaises(TypeError): + Callable[[int, 1], 2] + with self.assertRaises(TypeError): + Callable[int] + + def test_callable_instance_works(self): + def f(): + pass + self.assertIsInstance(f, Callable) + self.assertNotIsInstance(None, Callable) + + def test_callable_instance_type_error(self): + def f(): + pass + with self.assertRaises(TypeError): + self.assertIsInstance(f, Callable[[], None]) + with self.assertRaises(TypeError): + self.assertIsInstance(f, Callable[[], Any]) + with self.assertRaises(TypeError): + self.assertNotIsInstance(None, Callable[[], None]) + with self.assertRaises(TypeError): + self.assertNotIsInstance(None, Callable[[], Any]) + + def test_repr(self): + ct0 = Callable[[], bool] + self.assertEqual(repr(ct0), 'typing.Callable[[], bool]') + ct2 = Callable[[str, float], int] + self.assertEqual(repr(ct2), 'typing.Callable[[str, float], int]') + ctv = Callable[..., str] + self.assertEqual(repr(ctv), 'typing.Callable[..., str]') + + def test_callable_with_ellipsis(self): + + def foo(a: Callable[..., T]): + pass + + self.assertEqual(get_type_hints(foo, globals(), locals()), + {'a': Callable[..., T]}) + + def test_ellipsis_in_generic(self): + # Shouldn't crash; see https://github.com/python/typing/issues/259 + typing.List[Callable[..., str]] + + +XK = TypeVar('XK', str, bytes) +XV = 
TypeVar('XV') + + +class SimpleMapping(Generic[XK, XV]): + + def __getitem__(self, key: XK) -> XV: + ... + + def __setitem__(self, key: XK, value: XV): + ... + + def get(self, key: XK, default: XV = None) -> XV: + ... + + +class MySimpleMapping(SimpleMapping[XK, XV]): + + def __init__(self): + self.store = {} + + def __getitem__(self, key: str): + return self.store[key] + + def __setitem__(self, key: str, value): + self.store[key] = value + + def get(self, key: str, default=None): + try: + return self.store[key] + except KeyError: + return default + + +class ProtocolTests(BaseTestCase): + + def test_supports_int(self): + self.assertIsSubclass(int, typing.SupportsInt) + self.assertNotIsSubclass(str, typing.SupportsInt) + + def test_supports_float(self): + self.assertIsSubclass(float, typing.SupportsFloat) + self.assertNotIsSubclass(str, typing.SupportsFloat) + + def test_supports_complex(self): + + # Note: complex itself doesn't have __complex__. + class C: + def __complex__(self): + return 0j + + self.assertIsSubclass(C, typing.SupportsComplex) + self.assertNotIsSubclass(str, typing.SupportsComplex) + + def test_supports_bytes(self): + + # Note: bytes itself doesn't have __bytes__. + class B: + def __bytes__(self): + return b'' + + self.assertIsSubclass(B, typing.SupportsBytes) + self.assertNotIsSubclass(str, typing.SupportsBytes) + + def test_supports_abs(self): + self.assertIsSubclass(float, typing.SupportsAbs) + self.assertIsSubclass(int, typing.SupportsAbs) + self.assertNotIsSubclass(str, typing.SupportsAbs) + + def test_supports_round(self): + issubclass(float, typing.SupportsRound) + self.assertIsSubclass(float, typing.SupportsRound) + self.assertIsSubclass(int, typing.SupportsRound) + self.assertNotIsSubclass(str, typing.SupportsRound) + + def test_reversible(self): + self.assertIsSubclass(list, typing.Reversible) + self.assertNotIsSubclass(int, typing.Reversible) + + def test_protocol_instance_type_error(self): + with self.assertRaises(TypeError): + isinstance(0, typing.SupportsAbs) + class C1(typing.SupportsInt): + def __int__(self) -> int: + return 42 + class C2(C1): + pass + c = C2() + self.assertIsInstance(c, C1) + + +class GenericTests(BaseTestCase): + + def test_basics(self): + X = SimpleMapping[str, Any] + self.assertEqual(X.__parameters__, ()) + with self.assertRaises(TypeError): + X[str] + with self.assertRaises(TypeError): + X[str, str] + Y = SimpleMapping[XK, str] + self.assertEqual(Y.__parameters__, (XK,)) + Y[str] + with self.assertRaises(TypeError): + Y[str, str] + self.assertIsSubclass(SimpleMapping[str, int], SimpleMapping) + + def test_generic_errors(self): + T = TypeVar('T') + S = TypeVar('S') + with self.assertRaises(TypeError): + Generic[T]() + with self.assertRaises(TypeError): + Generic[T][T] + with self.assertRaises(TypeError): + Generic[T][S] + with self.assertRaises(TypeError): + isinstance([], List[int]) + with self.assertRaises(TypeError): + issubclass(list, List[int]) + with self.assertRaises(TypeError): + class NewGeneric(Generic): ... + with self.assertRaises(TypeError): + class MyGeneric(Generic[T], Generic[S]): ... + with self.assertRaises(TypeError): + class MyGeneric(List[T], Generic[S]): ... + + def test_init(self): + T = TypeVar('T') + S = TypeVar('S') + with self.assertRaises(TypeError): + Generic[T, T] + with self.assertRaises(TypeError): + Generic[T, S, T] + + def test_repr(self): + self.assertEqual(repr(SimpleMapping), + __name__ + '.' + 'SimpleMapping') + self.assertEqual(repr(MySimpleMapping), + __name__ + '.' 
+ 'MySimpleMapping') + + def test_chain_repr(self): + T = TypeVar('T') + S = TypeVar('S') + + class C(Generic[T]): + pass + + X = C[Tuple[S, T]] + self.assertEqual(X, C[Tuple[S, T]]) + self.assertNotEqual(X, C[Tuple[T, S]]) + + Y = X[T, int] + self.assertEqual(Y, X[T, int]) + self.assertNotEqual(Y, X[S, int]) + self.assertNotEqual(Y, X[T, str]) + + Z = Y[str] + self.assertEqual(Z, Y[str]) + self.assertNotEqual(Z, Y[int]) + self.assertNotEqual(Z, Y[T]) + + self.assertTrue(str(Z).endswith( + '.C[typing.Tuple[str, int]]')) + + def test_new_repr(self): + T = TypeVar('T') + U = TypeVar('U', covariant=True) + S = TypeVar('S') + + self.assertEqual(repr(List), 'typing.List') + self.assertEqual(repr(List[T]), 'typing.List[~T]') + self.assertEqual(repr(List[U]), 'typing.List[+U]') + self.assertEqual(repr(List[S][T][int]), 'typing.List[int]') + self.assertEqual(repr(List[int]), 'typing.List[int]') + + def test_new_repr_complex(self): + T = TypeVar('T') + TS = TypeVar('TS') + + self.assertEqual(repr(typing.Mapping[T, TS][TS, T]), 'typing.Mapping[~TS, ~T]') + self.assertEqual(repr(List[Tuple[T, TS]][int, T]), + 'typing.List[typing.Tuple[int, ~T]]') + self.assertEqual( + repr(List[Tuple[T, T]][List[int]]), + 'typing.List[typing.Tuple[typing.List[int], typing.List[int]]]' + ) + + def test_new_repr_bare(self): + T = TypeVar('T') + self.assertEqual(repr(Generic[T]), 'typing.Generic[~T]') + self.assertEqual(repr(typing._Protocol[T]), 'typing.Protocol[~T]') + class C(typing.Dict[Any, Any]): ... + # this line should just work + repr(C.__mro__) + + def test_dict(self): + T = TypeVar('T') + + class B(Generic[T]): + pass + + b = B() + b.foo = 42 + self.assertEqual(b.__dict__, {'foo': 42}) + + class C(B[int]): + pass + + c = C() + c.bar = 'abc' + self.assertEqual(c.__dict__, {'bar': 'abc'}) + + def test_subscripted_generics_as_proxies(self): + T = TypeVar('T') + class C(Generic[T]): + x = 'def' + self.assertEqual(C[int].x, 'def') + self.assertEqual(C[C[int]].x, 'def') + C[C[int]].x = 'changed' + self.assertEqual(C.x, 'changed') + self.assertEqual(C[str].x, 'changed') + C[List[str]].z = 'new' + self.assertEqual(C.z, 'new') + self.assertEqual(C[Tuple[int]].z, 'new') + + self.assertEqual(C().x, 'changed') + self.assertEqual(C[Tuple[str]]().z, 'new') + + class D(C[T]): + pass + self.assertEqual(D[int].x, 'changed') + self.assertEqual(D.z, 'new') + D.z = 'from derived z' + D[int].x = 'from derived x' + self.assertEqual(C.x, 'changed') + self.assertEqual(C[int].z, 'new') + self.assertEqual(D.x, 'from derived x') + self.assertEqual(D[str].z, 'from derived z') + + def test_abc_registry_kept(self): + T = TypeVar('T') + class C(Generic[T]): ... 
+ C.register(int) + self.assertIsInstance(1, C) + C[int] + self.assertIsInstance(1, C) + + def test_false_subclasses(self): + class MyMapping(MutableMapping[str, str]): pass + self.assertNotIsInstance({}, MyMapping) + self.assertNotIsSubclass(dict, MyMapping) + + def test_abc_bases(self): + class MM(MutableMapping[str, str]): + def __getitem__(self, k): + return None + def __setitem__(self, k, v): + pass + def __delitem__(self, k): + pass + def __iter__(self): + return iter(()) + def __len__(self): + return 0 + # this should just work + MM().update() + self.assertIsInstance(MM(), collections_abc.MutableMapping) + self.assertIsInstance(MM(), MutableMapping) + self.assertNotIsInstance(MM(), List) + self.assertNotIsInstance({}, MM) + + def test_multiple_bases(self): + class MM1(MutableMapping[str, str], collections_abc.MutableMapping): + pass + with self.assertRaises(TypeError): + # consistent MRO not possible + class MM2(collections_abc.MutableMapping, MutableMapping[str, str]): + pass + + def test_orig_bases(self): + T = TypeVar('T') + class C(typing.Dict[str, T]): ... + self.assertEqual(C.__orig_bases__, (typing.Dict[str, T],)) + + def test_naive_runtime_checks(self): + def naive_dict_check(obj, tp): + # Check if a dictionary conforms to Dict type + if len(tp.__parameters__) > 0: + raise NotImplementedError + if tp.__args__: + KT, VT = tp.__args__ + return all( + isinstance(k, KT) and isinstance(v, VT) + for k, v in obj.items() + ) + self.assertTrue(naive_dict_check({'x': 1}, typing.Dict[str, int])) + self.assertFalse(naive_dict_check({1: 'x'}, typing.Dict[str, int])) + with self.assertRaises(NotImplementedError): + naive_dict_check({1: 'x'}, typing.Dict[str, T]) + + def naive_generic_check(obj, tp): + # Check if an instance conforms to the generic class + if not hasattr(obj, '__orig_class__'): + raise NotImplementedError + return obj.__orig_class__ == tp + class Node(Generic[T]): ... + self.assertTrue(naive_generic_check(Node[int](), Node[int])) + self.assertFalse(naive_generic_check(Node[str](), Node[int])) + self.assertFalse(naive_generic_check(Node[str](), List)) + with self.assertRaises(NotImplementedError): + naive_generic_check([1, 2, 3], Node[int]) + + def naive_list_base_check(obj, tp): + # Check if list conforms to a List subclass + return all(isinstance(x, tp.__orig_bases__[0].__args__[0]) + for x in obj) + class C(List[int]): ... + self.assertTrue(naive_list_base_check([1, 2, 3], C)) + self.assertFalse(naive_list_base_check(['a', 'b'], C)) + + def test_multi_subscr_base(self): + T = TypeVar('T') + U = TypeVar('U') + V = TypeVar('V') + class C(List[T][U][V]): ... + class D(C, List[T][U][V]): ... 
+ self.assertEqual(C.__parameters__, (V,)) + self.assertEqual(D.__parameters__, (V,)) + self.assertEqual(C[int].__parameters__, ()) + self.assertEqual(D[int].__parameters__, ()) + self.assertEqual(C[int].__args__, (int,)) + self.assertEqual(D[int].__args__, (int,)) + self.assertEqual(C.__bases__, (List,)) + self.assertEqual(D.__bases__, (C, List)) + self.assertEqual(C.__orig_bases__, (List[T][U][V],)) + self.assertEqual(D.__orig_bases__, (C, List[T][U][V])) + + def test_subscript_meta(self): + T = TypeVar('T') + self.assertEqual(Type[GenericMeta], Type[GenericMeta]) + self.assertEqual(Union[T, int][GenericMeta], Union[GenericMeta, int]) + self.assertEqual(Callable[..., GenericMeta].__args__, (Ellipsis, GenericMeta)) + + def test_generic_hashes(self): + try: + from test import mod_generics_cache + except ImportError: # for Python 3.4 and previous versions + import mod_generics_cache + class A(Generic[T]): + ... + + class B(Generic[T]): + class A(Generic[T]): + ... + + self.assertEqual(A, A) + self.assertEqual(mod_generics_cache.A[str], mod_generics_cache.A[str]) + self.assertEqual(B.A, B.A) + self.assertEqual(mod_generics_cache.B.A[B.A[str]], + mod_generics_cache.B.A[B.A[str]]) + + self.assertNotEqual(A, B.A) + self.assertNotEqual(A, mod_generics_cache.A) + self.assertNotEqual(A, mod_generics_cache.B.A) + self.assertNotEqual(B.A, mod_generics_cache.A) + self.assertNotEqual(B.A, mod_generics_cache.B.A) + + self.assertNotEqual(A[str], B.A[str]) + self.assertNotEqual(A[List[Any]], B.A[List[Any]]) + self.assertNotEqual(A[str], mod_generics_cache.A[str]) + self.assertNotEqual(A[str], mod_generics_cache.B.A[str]) + self.assertNotEqual(B.A[int], mod_generics_cache.A[int]) + self.assertNotEqual(B.A[List[Any]], mod_generics_cache.B.A[List[Any]]) + + self.assertNotEqual(Tuple[A[str]], Tuple[B.A[str]]) + self.assertNotEqual(Tuple[A[List[Any]]], Tuple[B.A[List[Any]]]) + self.assertNotEqual(Union[str, A[str]], Union[str, mod_generics_cache.A[str]]) + self.assertNotEqual(Union[A[str], A[str]], + Union[A[str], mod_generics_cache.A[str]]) + self.assertNotEqual(typing.FrozenSet[A[str]], + typing.FrozenSet[mod_generics_cache.B.A[str]]) + + if sys.version_info[:2] > (3, 2): + self.assertTrue(repr(Tuple[A[str]]).endswith('.A[str]]')) + self.assertTrue(repr(Tuple[B.A[str]]).endswith('.B.A[str]]')) + self.assertTrue(repr(Tuple[mod_generics_cache.A[str]]) + .endswith('mod_generics_cache.A[str]]')) + self.assertTrue(repr(Tuple[mod_generics_cache.B.A[str]]) + .endswith('mod_generics_cache.B.A[str]]')) + + def test_extended_generic_rules_eq(self): + T = TypeVar('T') + U = TypeVar('U') + self.assertEqual(Tuple[T, T][int], Tuple[int, int]) + self.assertEqual(typing.Iterable[Tuple[T, T]][T], typing.Iterable[Tuple[T, T]]) + with self.assertRaises(TypeError): + Tuple[T, int][()] + with self.assertRaises(TypeError): + Tuple[T, U][T, ...] + + self.assertEqual(Union[T, int][int], int) + self.assertEqual(Union[T, U][int, Union[int, str]], Union[int, str]) + class Base: ... + class Derived(Base): ... 
+ self.assertEqual(Union[T, Base][Derived], Base) + with self.assertRaises(TypeError): + Union[T, int][1] + + self.assertEqual(Callable[[T], T][KT], Callable[[KT], KT]) + self.assertEqual(Callable[..., List[T]][int], Callable[..., List[int]]) + with self.assertRaises(TypeError): + Callable[[T], U][..., int] + with self.assertRaises(TypeError): + Callable[[T], U][[], int] + + def test_extended_generic_rules_repr(self): + T = TypeVar('T') + self.assertEqual(repr(Union[Tuple, Callable]).replace('typing.', ''), + 'Union[Tuple, Callable]') + self.assertEqual(repr(Union[Tuple, Tuple[int]]).replace('typing.', ''), + 'Tuple') + self.assertEqual(repr(Callable[..., Optional[T]][int]).replace('typing.', ''), + 'Callable[..., Union[int, NoneType]]') + self.assertEqual(repr(Callable[[], List[T]][int]).replace('typing.', ''), + 'Callable[[], List[int]]') + + def test_generic_forward_ref(self): + def foobar(x: List[List['CC']]): ... + class CC: ... + self.assertEqual( + get_type_hints(foobar, globals(), locals()), + {'x': List[List[CC]]} + ) + T = TypeVar('T') + AT = Tuple[T, ...] + def barfoo(x: AT): ... + self.assertIs(get_type_hints(barfoo, globals(), locals())['x'], AT) + CT = Callable[..., List[T]] + def barfoo2(x: CT): ... + self.assertIs(get_type_hints(barfoo2, globals(), locals())['x'], CT) + + def test_extended_generic_rules_subclassing(self): + class T1(Tuple[T, KT]): ... + class T2(Tuple[T, ...]): ... + class C1(Callable[[T], T]): ... + class C2(Callable[..., int]): + def __call__(self): + return None + + self.assertEqual(T1.__parameters__, (T, KT)) + self.assertEqual(T1[int, str].__args__, (int, str)) + self.assertEqual(T1[int, T].__origin__, T1) + + self.assertEqual(T2.__parameters__, (T,)) + with self.assertRaises(TypeError): + T1[int] + with self.assertRaises(TypeError): + T2[int, str] + + self.assertEqual(repr(C1[int]).split('.')[-1], 'C1[int]') + self.assertEqual(C2.__parameters__, ()) + self.assertIsInstance(C2(), collections_abc.Callable) + self.assertIsSubclass(C2, collections_abc.Callable) + self.assertIsSubclass(C1, collections_abc.Callable) + self.assertIsInstance(T1(), tuple) + self.assertIsSubclass(T2, tuple) + self.assertIsSubclass(Tuple[int, ...], typing.Sequence) + self.assertIsSubclass(Tuple[int, ...], typing.Iterable) + + def test_fail_with_bare_union(self): + with self.assertRaises(TypeError): + List[Union] + with self.assertRaises(TypeError): + Tuple[Optional] + with self.assertRaises(TypeError): + ClassVar[ClassVar] + with self.assertRaises(TypeError): + List[ClassVar[int]] + + def test_fail_with_bare_generic(self): + T = TypeVar('T') + with self.assertRaises(TypeError): + List[Generic] + with self.assertRaises(TypeError): + Tuple[Generic[T]] + with self.assertRaises(TypeError): + List[typing._Protocol] + with self.assertRaises(TypeError): + isinstance(1, Generic) + + def test_type_erasure_special(self): + T = TypeVar('T') + # this is the only test that checks type caching + self.clear_caches() + class MyTup(Tuple[T, T]): ... + self.assertIs(MyTup[int]().__class__, MyTup) + self.assertIs(MyTup[int]().__orig_class__, MyTup[int]) + class MyCall(Callable[..., T]): + def __call__(self): return None + self.assertIs(MyCall[T]().__class__, MyCall) + self.assertIs(MyCall[T]().__orig_class__, MyCall[T]) + class MyDict(typing.Dict[T, T]): ... + self.assertIs(MyDict[int]().__class__, MyDict) + self.assertIs(MyDict[int]().__orig_class__, MyDict[int]) + class MyDef(typing.DefaultDict[str, T]): ... 
+ self.assertIs(MyDef[int]().__class__, MyDef) + self.assertIs(MyDef[int]().__orig_class__, MyDef[int]) + # ChainMap was added in 3.3 + if sys.version_info >= (3, 3): + class MyChain(typing.ChainMap[str, T]): ... + self.assertIs(MyChain[int]().__class__, MyChain) + self.assertIs(MyChain[int]().__orig_class__, MyChain[int]) + + def test_all_repr_eq_any(self): + objs = (getattr(typing, el) for el in typing.__all__) + for obj in objs: + self.assertNotEqual(repr(obj), '') + self.assertEqual(obj, obj) + if getattr(obj, '__parameters__', None) and len(obj.__parameters__) == 1: + self.assertEqual(obj[Any].__args__, (Any,)) + if isinstance(obj, type): + for base in obj.__mro__: + self.assertNotEqual(repr(base), '') + self.assertEqual(base, base) + + def test_substitution_helper(self): + T = TypeVar('T') + KT = TypeVar('KT') + VT = TypeVar('VT') + class Map(Generic[KT, VT]): + def meth(self, k: KT, v: VT): ... + StrMap = Map[str, T] + obj = StrMap[int]() + + new_args = typing._subs_tree(obj.__orig_class__) + new_annots = {k: typing._replace_arg(v, type(obj).__parameters__, new_args) + for k, v in obj.meth.__annotations__.items()} + + self.assertEqual(new_annots, {'k': str, 'v': int}) + + def test_pickle(self): + global C # pickle wants to reference the class by name + T = TypeVar('T') + + class B(Generic[T]): + pass + + class C(B[int]): + pass + + c = C() + c.foo = 42 + c.bar = 'abc' + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + z = pickle.dumps(c, proto) + x = pickle.loads(z) + self.assertEqual(x.foo, 42) + self.assertEqual(x.bar, 'abc') + self.assertEqual(x.__dict__, {'foo': 42, 'bar': 'abc'}) + simples = [Any, Union, Tuple, Callable, ClassVar, List, typing.Iterable] + for s in simples: + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + z = pickle.dumps(s, proto) + x = pickle.loads(z) + self.assertEqual(s, x) + + def test_copy_and_deepcopy(self): + T = TypeVar('T') + class Node(Generic[T]): ... + things = [Union[T, int], Tuple[T, int], Callable[..., T], Callable[[int], int], + Tuple[Any, Any], Node[T], Node[int], Node[Any], typing.Iterable[T], + typing.Iterable[Any], typing.Iterable[int], typing.Dict[int, str], + typing.Dict[T, Any], ClassVar[int], ClassVar[List[T]], Tuple['T', 'T'], + Union['T', int], List['T'], typing.Mapping['T', int]] + for t in things + [Any]: + self.assertEqual(t, copy(t)) + self.assertEqual(t, deepcopy(t)) + + def test_weakref_all(self): + T = TypeVar('T') + things = [Any, Union[T, int], Callable[..., T], Tuple[Any, Any], + Optional[List[int]], typing.Mapping[int, str], + typing.re.Match[bytes], typing.Iterable['whatever']] + for t in things: + self.assertEqual(weakref.ref(t)(), t) + + def test_parameterized_slots(self): + T = TypeVar('T') + class C(Generic[T]): + __slots__ = ('potato',) + + c = C() + c_int = C[int]() + self.assertEqual(C.__slots__, C[str].__slots__) + + c.potato = 0 + c_int.potato = 0 + with self.assertRaises(AttributeError): + c.tomato = 0 + with self.assertRaises(AttributeError): + c_int.tomato = 0 + + def foo(x: C['C']): ... 
+ self.assertEqual(get_type_hints(foo, globals(), locals())['x'], C[C]) + self.assertEqual(get_type_hints(foo, globals(), locals())['x'].__slots__, + C.__slots__) + self.assertEqual(copy(C[int]), deepcopy(C[int])) + + def test_parameterized_slots_dict(self): + T = TypeVar('T') + class D(Generic[T]): + __slots__ = {'banana': 42} + + d = D() + d_int = D[int]() + self.assertEqual(D.__slots__, D[str].__slots__) + + d.banana = 'yes' + d_int.banana = 'yes' + with self.assertRaises(AttributeError): + d.foobar = 'no' + with self.assertRaises(AttributeError): + d_int.foobar = 'no' + + def test_errors(self): + with self.assertRaises(TypeError): + B = SimpleMapping[XK, Any] + + class C(Generic[B]): + pass + + def test_repr_2(self): + PY32 = sys.version_info[:2] < (3, 3) + + class C(Generic[T]): + pass + + self.assertEqual(C.__module__, __name__) + if not PY32: + self.assertEqual(C.__qualname__, + 'GenericTests.test_repr_2..C') + self.assertEqual(repr(C).split('.')[-1], 'C') + X = C[int] + self.assertEqual(X.__module__, __name__) + if not PY32: + self.assertTrue(X.__qualname__.endswith('..C')) + self.assertEqual(repr(X).split('.')[-1], 'C[int]') + + class Y(C[int]): + pass + + self.assertEqual(Y.__module__, __name__) + if not PY32: + self.assertEqual(Y.__qualname__, + 'GenericTests.test_repr_2..Y') + self.assertEqual(repr(Y).split('.')[-1], 'Y') + + def test_eq_1(self): + self.assertEqual(Generic, Generic) + self.assertEqual(Generic[T], Generic[T]) + self.assertNotEqual(Generic[KT], Generic[VT]) + + def test_eq_2(self): + + class A(Generic[T]): + pass + + class B(Generic[T]): + pass + + self.assertEqual(A, A) + self.assertNotEqual(A, B) + self.assertEqual(A[T], A[T]) + self.assertNotEqual(A[T], B[T]) + + def test_multiple_inheritance(self): + + class A(Generic[T, VT]): + pass + + class B(Generic[KT, T]): + pass + + class C(A[T, VT], Generic[VT, T, KT], B[KT, T]): + pass + + self.assertEqual(C.__parameters__, (VT, T, KT)) + + def test_nested(self): + + G = Generic + + class Visitor(G[T]): + + a = None + + def set(self, a: T): + self.a = a + + def get(self): + return self.a + + def visit(self) -> T: + return self.a + + V = Visitor[typing.List[int]] + + class IntListVisitor(V): + + def append(self, x: int): + self.a.append(x) + + a = IntListVisitor() + a.set([]) + a.append(1) + a.append(42) + self.assertEqual(a.get(), [1, 42]) + + def test_type_erasure(self): + T = TypeVar('T') + + class Node(Generic[T]): + def __init__(self, label: T, + left: 'Node[T]' = None, + right: 'Node[T]' = None): + self.label = label # type: T + self.left = left # type: Optional[Node[T]] + self.right = right # type: Optional[Node[T]] + + def foo(x: T): + a = Node(x) + b = Node[T](x) + c = Node[Any](x) + self.assertIs(type(a), Node) + self.assertIs(type(b), Node) + self.assertIs(type(c), Node) + self.assertEqual(a.label, x) + self.assertEqual(b.label, x) + self.assertEqual(c.label, x) + + foo(42) + + def test_implicit_any(self): + T = TypeVar('T') + + class C(Generic[T]): + pass + + class D(C): + pass + + self.assertEqual(D.__parameters__, ()) + + with self.assertRaises(Exception): + D[int] + with self.assertRaises(Exception): + D[Any] + with self.assertRaises(Exception): + D[T] + + +class ClassVarTests(BaseTestCase): + + def test_basics(self): + with self.assertRaises(TypeError): + ClassVar[1] + with self.assertRaises(TypeError): + ClassVar[int, str] + with self.assertRaises(TypeError): + ClassVar[int][str] + + def test_repr(self): + self.assertEqual(repr(ClassVar), 'typing.ClassVar') + cv = ClassVar[int] + 
self.assertEqual(repr(cv), 'typing.ClassVar[int]') + cv = ClassVar[Employee] + self.assertEqual(repr(cv), 'typing.ClassVar[%s.Employee]' % __name__) + + def test_cannot_subclass(self): + with self.assertRaises(TypeError): + class C(type(ClassVar)): + pass + with self.assertRaises(TypeError): + class C(type(ClassVar[int])): + pass + + def test_cannot_init(self): + with self.assertRaises(TypeError): + ClassVar() + with self.assertRaises(TypeError): + type(ClassVar)() + with self.assertRaises(TypeError): + type(ClassVar[Optional[int]])() + + def test_no_isinstance(self): + with self.assertRaises(TypeError): + isinstance(1, ClassVar[int]) + with self.assertRaises(TypeError): + issubclass(int, ClassVar) + + +class CastTests(BaseTestCase): + + def test_basics(self): + self.assertEqual(cast(int, 42), 42) + self.assertEqual(cast(float, 42), 42) + self.assertIs(type(cast(float, 42)), int) + self.assertEqual(cast(Any, 42), 42) + self.assertEqual(cast(list, 42), 42) + self.assertEqual(cast(Union[str, float], 42), 42) + self.assertEqual(cast(AnyStr, 42), 42) + self.assertEqual(cast(None, 42), 42) + + def test_errors(self): + # Bogus calls are not expected to fail. + cast(42, 42) + cast('hello', 42) + + +class ForwardRefTests(BaseTestCase): + + def test_basics(self): + + class Node(Generic[T]): + + def __init__(self, label: T): + self.label = label + self.left = self.right = None + + def add_both(self, + left: 'Optional[Node[T]]', + right: 'Node[T]' = None, + stuff: int = None, + blah=None): + self.left = left + self.right = right + + def add_left(self, node: Optional['Node[T]']): + self.add_both(node, None) + + def add_right(self, node: 'Node[T]' = None): + self.add_both(None, node) + + t = Node[int] + both_hints = get_type_hints(t.add_both, globals(), locals()) + self.assertEqual(both_hints['left'], Optional[Node[T]]) + self.assertEqual(both_hints['right'], Optional[Node[T]]) + self.assertEqual(both_hints['left'], both_hints['right']) + self.assertEqual(both_hints['stuff'], Optional[int]) + self.assertNotIn('blah', both_hints) + + left_hints = get_type_hints(t.add_left, globals(), locals()) + self.assertEqual(left_hints['node'], Optional[Node[T]]) + + right_hints = get_type_hints(t.add_right, globals(), locals()) + self.assertEqual(right_hints['node'], Optional[Node[T]]) + + def test_forwardref_instance_type_error(self): + fr = typing._ForwardRef('int') + with self.assertRaises(TypeError): + isinstance(42, fr) + + def test_forwardref_subclass_type_error(self): + fr = typing._ForwardRef('int') + with self.assertRaises(TypeError): + issubclass(int, fr) + + def test_forward_equality(self): + fr = typing._ForwardRef('int') + self.assertEqual(fr, typing._ForwardRef('int')) + self.assertNotEqual(List['int'], List[int]) + + def test_forward_repr(self): + self.assertEqual(repr(List['int']), "typing.List[_ForwardRef('int')]") + + def test_union_forward(self): + + def foo(a: Union['T']): + pass + + self.assertEqual(get_type_hints(foo, globals(), locals()), + {'a': Union[T]}) + + def test_tuple_forward(self): + + def foo(a: Tuple['T']): + pass + + self.assertEqual(get_type_hints(foo, globals(), locals()), + {'a': Tuple[T]}) + + def test_callable_forward(self): + + def foo(a: Callable[['T'], 'T']): + pass + + self.assertEqual(get_type_hints(foo, globals(), locals()), + {'a': Callable[[T], T]}) + + def test_callable_with_ellipsis_forward(self): + + def foo(a: 'Callable[..., T]'): + pass + + self.assertEqual(get_type_hints(foo, globals(), locals()), + {'a': Callable[..., T]}) + + def test_syntax_error(self): + + 
with self.assertRaises(SyntaxError): + Generic['/T'] + + def test_delayed_syntax_error(self): + + def foo(a: 'Node[T'): + pass + + with self.assertRaises(SyntaxError): + get_type_hints(foo) + + def test_type_error(self): + + def foo(a: Tuple['42']): + pass + + with self.assertRaises(TypeError): + get_type_hints(foo) + + def test_name_error(self): + + def foo(a: 'Noode[T]'): + pass + + with self.assertRaises(NameError): + get_type_hints(foo, locals()) + + def test_no_type_check(self): + + @no_type_check + def foo(a: 'whatevers') -> {}: + pass + + th = get_type_hints(foo) + self.assertEqual(th, {}) + + def test_no_type_check_class(self): + + @no_type_check + class C: + def foo(a: 'whatevers') -> {}: + pass + + cth = get_type_hints(C.foo) + self.assertEqual(cth, {}) + ith = get_type_hints(C().foo) + self.assertEqual(ith, {}) + + def test_no_type_check_no_bases(self): + class C: + def meth(self, x: int): ... + @no_type_check + class D(C): + c = C + # verify that @no_type_check never affects bases + self.assertEqual(get_type_hints(C.meth), {'x': int}) + + def test_meta_no_type_check(self): + + @no_type_check_decorator + def magic_decorator(deco): + return deco + + self.assertEqual(magic_decorator.__name__, 'magic_decorator') + + @magic_decorator + def foo(a: 'whatevers') -> {}: + pass + + @magic_decorator + class C: + def foo(a: 'whatevers') -> {}: + pass + + self.assertEqual(foo.__name__, 'foo') + th = get_type_hints(foo) + self.assertEqual(th, {}) + cth = get_type_hints(C.foo) + self.assertEqual(cth, {}) + ith = get_type_hints(C().foo) + self.assertEqual(ith, {}) + + def test_default_globals(self): + code = ("class C:\n" + " def foo(self, a: 'C') -> 'D': pass\n" + "class D:\n" + " def bar(self, b: 'D') -> C: pass\n" + ) + ns = {} + exec(code, ns) + hints = get_type_hints(ns['C'].foo) + self.assertEqual(hints, {'a': ns['C'], 'return': ns['D']}) + + +class OverloadTests(BaseTestCase): + + def test_overload_fails(self): + from typing import overload + + with self.assertRaises(RuntimeError): + + @overload + def blah(): + pass + + blah() + + def test_overload_succeeds(self): + from typing import overload + + @overload + def blah(): + pass + + def blah(): + pass + + blah() + + +ASYNCIO = sys.version_info[:2] >= (3, 5) + +ASYNCIO_TESTS = """ +import asyncio + +T_a = TypeVar('T_a') + +class AwaitableWrapper(typing.Awaitable[T_a]): + + def __init__(self, value): + self.value = value + + def __await__(self) -> typing.Iterator[T_a]: + yield + return self.value + +class AsyncIteratorWrapper(typing.AsyncIterator[T_a]): + + def __init__(self, value: typing.Iterable[T_a]): + self.value = value + + def __aiter__(self) -> typing.AsyncIterator[T_a]: + return self + + @asyncio.coroutine + def __anext__(self) -> T_a: + data = yield from self.value + if data: + return data + else: + raise StopAsyncIteration +""" + +if ASYNCIO: + try: + exec(ASYNCIO_TESTS) + except ImportError: + ASYNCIO = False +else: + # fake names for the sake of static analysis + asyncio = None + AwaitableWrapper = AsyncIteratorWrapper = object + +PY36 = sys.version_info[:2] >= (3, 6) + +PY36_TESTS = """ +from test import ann_module, ann_module2, ann_module3 + +class A: + y: float +class B(A): + x: ClassVar[Optional['B']] = None + y: int + b: int +class CSub(B): + z: ClassVar['CSub'] = B() +class G(Generic[T]): + lst: ClassVar[List[T]] = [] + +class NoneAndForward: + parent: 'NoneAndForward' + meaning: None + +class CoolEmployee(NamedTuple): + name: str + cool: int + +class CoolEmployeeWithDefault(NamedTuple): + name: str + cool: int = 0 + 
+class XMeth(NamedTuple): + x: int + def double(self): + return 2 * self.x + +class XRepr(NamedTuple): + x: int + y: int = 1 + def __str__(self): + return f'{self.x} -> {self.y}' + def __add__(self, other): + return 0 +""" + +if PY36: + exec(PY36_TESTS) +else: + # fake names for the sake of static analysis + ann_module = ann_module2 = ann_module3 = None + A = B = CSub = G = CoolEmployee = CoolEmployeeWithDefault = object + XMeth = XRepr = NoneAndForward = object + +gth = get_type_hints + + +class GetTypeHintTests(BaseTestCase): + def test_get_type_hints_from_various_objects(self): + # For invalid objects should fail with TypeError (not AttributeError etc). + with self.assertRaises(TypeError): + gth(123) + with self.assertRaises(TypeError): + gth('abc') + with self.assertRaises(TypeError): + gth(None) + + @skipUnless(PY36, 'Python 3.6 required') + def test_get_type_hints_modules(self): + ann_module_type_hints = {1: 2, 'f': Tuple[int, int], 'x': int, 'y': str} + self.assertEqual(gth(ann_module), ann_module_type_hints) + self.assertEqual(gth(ann_module2), {}) + self.assertEqual(gth(ann_module3), {}) + + @skipUnless(PY36, 'Python 3.6 required') + def test_get_type_hints_classes(self): + self.assertEqual(gth(ann_module.C, ann_module.__dict__), + {'y': Optional[ann_module.C]}) + self.assertIsInstance(gth(ann_module.j_class), dict) + self.assertEqual(gth(ann_module.M), {'123': 123, 'o': type}) + self.assertEqual(gth(ann_module.D), + {'j': str, 'k': str, 'y': Optional[ann_module.C]}) + self.assertEqual(gth(ann_module.Y), {'z': int}) + self.assertEqual(gth(ann_module.h_class), + {'y': Optional[ann_module.C]}) + self.assertEqual(gth(ann_module.S), {'x': str, 'y': str}) + self.assertEqual(gth(ann_module.foo), {'x': int}) + self.assertEqual(gth(NoneAndForward, globals()), + {'parent': NoneAndForward, 'meaning': type(None)}) + + @skipUnless(PY36, 'Python 3.6 required') + def test_respect_no_type_check(self): + @no_type_check + class NoTpCheck: + class Inn: + def __init__(self, x: 'not a type'): ... + self.assertTrue(NoTpCheck.__no_type_check__) + self.assertTrue(NoTpCheck.Inn.__init__.__no_type_check__) + self.assertEqual(gth(ann_module2.NTC.meth), {}) + class ABase(Generic[T]): + def meth(x: int): ... + @no_type_check + class Der(ABase): ... + self.assertEqual(gth(ABase.meth), {'x': int}) + + def test_get_type_hints_for_builtins(self): + # Should not fail for built-in classes and functions. + self.assertEqual(gth(int), {}) + self.assertEqual(gth(type), {}) + self.assertEqual(gth(dir), {}) + self.assertEqual(gth(len), {}) + self.assertEqual(gth(object.__str__), {}) + self.assertEqual(gth(object().__str__), {}) + self.assertEqual(gth(str.join), {}) + + def test_previous_behavior(self): + def testf(x, y): ... + testf.__annotations__['x'] = 'int' + self.assertEqual(gth(testf), {'x': int}) + def testg(x: None): ... + self.assertEqual(gth(testg), {'x': type(None)}) + + def test_get_type_hints_for_object_with_annotations(self): + class A: ... + class B: ... 
+ b = B() + b.__annotations__ = {'x': 'A'} + self.assertEqual(gth(b, locals()), {'x': A}) + + @skipUnless(PY36, 'Python 3.6 required') + def test_get_type_hints_ClassVar(self): + self.assertEqual(gth(ann_module2.CV, ann_module2.__dict__), + {'var': typing.ClassVar[ann_module2.CV]}) + self.assertEqual(gth(B, globals()), + {'y': int, 'x': ClassVar[Optional[B]], 'b': int}) + self.assertEqual(gth(CSub, globals()), + {'z': ClassVar[CSub], 'y': int, 'b': int, + 'x': ClassVar[Optional[B]]}) + self.assertEqual(gth(G), {'lst': ClassVar[List[T]]}) + + +class CollectionsAbcTests(BaseTestCase): + + def test_hashable(self): + self.assertIsInstance(42, typing.Hashable) + self.assertNotIsInstance([], typing.Hashable) + + def test_iterable(self): + self.assertIsInstance([], typing.Iterable) + # Due to ABC caching, the second time takes a separate code + # path and could fail. So call this a few times. + self.assertIsInstance([], typing.Iterable) + self.assertIsInstance([], typing.Iterable) + self.assertNotIsInstance(42, typing.Iterable) + # Just in case, also test issubclass() a few times. + self.assertIsSubclass(list, typing.Iterable) + self.assertIsSubclass(list, typing.Iterable) + + def test_iterator(self): + it = iter([]) + self.assertIsInstance(it, typing.Iterator) + self.assertNotIsInstance(42, typing.Iterator) + + @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required') + def test_awaitable(self): + ns = {} + exec( + "async def foo() -> typing.Awaitable[int]:\n" + " return await AwaitableWrapper(42)\n", + globals(), ns) + foo = ns['foo'] + g = foo() + self.assertIsInstance(g, typing.Awaitable) + self.assertNotIsInstance(foo, typing.Awaitable) + g.send(None) # Run foo() till completion, to avoid warning. + + @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required') + def test_coroutine(self): + ns = {} + exec( + "async def foo():\n" + " return\n", + globals(), ns) + foo = ns['foo'] + g = foo() + self.assertIsInstance(g, typing.Coroutine) + with self.assertRaises(TypeError): + isinstance(g, typing.Coroutine[int]) + self.assertNotIsInstance(foo, typing.Coroutine) + try: + g.send(None) + except StopIteration: + pass + + @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required') + def test_async_iterable(self): + base_it = range(10) # type: Iterator[int] + it = AsyncIteratorWrapper(base_it) + self.assertIsInstance(it, typing.AsyncIterable) + self.assertIsInstance(it, typing.AsyncIterable) + self.assertNotIsInstance(42, typing.AsyncIterable) + + @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required') + def test_async_iterator(self): + base_it = range(10) # type: Iterator[int] + it = AsyncIteratorWrapper(base_it) + self.assertIsInstance(it, typing.AsyncIterator) + self.assertNotIsInstance(42, typing.AsyncIterator) + + def test_sized(self): + self.assertIsInstance([], typing.Sized) + self.assertNotIsInstance(42, typing.Sized) + + def test_container(self): + self.assertIsInstance([], typing.Container) + self.assertNotIsInstance(42, typing.Container) + + def test_collection(self): + if hasattr(typing, 'Collection'): + self.assertIsInstance(tuple(), typing.Collection) + self.assertIsInstance(frozenset(), typing.Collection) + self.assertIsSubclass(dict, typing.Collection) + self.assertNotIsInstance(42, typing.Collection) + + def test_abstractset(self): + self.assertIsInstance(set(), typing.AbstractSet) + self.assertNotIsInstance(42, typing.AbstractSet) + + def test_mutableset(self): + self.assertIsInstance(set(), typing.MutableSet) + self.assertNotIsInstance(frozenset(), 
typing.MutableSet) + + def test_mapping(self): + self.assertIsInstance({}, typing.Mapping) + self.assertNotIsInstance(42, typing.Mapping) + + def test_mutablemapping(self): + self.assertIsInstance({}, typing.MutableMapping) + self.assertNotIsInstance(42, typing.MutableMapping) + + def test_sequence(self): + self.assertIsInstance([], typing.Sequence) + self.assertNotIsInstance(42, typing.Sequence) + + def test_mutablesequence(self): + self.assertIsInstance([], typing.MutableSequence) + self.assertNotIsInstance((), typing.MutableSequence) + + def test_bytestring(self): + self.assertIsInstance(b'', typing.ByteString) + self.assertIsInstance(bytearray(b''), typing.ByteString) + + def test_list(self): + self.assertIsSubclass(list, typing.List) + + def test_deque(self): + self.assertIsSubclass(collections.deque, typing.Deque) + class MyDeque(typing.Deque[int]): ... + self.assertIsInstance(MyDeque(), collections.deque) + + def test_counter(self): + self.assertIsSubclass(collections.Counter, typing.Counter) + + def test_set(self): + self.assertIsSubclass(set, typing.Set) + self.assertNotIsSubclass(frozenset, typing.Set) + + def test_frozenset(self): + self.assertIsSubclass(frozenset, typing.FrozenSet) + self.assertNotIsSubclass(set, typing.FrozenSet) + + def test_dict(self): + self.assertIsSubclass(dict, typing.Dict) + + def test_no_list_instantiation(self): + with self.assertRaises(TypeError): + typing.List() + with self.assertRaises(TypeError): + typing.List[T]() + with self.assertRaises(TypeError): + typing.List[int]() + + def test_list_subclass(self): + + class MyList(typing.List[int]): + pass + + a = MyList() + self.assertIsInstance(a, MyList) + self.assertIsInstance(a, typing.Sequence) + + self.assertIsSubclass(MyList, list) + self.assertNotIsSubclass(list, MyList) + + def test_no_dict_instantiation(self): + with self.assertRaises(TypeError): + typing.Dict() + with self.assertRaises(TypeError): + typing.Dict[KT, VT]() + with self.assertRaises(TypeError): + typing.Dict[str, int]() + + def test_dict_subclass(self): + + class MyDict(typing.Dict[str, int]): + pass + + d = MyDict() + self.assertIsInstance(d, MyDict) + self.assertIsInstance(d, typing.MutableMapping) + + self.assertIsSubclass(MyDict, dict) + self.assertNotIsSubclass(dict, MyDict) + + def test_defaultdict_instantiation(self): + self.assertIs(type(typing.DefaultDict()), collections.defaultdict) + self.assertIs(type(typing.DefaultDict[KT, VT]()), collections.defaultdict) + self.assertIs(type(typing.DefaultDict[str, int]()), collections.defaultdict) + + def test_defaultdict_subclass(self): + + class MyDefDict(typing.DefaultDict[str, int]): + pass + + dd = MyDefDict() + self.assertIsInstance(dd, MyDefDict) + + self.assertIsSubclass(MyDefDict, collections.defaultdict) + self.assertNotIsSubclass(collections.defaultdict, MyDefDict) + + @skipUnless(sys.version_info >= (3, 3), 'ChainMap was added in 3.3') + def test_chainmap_instantiation(self): + self.assertIs(type(typing.ChainMap()), collections.ChainMap) + self.assertIs(type(typing.ChainMap[KT, VT]()), collections.ChainMap) + self.assertIs(type(typing.ChainMap[str, int]()), collections.ChainMap) + class CM(typing.ChainMap[KT, VT]): ... 
+ self.assertIs(type(CM[int, str]()), CM) + + @skipUnless(sys.version_info >= (3, 3), 'ChainMap was added in 3.3') + def test_chainmap_subclass(self): + + class MyChainMap(typing.ChainMap[str, int]): + pass + + cm = MyChainMap() + self.assertIsInstance(cm, MyChainMap) + + self.assertIsSubclass(MyChainMap, collections.ChainMap) + self.assertNotIsSubclass(collections.ChainMap, MyChainMap) + + def test_deque_instantiation(self): + self.assertIs(type(typing.Deque()), collections.deque) + self.assertIs(type(typing.Deque[T]()), collections.deque) + self.assertIs(type(typing.Deque[int]()), collections.deque) + class D(typing.Deque[T]): ... + self.assertIs(type(D[int]()), D) + + def test_counter_instantiation(self): + self.assertIs(type(typing.Counter()), collections.Counter) + self.assertIs(type(typing.Counter[T]()), collections.Counter) + self.assertIs(type(typing.Counter[int]()), collections.Counter) + class C(typing.Counter[T]): ... + self.assertIs(type(C[int]()), C) + + def test_counter_subclass_instantiation(self): + + class MyCounter(typing.Counter[int]): + pass + + d = MyCounter() + self.assertIsInstance(d, MyCounter) + self.assertIsInstance(d, typing.Counter) + self.assertIsInstance(d, collections.Counter) + + def test_no_set_instantiation(self): + with self.assertRaises(TypeError): + typing.Set() + with self.assertRaises(TypeError): + typing.Set[T]() + with self.assertRaises(TypeError): + typing.Set[int]() + + def test_set_subclass_instantiation(self): + + class MySet(typing.Set[int]): + pass + + d = MySet() + self.assertIsInstance(d, MySet) + + def test_no_frozenset_instantiation(self): + with self.assertRaises(TypeError): + typing.FrozenSet() + with self.assertRaises(TypeError): + typing.FrozenSet[T]() + with self.assertRaises(TypeError): + typing.FrozenSet[int]() + + def test_frozenset_subclass_instantiation(self): + + class MyFrozenSet(typing.FrozenSet[int]): + pass + + d = MyFrozenSet() + self.assertIsInstance(d, MyFrozenSet) + + def test_no_tuple_instantiation(self): + with self.assertRaises(TypeError): + Tuple() + with self.assertRaises(TypeError): + Tuple[T]() + with self.assertRaises(TypeError): + Tuple[int]() + + def test_generator(self): + def foo(): + yield 42 + g = foo() + self.assertIsSubclass(type(g), typing.Generator) + + def test_no_generator_instantiation(self): + with self.assertRaises(TypeError): + typing.Generator() + with self.assertRaises(TypeError): + typing.Generator[T, T, T]() + with self.assertRaises(TypeError): + typing.Generator[int, int, int]() + + @skipUnless(PY36, 'Python 3.6 required') + def test_async_generator(self): + ns = {} + exec("async def f():\n" + " yield 42\n", globals(), ns) + g = ns['f']() + self.assertIsSubclass(type(g), typing.AsyncGenerator) + + @skipUnless(PY36, 'Python 3.6 required') + def test_no_async_generator_instantiation(self): + with self.assertRaises(TypeError): + typing.AsyncGenerator() + with self.assertRaises(TypeError): + typing.AsyncGenerator[T, T]() + with self.assertRaises(TypeError): + typing.AsyncGenerator[int, int]() + + def test_subclassing(self): + + class MMA(typing.MutableMapping): + pass + + with self.assertRaises(TypeError): # It's abstract + MMA() + + class MMC(MMA): + def __getitem__(self, k): + return None + def __setitem__(self, k, v): + pass + def __delitem__(self, k): + pass + def __iter__(self): + return iter(()) + def __len__(self): + return 0 + + self.assertEqual(len(MMC()), 0) + assert callable(MMC.update) + self.assertIsInstance(MMC(), typing.Mapping) + + class MMB(typing.MutableMapping[KT, VT]): + def 
__getitem__(self, k): + return None + def __setitem__(self, k, v): + pass + def __delitem__(self, k): + pass + def __iter__(self): + return iter(()) + def __len__(self): + return 0 + + self.assertEqual(len(MMB()), 0) + self.assertEqual(len(MMB[str, str]()), 0) + self.assertEqual(len(MMB[KT, VT]()), 0) + + self.assertNotIsSubclass(dict, MMA) + self.assertNotIsSubclass(dict, MMB) + + self.assertIsSubclass(MMA, typing.Mapping) + self.assertIsSubclass(MMB, typing.Mapping) + self.assertIsSubclass(MMC, typing.Mapping) + + self.assertIsInstance(MMB[KT, VT](), typing.Mapping) + self.assertIsInstance(MMB[KT, VT](), collections.Mapping) + + self.assertIsSubclass(MMA, collections.Mapping) + self.assertIsSubclass(MMB, collections.Mapping) + self.assertIsSubclass(MMC, collections.Mapping) + + self.assertIsSubclass(MMB[str, str], typing.Mapping) + self.assertIsSubclass(MMC, MMA) + + class I(typing.Iterable): ... + self.assertNotIsSubclass(list, I) + + class G(typing.Generator[int, int, int]): ... + def g(): yield 0 + self.assertIsSubclass(G, typing.Generator) + self.assertIsSubclass(G, typing.Iterable) + if hasattr(collections, 'Generator'): + self.assertIsSubclass(G, collections.Generator) + self.assertIsSubclass(G, collections.Iterable) + self.assertNotIsSubclass(type(g), G) + + @skipUnless(PY36, 'Python 3.6 required') + def test_subclassing_async_generator(self): + class G(typing.AsyncGenerator[int, int]): + def asend(self, value): + pass + def athrow(self, typ, val=None, tb=None): + pass + + ns = {} + exec('async def g(): yield 0', globals(), ns) + g = ns['g'] + self.assertIsSubclass(G, typing.AsyncGenerator) + self.assertIsSubclass(G, typing.AsyncIterable) + self.assertIsSubclass(G, collections.AsyncGenerator) + self.assertIsSubclass(G, collections.AsyncIterable) + self.assertNotIsSubclass(type(g), G) + + instance = G() + self.assertIsInstance(instance, typing.AsyncGenerator) + self.assertIsInstance(instance, typing.AsyncIterable) + self.assertIsInstance(instance, collections.AsyncGenerator) + self.assertIsInstance(instance, collections.AsyncIterable) + self.assertNotIsInstance(type(g), G) + self.assertNotIsInstance(g, G) + + def test_subclassing_subclasshook(self): + + class Base(typing.Iterable): + @classmethod + def __subclasshook__(cls, other): + if other.__name__ == 'Foo': + return True + else: + return False + + class C(Base): ... + class Foo: ... + class Bar: ... + self.assertIsSubclass(Foo, Base) + self.assertIsSubclass(Foo, C) + self.assertNotIsSubclass(Bar, C) + + def test_subclassing_register(self): + + class A(typing.Container): ... + class B(A): ... + + class C: ... + A.register(C) + self.assertIsSubclass(C, A) + self.assertNotIsSubclass(C, B) + + class D: ... + B.register(D) + self.assertIsSubclass(D, A) + self.assertIsSubclass(D, B) + + class M(): ... + collections.MutableMapping.register(M) + self.assertIsSubclass(M, typing.Mapping) + + def test_collections_as_base(self): + + class M(collections.Mapping): ... + self.assertIsSubclass(M, typing.Mapping) + self.assertIsSubclass(M, typing.Iterable) + + class S(collections.MutableSequence): ... + self.assertIsSubclass(S, typing.MutableSequence) + self.assertIsSubclass(S, typing.Iterable) + + class I(collections.Iterable): ... + self.assertIsSubclass(I, typing.Iterable) + + class A(collections.Mapping, metaclass=abc.ABCMeta): ... + class B: ... 
+ A.register(B) + self.assertIsSubclass(B, typing.Mapping) + + +class OtherABCTests(BaseTestCase): + + @skipUnless(hasattr(typing, 'ContextManager'), + 'requires typing.ContextManager') + def test_contextmanager(self): + @contextlib.contextmanager + def manager(): + yield 42 + + cm = manager() + self.assertIsInstance(cm, typing.ContextManager) + self.assertNotIsInstance(42, typing.ContextManager) + + +class TypeTests(BaseTestCase): + + def test_type_basic(self): + + class User: pass + class BasicUser(User): pass + class ProUser(User): pass + + def new_user(user_class: Type[User]) -> User: + return user_class() + + new_user(BasicUser) + + def test_type_typevar(self): + + class User: pass + class BasicUser(User): pass + class ProUser(User): pass + + U = TypeVar('U', bound=User) + + def new_user(user_class: Type[U]) -> U: + return user_class() + + new_user(BasicUser) + + def test_type_optional(self): + A = Optional[Type[BaseException]] + + def foo(a: A) -> Optional[BaseException]: + if a is None: + return None + else: + return a() + + assert isinstance(foo(KeyboardInterrupt), KeyboardInterrupt) + assert foo(None) is None + + +class NewTypeTests(BaseTestCase): + + def test_basic(self): + UserId = NewType('UserId', int) + UserName = NewType('UserName', str) + self.assertIsInstance(UserId(5), int) + self.assertIsInstance(UserName('Joe'), str) + self.assertEqual(UserId(5) + 1, 6) + + def test_errors(self): + UserId = NewType('UserId', int) + UserName = NewType('UserName', str) + with self.assertRaises(TypeError): + issubclass(UserId, int) + with self.assertRaises(TypeError): + class D(UserName): + pass + + +class NamedTupleTests(BaseTestCase): + + def test_basics(self): + Emp = NamedTuple('Emp', [('name', str), ('id', int)]) + self.assertIsSubclass(Emp, tuple) + joe = Emp('Joe', 42) + jim = Emp(name='Jim', id=1) + self.assertIsInstance(joe, Emp) + self.assertIsInstance(joe, tuple) + self.assertEqual(joe.name, 'Joe') + self.assertEqual(joe.id, 42) + self.assertEqual(jim.name, 'Jim') + self.assertEqual(jim.id, 1) + self.assertEqual(Emp.__name__, 'Emp') + self.assertEqual(Emp._fields, ('name', 'id')) + self.assertEqual(Emp.__annotations__, + collections.OrderedDict([('name', str), ('id', int)])) + self.assertIs(Emp._field_types, Emp.__annotations__) + + def test_namedtuple_pyversion(self): + if sys.version_info[:2] < (3, 6): + with self.assertRaises(TypeError): + NamedTuple('Name', one=int, other=str) + with self.assertRaises(TypeError): + class NotYet(NamedTuple): + whatever = 0 + + @skipUnless(PY36, 'Python 3.6 required') + def test_annotation_usage(self): + tim = CoolEmployee('Tim', 9000) + self.assertIsInstance(tim, CoolEmployee) + self.assertIsInstance(tim, tuple) + self.assertEqual(tim.name, 'Tim') + self.assertEqual(tim.cool, 9000) + self.assertEqual(CoolEmployee.__name__, 'CoolEmployee') + self.assertEqual(CoolEmployee._fields, ('name', 'cool')) + self.assertEqual(CoolEmployee.__annotations__, + collections.OrderedDict(name=str, cool=int)) + self.assertIs(CoolEmployee._field_types, CoolEmployee.__annotations__) + + @skipUnless(PY36, 'Python 3.6 required') + def test_annotation_usage_with_default(self): + jelle = CoolEmployeeWithDefault('Jelle') + self.assertIsInstance(jelle, CoolEmployeeWithDefault) + self.assertIsInstance(jelle, tuple) + self.assertEqual(jelle.name, 'Jelle') + self.assertEqual(jelle.cool, 0) + cooler_employee = CoolEmployeeWithDefault('Sjoerd', 1) + self.assertEqual(cooler_employee.cool, 1) + + self.assertEqual(CoolEmployeeWithDefault.__name__, 'CoolEmployeeWithDefault') + 
self.assertEqual(CoolEmployeeWithDefault._fields, ('name', 'cool')) + self.assertEqual(CoolEmployeeWithDefault._field_types, dict(name=str, cool=int)) + self.assertEqual(CoolEmployeeWithDefault._field_defaults, dict(cool=0)) + + with self.assertRaises(TypeError): + exec(""" +class NonDefaultAfterDefault(NamedTuple): + x: int = 3 + y: int +""") + + @skipUnless(PY36, 'Python 3.6 required') + def test_annotation_usage_with_methods(self): + self.assertEqual(XMeth(1).double(), 2) + self.assertEqual(XMeth(42).x, XMeth(42)[0]) + self.assertEqual(str(XRepr(42)), '42 -> 1') + self.assertEqual(XRepr(1, 2) + XRepr(3), 0) + + with self.assertRaises(AttributeError): + exec(""" +class XMethBad(NamedTuple): + x: int + def _fields(self): + return 'no chance for this' +""") + + @skipUnless(PY36, 'Python 3.6 required') + def test_namedtuple_keyword_usage(self): + LocalEmployee = NamedTuple("LocalEmployee", name=str, age=int) + nick = LocalEmployee('Nick', 25) + self.assertIsInstance(nick, tuple) + self.assertEqual(nick.name, 'Nick') + self.assertEqual(LocalEmployee.__name__, 'LocalEmployee') + self.assertEqual(LocalEmployee._fields, ('name', 'age')) + self.assertEqual(LocalEmployee.__annotations__, dict(name=str, age=int)) + self.assertIs(LocalEmployee._field_types, LocalEmployee.__annotations__) + with self.assertRaises(TypeError): + NamedTuple('Name', [('x', int)], y=str) + with self.assertRaises(TypeError): + NamedTuple('Name', x=1, y='a') + + def test_pickle(self): + global Emp # pickle wants to reference the class by name + Emp = NamedTuple('Emp', [('name', str), ('id', int)]) + jane = Emp('jane', 37) + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + z = pickle.dumps(jane, proto) + jane2 = pickle.loads(z) + self.assertEqual(jane2, jane) + + +class IOTests(BaseTestCase): + + def test_io(self): + + def stuff(a: IO) -> AnyStr: + return a.readline() + + a = stuff.__annotations__['a'] + self.assertEqual(a.__parameters__, (AnyStr,)) + + def test_textio(self): + + def stuff(a: TextIO) -> str: + return a.readline() + + a = stuff.__annotations__['a'] + self.assertEqual(a.__parameters__, ()) + + def test_binaryio(self): + + def stuff(a: BinaryIO) -> bytes: + return a.readline() + + a = stuff.__annotations__['a'] + self.assertEqual(a.__parameters__, ()) + + def test_io_submodule(self): + from typing.io import IO, TextIO, BinaryIO, __all__, __name__ + self.assertIs(IO, typing.IO) + self.assertIs(TextIO, typing.TextIO) + self.assertIs(BinaryIO, typing.BinaryIO) + self.assertEqual(set(__all__), set(['IO', 'TextIO', 'BinaryIO'])) + self.assertEqual(__name__, 'typing.io') + + +class RETests(BaseTestCase): + # Much of this is really testing _TypeAlias. + + def test_basics(self): + pat = re.compile('[a-z]+', re.I) + self.assertIsSubclass(pat.__class__, Pattern) + self.assertIsSubclass(type(pat), Pattern) + self.assertIsInstance(pat, Pattern) + + mat = pat.search('12345abcde.....') + self.assertIsSubclass(mat.__class__, Match) + self.assertIsSubclass(type(mat), Match) + self.assertIsInstance(mat, Match) + + # these should just work + Pattern[Union[str, bytes]] + Match[Union[bytes, str]] + + def test_alias_equality(self): + self.assertEqual(Pattern[str], Pattern[str]) + self.assertNotEqual(Pattern[str], Pattern[bytes]) + self.assertNotEqual(Pattern[str], Match[str]) + self.assertNotEqual(Pattern[str], str) + + def test_errors(self): + with self.assertRaises(TypeError): + # Doesn't fit AnyStr. + Pattern[int] + with self.assertRaises(TypeError): + # Can't change type vars? 
+ Match[T] + m = Match[Union[str, bytes]] + with self.assertRaises(TypeError): + # Too complicated? + m[str] + with self.assertRaises(TypeError): + # We don't support isinstance(). + isinstance(42, Pattern[str]) + with self.assertRaises(TypeError): + # We don't support issubclass(). + issubclass(Pattern[bytes], Pattern[str]) + + def test_repr(self): + self.assertEqual(repr(Pattern), 'Pattern[~AnyStr]') + self.assertEqual(repr(Pattern[str]), 'Pattern[str]') + self.assertEqual(repr(Pattern[bytes]), 'Pattern[bytes]') + self.assertEqual(repr(Match), 'Match[~AnyStr]') + self.assertEqual(repr(Match[str]), 'Match[str]') + self.assertEqual(repr(Match[bytes]), 'Match[bytes]') + + def test_re_submodule(self): + from typing.re import Match, Pattern, __all__, __name__ + self.assertIs(Match, typing.Match) + self.assertIs(Pattern, typing.Pattern) + self.assertEqual(set(__all__), set(['Match', 'Pattern'])) + self.assertEqual(__name__, 'typing.re') + + def test_cannot_subclass(self): + with self.assertRaises(TypeError) as ex: + + class A(typing.Match): + pass + + self.assertEqual(str(ex.exception), + "Cannot subclass typing._TypeAlias") + + +class AllTests(BaseTestCase): + """Tests for __all__.""" + + def test_all(self): + from typing import __all__ as a + # Just spot-check the first and last of every category. + self.assertIn('AbstractSet', a) + self.assertIn('ValuesView', a) + self.assertIn('cast', a) + self.assertIn('overload', a) + if hasattr(contextlib, 'AbstractContextManager'): + self.assertIn('ContextManager', a) + # Check that io and re are not exported. + self.assertNotIn('io', a) + self.assertNotIn('re', a) + # Spot-check that stdlib modules aren't exported. + self.assertNotIn('os', a) + self.assertNotIn('sys', a) + # Check that Text is defined. + self.assertIn('Text', a) + + +if __name__ == '__main__': + main() diff --git a/lib-typing/3.2/typing.py b/lib-typing/3.2/typing.py new file mode 100644 index 000000000000..9a0f49099a31 --- /dev/null +++ b/lib-typing/3.2/typing.py @@ -0,0 +1,2335 @@ +import abc +from abc import abstractmethod, abstractproperty +import collections +import contextlib +import functools +import re as stdlib_re # Avoid confusion with the re we export. +import sys +import types +try: + import collections.abc as collections_abc +except ImportError: + import collections as collections_abc # Fallback for PY3.2. +try: + from types import SlotWrapperType, MethodWrapperType, MethodDescriptorType +except ImportError: + SlotWrapperType = type(object.__init__) + MethodWrapperType = type(object().__str__) + MethodDescriptorType = type(str.join) + + +# Please keep __all__ alphabetized within each category. +__all__ = [ + # Super-special typing primitives. + 'Any', + 'Callable', + 'ClassVar', + 'Generic', + 'Optional', + 'Tuple', + 'Type', + 'TypeVar', + 'Union', + + # ABCs (from collections.abc). + 'AbstractSet', # collections.abc.Set. + 'GenericMeta', # subclass of abc.ABCMeta and a metaclass + # for 'Generic' and ABCs below. + 'ByteString', + 'Container', + 'Hashable', + 'ItemsView', + 'Iterable', + 'Iterator', + 'KeysView', + 'Mapping', + 'MappingView', + 'MutableMapping', + 'MutableSequence', + 'MutableSet', + 'Sequence', + 'Sized', + 'ValuesView', + # The following are added depending on presence + # of their non-generic counterparts in stdlib: + # Awaitable, + # AsyncIterator, + # AsyncIterable, + # Coroutine, + # Collection, + # ContextManager, + # AsyncGenerator, + + # Structural checks, a.k.a. protocols. 
+ 'Reversible', + 'SupportsAbs', + 'SupportsFloat', + 'SupportsInt', + 'SupportsRound', + + # Concrete collection types. + 'Counter', + 'Deque', + 'Dict', + 'DefaultDict', + 'List', + 'Set', + 'FrozenSet', + 'NamedTuple', # Not really a type. + 'Generator', + + # One-off things. + 'AnyStr', + 'cast', + 'get_type_hints', + 'NewType', + 'no_type_check', + 'no_type_check_decorator', + 'overload', + 'Text', + 'TYPE_CHECKING', +] + +# The pseudo-submodules 're' and 'io' are part of the public +# namespace, but excluded from __all__ because they might stomp on +# legitimate imports of those modules. + + +def _qualname(x): + if sys.version_info[:2] >= (3, 3): + return x.__qualname__ + else: + # Fall back to just name. + return x.__name__ + + +def _trim_name(nm): + whitelist = ('_TypeAlias', '_ForwardRef', '_TypingBase', '_FinalTypingBase') + if nm.startswith('_') and nm not in whitelist: + nm = nm[1:] + return nm + + +class TypingMeta(type): + """Metaclass for most types defined in typing module + (not a part of public API). + + This overrides __new__() to require an extra keyword parameter + '_root', which serves as a guard against naive subclassing of the + typing classes. Any legitimate class defined using a metaclass + derived from TypingMeta must pass _root=True. + + This also defines a dummy constructor (all the work for most typing + constructs is done in __new__) and a nicer repr(). + """ + + _is_protocol = False + + def __new__(cls, name, bases, namespace, *, _root=False): + if not _root: + raise TypeError("Cannot subclass %s" % + (', '.join(map(_type_repr, bases)) or '()')) + return super().__new__(cls, name, bases, namespace) + + def __init__(self, *args, **kwds): + pass + + def _eval_type(self, globalns, localns): + """Override this in subclasses to interpret forward references. + + For example, List['C'] is internally stored as + List[_ForwardRef('C')], which should evaluate to List[C], + where C is an object found in globalns or localns (searching + localns first, of course). + """ + return self + + def _get_type_vars(self, tvars): + pass + + def __repr__(self): + qname = _trim_name(_qualname(self)) + return '%s.%s' % (self.__module__, qname) + + +class _TypingBase(metaclass=TypingMeta, _root=True): + """Internal indicator of special typing constructs.""" + + __slots__ = ('__weakref__',) + + def __init__(self, *args, **kwds): + pass + + def __new__(cls, *args, **kwds): + """Constructor. + + This only exists to give a better error message in case + someone tries to subclass a special typing object (not a good idea). + """ + if (len(args) == 3 and + isinstance(args[0], str) and + isinstance(args[1], tuple)): + # Close enough. + raise TypeError("Cannot subclass %r" % cls) + return super().__new__(cls) + + # Things that are not classes also need these. + def _eval_type(self, globalns, localns): + return self + + def _get_type_vars(self, tvars): + pass + + def __repr__(self): + cls = type(self) + qname = _trim_name(_qualname(cls)) + return '%s.%s' % (cls.__module__, qname) + + def __call__(self, *args, **kwds): + raise TypeError("Cannot instantiate %r" % type(self)) + + +class _FinalTypingBase(_TypingBase, _root=True): + """Internal mix-in class to prevent instantiation. + + Prevents instantiation unless _root=True is given in class call. + It is used to create pseudo-singleton instances Any, Union, Optional, etc. 
+ """ + + __slots__ = () + + def __new__(cls, *args, _root=False, **kwds): + self = super().__new__(cls, *args, **kwds) + if _root is True: + return self + raise TypeError("Cannot instantiate %r" % cls) + + def __reduce__(self): + return _trim_name(type(self).__name__) + + +class _ForwardRef(_TypingBase, _root=True): + """Internal wrapper to hold a forward reference.""" + + __slots__ = ('__forward_arg__', '__forward_code__', + '__forward_evaluated__', '__forward_value__') + + def __init__(self, arg): + super().__init__(arg) + if not isinstance(arg, str): + raise TypeError('Forward reference must be a string -- got %r' % (arg,)) + try: + code = compile(arg, '', 'eval') + except SyntaxError: + raise SyntaxError('Forward reference must be an expression -- got %r' % + (arg,)) + self.__forward_arg__ = arg + self.__forward_code__ = code + self.__forward_evaluated__ = False + self.__forward_value__ = None + + def _eval_type(self, globalns, localns): + if not self.__forward_evaluated__ or localns is not globalns: + if globalns is None and localns is None: + globalns = localns = {} + elif globalns is None: + globalns = localns + elif localns is None: + localns = globalns + self.__forward_value__ = _type_check( + eval(self.__forward_code__, globalns, localns), + "Forward references must evaluate to types.") + self.__forward_evaluated__ = True + return self.__forward_value__ + + def __eq__(self, other): + if not isinstance(other, _ForwardRef): + return NotImplemented + return (self.__forward_arg__ == other.__forward_arg__ and + self.__forward_value__ == other.__forward_value__) + + def __hash__(self): + return hash((self.__forward_arg__, self.__forward_value__)) + + def __instancecheck__(self, obj): + raise TypeError("Forward references cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Forward references cannot be used with issubclass().") + + def __repr__(self): + return '_ForwardRef(%r)' % (self.__forward_arg__,) + + +class _TypeAlias(_TypingBase, _root=True): + """Internal helper class for defining generic variants of concrete types. + + Note that this is not a type; let's call it a pseudo-type. It cannot + be used in instance and subclass checks in parameterized form, i.e. + ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning + ``False``. + """ + + __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') + + def __init__(self, name, type_var, impl_type, type_checker): + """Initializer. + + Args: + name: The name, e.g. 'Pattern'. + type_var: The type parameter, e.g. AnyStr, or the + specific type, e.g. str. + impl_type: The implementation type. + type_checker: Function that takes an impl_type instance. + and returns a value that should be a type_var instance. + """ + assert isinstance(name, str), repr(name) + assert isinstance(impl_type, type), repr(impl_type) + assert not isinstance(impl_type, TypingMeta), repr(impl_type) + assert isinstance(type_var, (type, _TypingBase)), repr(type_var) + self.name = name + self.type_var = type_var + self.impl_type = impl_type + self.type_checker = type_checker + + def __repr__(self): + return "%s[%s]" % (self.name, _type_repr(self.type_var)) + + def __getitem__(self, parameter): + if not isinstance(self.type_var, TypeVar): + raise TypeError("%s cannot be further parameterized." % self) + if self.type_var.__constraints__ and isinstance(parameter, type): + if not issubclass(parameter, self.type_var.__constraints__): + raise TypeError("%s is not a valid substitution for %s." 
% + (parameter, self.type_var)) + if isinstance(parameter, TypeVar) and parameter is not self.type_var: + raise TypeError("%s cannot be re-parameterized." % self) + return self.__class__(self.name, parameter, + self.impl_type, self.type_checker) + + def __eq__(self, other): + if not isinstance(other, _TypeAlias): + return NotImplemented + return self.name == other.name and self.type_var == other.type_var + + def __hash__(self): + return hash((self.name, self.type_var)) + + def __instancecheck__(self, obj): + if not isinstance(self.type_var, TypeVar): + raise TypeError("Parameterized type aliases cannot be used " + "with isinstance().") + return isinstance(obj, self.impl_type) + + def __subclasscheck__(self, cls): + if not isinstance(self.type_var, TypeVar): + raise TypeError("Parameterized type aliases cannot be used " + "with issubclass().") + return issubclass(cls, self.impl_type) + + +def _get_type_vars(types, tvars): + for t in types: + if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): + t._get_type_vars(tvars) + + +def _type_vars(types): + tvars = [] + _get_type_vars(types, tvars) + return tuple(tvars) + + +def _eval_type(t, globalns, localns): + if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): + return t._eval_type(globalns, localns) + return t + + +def _type_check(arg, msg): + """Check that the argument is a type, and return it (internal helper). + + As a special case, accept None and return type(None) instead. + Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. + + The msg argument is a human-readable error message, e.g. + + "Union[arg, ...]: arg should be a type." + + We append the repr() of the actual value (truncated to 100 chars). + """ + if arg is None: + return type(None) + if isinstance(arg, str): + arg = _ForwardRef(arg) + if ( + isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or + not isinstance(arg, (type, _TypingBase)) and not callable(arg) + ): + raise TypeError(msg + " Got %.100r." % (arg,)) + # Bare Union etc. are not valid as type arguments + if ( + type(arg).__name__ in ('_Union', '_Optional') and + not getattr(arg, '__origin__', None) or + isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol) + ): + raise TypeError("Plain %s is not valid as type argument" % arg) + return arg + + +def _type_repr(obj): + """Return the repr() of an object, special-casing types (internal helper). + + If obj is a type, we return a shorter version than the default + type.__repr__, based on the module and qualified name, which is + typically enough to uniquely identify a type. For everything + else, we fall back on repr(obj). + """ + if isinstance(obj, type) and not isinstance(obj, TypingMeta): + if obj.__module__ == 'builtins': + return _qualname(obj) + return '%s.%s' % (obj.__module__, _qualname(obj)) + if obj is ...: + return('...') + if isinstance(obj, types.FunctionType): + return obj.__name__ + return repr(obj) + + +class _Any(_FinalTypingBase, _root=True): + """Special type indicating an unconstrained type. + + - Any is compatible with every type. + - Any assumed to have all methods. + - All values assumed to be instances of Any. + + Note that all the above statements are true from the point of view of + static type checkers. At runtime, Any should not be used with instance + or class checks. 
+ """ + + __slots__ = () + + def __instancecheck__(self, obj): + raise TypeError("Any cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Any cannot be used with issubclass().") + + +Any = _Any(_root=True) + + +class TypeVar(_TypingBase, _root=True): + """Type variable. + + Usage:: + + T = TypeVar('T') # Can be anything + A = TypeVar('A', str, bytes) # Must be str or bytes + + Type variables exist primarily for the benefit of static type + checkers. They serve as the parameters for generic types as well + as for generic function definitions. See class Generic for more + information on generic types. Generic functions work as follows: + + def repeat(x: T, n: int) -> List[T]: + '''Return a list containing n references to x.''' + return [x]*n + + def longest(x: A, y: A) -> A: + '''Return the longest of two strings.''' + return x if len(x) >= len(y) else y + + The latter example's signature is essentially the overloading + of (str, str) -> str and (bytes, bytes) -> bytes. Also note + that if the arguments are instances of some subclass of str, + the return type is still plain str. + + At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError. + + Type variables defined with covariant=True or contravariant=True + can be used do declare covariant or contravariant generic types. + See PEP 484 for more details. By default generic types are invariant + in all type variables. + + Type variables can be introspected. e.g.: + + T.__name__ == 'T' + T.__constraints__ == () + T.__covariant__ == False + T.__contravariant__ = False + A.__constraints__ == (str, bytes) + """ + + __slots__ = ('__name__', '__bound__', '__constraints__', + '__covariant__', '__contravariant__') + + def __init__(self, name, *constraints, bound=None, + covariant=False, contravariant=False): + super().__init__(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant) + self.__name__ = name + if covariant and contravariant: + raise ValueError("Bivariant types are not supported.") + self.__covariant__ = bool(covariant) + self.__contravariant__ = bool(contravariant) + if constraints and bound is not None: + raise TypeError("Constraints cannot be combined with bound=...") + if constraints and len(constraints) == 1: + raise TypeError("A single constraint is not allowed") + msg = "TypeVar(name, constraint, ...): constraints must be types." + self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) + if bound: + self.__bound__ = _type_check(bound, "Bound must be a type.") + else: + self.__bound__ = None + + def _get_type_vars(self, tvars): + if self not in tvars: + tvars.append(self) + + def __repr__(self): + if self.__covariant__: + prefix = '+' + elif self.__contravariant__: + prefix = '-' + else: + prefix = '~' + return prefix + self.__name__ + + def __instancecheck__(self, instance): + raise TypeError("Type variables cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Type variables cannot be used with issubclass().") + + +# Some unconstrained type variables. These are used by the container types. +# (These are not for export.) +T = TypeVar('T') # Any type. +KT = TypeVar('KT') # Key type. +VT = TypeVar('VT') # Value type. +T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. +V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. +VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. 
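The TypeVar docstring above spells out the introspection attributes and the constraint rules enforced in __init__; a short sketch of both (illustrative only, not part of the patch; T, A and U are names local to the example):

    from typing import TypeVar

    T = TypeVar('T')                              # unconstrained
    A = TypeVar('A', str, bytes)                  # constrained to str or bytes
    U = TypeVar('U', bound=int, covariant=True)   # bounded and covariant

    assert T.__name__ == 'T' and T.__constraints__ == ()
    assert A.__constraints__ == (str, bytes)
    assert U.__bound__ is int and U.__covariant__ and not U.__contravariant__

    # Constraints cannot be combined with bound=..., as __init__ enforces.
    try:
        TypeVar('B', str, bytes, bound=int)
    except TypeError:
        pass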
+T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. + +# A useful type variable with constraints. This represents string types. +# (This one *is* for export!) +AnyStr = TypeVar('AnyStr', bytes, str) + + +def _replace_arg(arg, tvars, args): + """An internal helper function: replace arg if it is a type variable + found in tvars with corresponding substitution from args or + with corresponding substitution sub-tree if arg is a generic type. + """ + + if tvars is None: + tvars = [] + if hasattr(arg, '_subs_tree') and isinstance(arg, (GenericMeta, _TypingBase)): + return arg._subs_tree(tvars, args) + if isinstance(arg, TypeVar): + for i, tvar in enumerate(tvars): + if arg == tvar: + return args[i] + return arg + + +# Special typing constructs Union, Optional, Generic, Callable and Tuple +# use three special attributes for internal bookkeeping of generic types: +# * __parameters__ is a tuple of unique free type parameters of a generic +# type, for example, Dict[T, T].__parameters__ == (T,); +# * __origin__ keeps a reference to a type that was subscripted, +# e.g., Union[T, int].__origin__ == Union; +# * __args__ is a tuple of all arguments used in subscripting, +# e.g., Dict[T, int].__args__ == (T, int). + + +def _subs_tree(cls, tvars=None, args=None): + """An internal helper function: calculate substitution tree + for generic cls after replacing its type parameters with + substitutions in tvars -> args (if any). + Repeat the same following __origin__'s. + + Return a list of arguments with all possible substitutions + performed. Arguments that are generic classes themselves are represented + as tuples (so that no new classes are created by this function). + For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)] + """ + + if cls.__origin__ is None: + return cls + # Make of chain of origins (i.e. cls -> cls.__origin__) + current = cls.__origin__ + orig_chain = [] + while current.__origin__ is not None: + orig_chain.append(current) + current = current.__origin__ + # Replace type variables in __args__ if asked ... + tree_args = [] + for arg in cls.__args__: + tree_args.append(_replace_arg(arg, tvars, args)) + # ... then continue replacing down the origin chain. + for ocls in orig_chain: + new_tree_args = [] + for arg in ocls.__args__: + new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args)) + tree_args = new_tree_args + return tree_args + + +def _remove_dups_flatten(parameters): + """An internal helper for Union creation and substitution: flatten Union's + among parameters, then remove duplicates and strict subclasses. + """ + + # Flatten out Union[Union[...], ...]. + params = [] + for p in parameters: + if isinstance(p, _Union) and p.__origin__ is Union: + params.extend(p.__args__) + elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union: + params.extend(p[1:]) + else: + params.append(p) + # Weed out strict duplicates, preserving the first of each occurrence. + all_params = set(params) + if len(all_params) < len(params): + new_params = [] + for t in params: + if t in all_params: + new_params.append(t) + all_params.remove(t) + params = new_params + assert not all_params, all_params + # Weed out subclasses. + # E.g. Union[int, Employee, Manager] == Union[int, Employee]. + # If object is present it will be sole survivor among proper classes. + # Never discard type variables. + # (In particular, Union[str, AnyStr] != AnyStr.) 
+ all_params = set(params) + for t1 in params: + if not isinstance(t1, type): + continue + if any(isinstance(t2, type) and issubclass(t1, t2) + for t2 in all_params - {t1} + if not (isinstance(t2, GenericMeta) and + t2.__origin__ is not None)): + all_params.remove(t1) + return tuple(t for t in params if t in all_params) + + +def _check_generic(cls, parameters): + # Check correct count for parameters of a generic cls (internal helper). + if not cls.__parameters__: + raise TypeError("%s is not a generic class" % repr(cls)) + alen = len(parameters) + elen = len(cls.__parameters__) + if alen != elen: + raise TypeError("Too %s parameters for %s; actual %s, expected %s" % + ("many" if alen > elen else "few", repr(cls), alen, elen)) + + +_cleanups = [] + + +def _tp_cache(func): + """Internal wrapper caching __getitem__ of generic types with a fallback to + original function for non-hashable arguments. + """ + + cached = functools.lru_cache()(func) + _cleanups.append(cached.cache_clear) + + @functools.wraps(func) + def inner(*args, **kwds): + try: + return cached(*args, **kwds) + except TypeError: + pass # All real errors (not unhashable args) are raised below. + return func(*args, **kwds) + return inner + + +class _Union(_FinalTypingBase, _root=True): + """Union type; Union[X, Y] means either X or Y. + + To define a union, use e.g. Union[int, str]. Details: + + - The arguments must be types and there must be at least one. + + - None as an argument is a special case and is replaced by + type(None). + + - Unions of unions are flattened, e.g.:: + + Union[Union[int, str], float] == Union[int, str, float] + + - Unions of a single argument vanish, e.g.:: + + Union[int] == int # The constructor actually returns int + + - Redundant arguments are skipped, e.g.:: + + Union[int, str, int] == Union[int, str] + + - When comparing unions, the argument order is ignored, e.g.:: + + Union[int, str] == Union[str, int] + + - When two arguments have a subclass relationship, the least + derived argument is kept, e.g.:: + + class Employee: pass + class Manager(Employee): pass + Union[int, Employee, Manager] == Union[int, Employee] + Union[Manager, int, Employee] == Union[int, Employee] + Union[Employee, Manager] == Employee + + - Similar for object:: + + Union[int, object] == object + + - You cannot subclass or instantiate a union. + + - You can use Optional[X] as a shorthand for Union[X, None]. + """ + + __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__') + + def __new__(cls, parameters=None, origin=None, *args, _root=False): + self = super().__new__(cls, parameters, origin, *args, _root=_root) + if origin is None: + self.__parameters__ = None + self.__args__ = None + self.__origin__ = None + self.__tree_hash__ = hash(frozenset(('Union',))) + return self + if not isinstance(parameters, tuple): + raise TypeError("Expected parameters=") + if origin is Union: + parameters = _remove_dups_flatten(parameters) + # It's not a union if there's only one type left. + if len(parameters) == 1: + return parameters[0] + self.__parameters__ = _type_vars(parameters) + self.__args__ = parameters + self.__origin__ = origin + # Pre-calculate the __hash__ on instantiation. + # This improves speed for complex substitutions. 
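The _Union docstring above lists the normalisation rules: nested unions are flattened, duplicates are dropped, a single surviving argument is returned as-is, and Optional[X] is shorthand for Union[X, None]. A compact sketch of those documented identities (illustrative only, not part of the patch):

    from typing import Optional, Union

    assert Union[Union[int, str], float] == Union[int, str, float]  # flattening
    assert Union[int, str, int] == Union[int, str]                  # de-duplication
    assert Union[int] is int                                        # single argument collapses
    assert Optional[str] == Union[str, None] == Union[None, str]    # argument order is ignored

    # Unions are type-checker constructs; runtime checks are rejected.
    try:
        isinstance('x', Union[int, str])
    except TypeError:
        pass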
+ subs_tree = self._subs_tree() + if isinstance(subs_tree, tuple): + self.__tree_hash__ = hash(frozenset(subs_tree)) + else: + self.__tree_hash__ = hash(subs_tree) + return self + + def _eval_type(self, globalns, localns): + if self.__args__ is None: + return self + ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__) + ev_origin = _eval_type(self.__origin__, globalns, localns) + if ev_args == self.__args__ and ev_origin == self.__origin__: + # Everything is already evaluated. + return self + return self.__class__(ev_args, ev_origin, _root=True) + + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + _get_type_vars(self.__parameters__, tvars) + + def __repr__(self): + if self.__origin__ is None: + return super().__repr__() + tree = self._subs_tree() + if not isinstance(tree, tuple): + return repr(tree) + return tree[0]._tree_repr(tree) + + def _tree_repr(self, tree): + arg_list = [] + for arg in tree[1:]: + if not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + return super().__repr__() + '[%s]' % ', '.join(arg_list) + + @_tp_cache + def __getitem__(self, parameters): + if parameters == (): + raise TypeError("Cannot take a Union of no types.") + if not isinstance(parameters, tuple): + parameters = (parameters,) + if self.__origin__ is None: + msg = "Union[arg, ...]: each arg must be a type." + else: + msg = "Parameters to generic types must be types." + parameters = tuple(_type_check(p, msg) for p in parameters) + if self is not Union: + _check_generic(self, parameters) + return self.__class__(parameters, origin=self, _root=True) + + def _subs_tree(self, tvars=None, args=None): + if self is Union: + return Union # Nothing to substitute + tree_args = _subs_tree(self, tvars, args) + tree_args = _remove_dups_flatten(tree_args) + if len(tree_args) == 1: + return tree_args[0] # Union of a single type is that type + return (Union,) + tree_args + + def __eq__(self, other): + if isinstance(other, _Union): + return self.__tree_hash__ == other.__tree_hash__ + elif self is not Union: + return self._subs_tree() == other + else: + return self is other + + def __hash__(self): + return self.__tree_hash__ + + def __instancecheck__(self, obj): + raise TypeError("Unions cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + raise TypeError("Unions cannot be used with issubclass().") + + +Union = _Union(_root=True) + + +class _Optional(_FinalTypingBase, _root=True): + """Optional type. + + Optional[X] is equivalent to Union[X, None]. + """ + + __slots__ = () + + @_tp_cache + def __getitem__(self, arg): + arg = _type_check(arg, "Optional[t] requires a single type.") + return Union[arg, type(None)] + + +Optional = _Optional(_root=True) + + +def _gorg(a): + """Return the farthest origin of a generic class (internal helper).""" + assert isinstance(a, GenericMeta) + while a.__origin__ is not None: + a = a.__origin__ + return a + + +def _geqv(a, b): + """Return whether two generic classes are equivalent (internal helper). + + The intention is to consider generic class X and any of its + parameterized forms (X[T], X[int], etc.) as equivalent. + + However, X is not equivalent to a subclass of X. + + The relation is reflexive, symmetric and transitive. + """ + assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta) + # Reduce each to its origin. + return _gorg(a) is _gorg(b) + + +def _next_in_mro(cls): + """Helper for Generic.__new__. 
+ + Returns the class after the last occurrence of Generic or + Generic[...] in cls.__mro__. + """ + next_in_mro = object + # Look for the last occurrence of Generic or Generic[...]. + for i, c in enumerate(cls.__mro__[:-1]): + if isinstance(c, GenericMeta) and _gorg(c) is Generic: + next_in_mro = cls.__mro__[i + 1] + return next_in_mro + + +def _make_subclasshook(cls): + """Construct a __subclasshook__ callable that incorporates + the associated __extra__ class in subclass checks performed + against cls. + """ + if isinstance(cls.__extra__, abc.ABCMeta): + # The logic mirrors that of ABCMeta.__subclasscheck__. + # Registered classes need not be checked here because + # cls and its extra share the same _abc_registry. + def __extrahook__(subclass): + res = cls.__extra__.__subclasshook__(subclass) + if res is not NotImplemented: + return res + if cls.__extra__ in subclass.__mro__: + return True + for scls in cls.__extra__.__subclasses__(): + if isinstance(scls, GenericMeta): + continue + if issubclass(subclass, scls): + return True + return NotImplemented + else: + # For non-ABC extras we'll just call issubclass(). + def __extrahook__(subclass): + if cls.__extra__ and issubclass(subclass, cls.__extra__): + return True + return NotImplemented + return __extrahook__ + + +def _no_slots_copy(dct): + """Internal helper: copy class __dict__ and clean slots class variables. + (They will be re-created if necessary by normal class machinery.) + """ + dict_copy = dict(dct) + if '__slots__' in dict_copy: + for slot in dict_copy['__slots__']: + dict_copy.pop(slot, None) + return dict_copy + + +class GenericMeta(TypingMeta, abc.ABCMeta): + """Metaclass for generic types. + + This is a metaclass for typing.Generic and generic ABCs defined in + typing module. User defined subclasses of GenericMeta can override + __new__ and invoke super().__new__. Note that GenericMeta.__new__ + has strict rules on what is allowed in its bases argument: + * plain Generic is disallowed in bases; + * Generic[...] should appear in bases at most once; + * if Generic[...] is present, then it should list all type variables + that appear in other bases. + In addition, type of all generic bases is erased, e.g., C[int] is + stripped to plain C. + """ + + def __new__(cls, name, bases, namespace, + tvars=None, args=None, origin=None, extra=None, orig_bases=None): + """Create a new generic class. GenericMeta.__new__ accepts + keyword arguments that are used for internal bookkeeping, therefore + an override should pass unused keyword arguments to super(). + """ + if tvars is not None: + # Called from __getitem__() below. + assert origin is not None + assert all(isinstance(t, TypeVar) for t in tvars), tvars + else: + # Called from class statement. + assert tvars is None, tvars + assert args is None, args + assert origin is None, origin + + # Get the full set of tvars from the bases. + tvars = _type_vars(bases) + # Look for Generic[T1, ..., Tn]. + # If found, tvars must be a subset of it. + # If not found, tvars is it. + # Also check for and reject plain Generic, + # and reject multiple Generic[...]. + gvars = None + for base in bases: + if base is Generic: + raise TypeError("Cannot inherit from plain Generic") + if (isinstance(base, GenericMeta) and + base.__origin__ is Generic): + if gvars is not None: + raise TypeError( + "Cannot inherit from Generic[...] 
multiple types.") + gvars = base.__parameters__ + if gvars is None: + gvars = tvars + else: + tvarset = set(tvars) + gvarset = set(gvars) + if not tvarset <= gvarset: + raise TypeError( + "Some type variables (%s) " + "are not listed in Generic[%s]" % + (", ".join(str(t) for t in tvars if t not in gvarset), + ", ".join(str(g) for g in gvars))) + tvars = gvars + + initial_bases = bases + if extra is not None and type(extra) is abc.ABCMeta and extra not in bases: + bases = (extra,) + bases + bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases) + + # remove bare Generic from bases if there are other generic bases + if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): + bases = tuple(b for b in bases if b is not Generic) + namespace.update({'__origin__': origin, '__extra__': extra}) + self = super().__new__(cls, name, bases, namespace, _root=True) + + self.__parameters__ = tvars + # Be prepared that GenericMeta will be subclassed by TupleMeta + # and CallableMeta, those two allow ..., (), or [] in __args___. + self.__args__ = tuple(... if a is _TypingEllipsis else + () if a is _TypingEmpty else + a for a in args) if args else None + # Speed hack (https://github.com/python/typing/issues/196). + self.__next_in_mro__ = _next_in_mro(self) + # Preserve base classes on subclassing (__bases__ are type erased now). + if orig_bases is None: + self.__orig_bases__ = initial_bases + + # This allows unparameterized generic collections to be used + # with issubclass() and isinstance() in the same way as their + # collections.abc counterparts (e.g., isinstance([], Iterable)). + if ( + '__subclasshook__' not in namespace and extra or + # allow overriding + getattr(self.__subclasshook__, '__name__', '') == '__extrahook__' + ): + self.__subclasshook__ = _make_subclasshook(self) + if isinstance(extra, abc.ABCMeta): + self._abc_registry = extra._abc_registry + self._abc_cache = extra._abc_cache + elif origin is not None: + self._abc_registry = origin._abc_registry + self._abc_cache = origin._abc_cache + + if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2. + self.__qualname__ = origin.__qualname__ + self.__tree_hash__ = (hash(self._subs_tree()) if origin else + super(GenericMeta, self).__hash__()) + return self + + # _abc_negative_cache and _abc_negative_cache_version + # realised as descriptors, since GenClass[t1, t2, ...] always + # share subclass info with GenClass. + # This is an important memory optimization. 
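GenericMeta.__new__ above erases generic bases, records __origin__, __args__ and __parameters__, and installs a __subclasshook__ so that unparameterized generics cooperate with isinstance() and issubclass() through their __extra__ class, while parameterized forms refuse such checks. A brief sketch of the resulting behaviour (illustrative only, not part of the patch; Box is a hypothetical user class):

    import typing
    from typing import Generic, TypeVar

    T = TypeVar('T')

    class Box(Generic[T]):
        def __init__(self, item):
            self.item = item

    # Subscripting records the bookkeeping attributes described earlier.
    assert Box.__parameters__ == (T,)
    assert Box[int].__origin__ is Box and Box[int].__args__ == (int,)

    # Unparameterized generics defer to their collections counterparts...
    assert isinstance([], typing.List) and not isinstance(42, typing.List)

    # ...but parameterized generics reject class/instance checks.
    try:
        isinstance([], typing.List[int])
    except TypeError:
        pass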
+ @property + def _abc_negative_cache(self): + if isinstance(self.__extra__, abc.ABCMeta): + return self.__extra__._abc_negative_cache + return _gorg(self)._abc_generic_negative_cache + + @_abc_negative_cache.setter + def _abc_negative_cache(self, value): + if self.__origin__ is None: + if isinstance(self.__extra__, abc.ABCMeta): + self.__extra__._abc_negative_cache = value + else: + self._abc_generic_negative_cache = value + + @property + def _abc_negative_cache_version(self): + if isinstance(self.__extra__, abc.ABCMeta): + return self.__extra__._abc_negative_cache_version + return _gorg(self)._abc_generic_negative_cache_version + + @_abc_negative_cache_version.setter + def _abc_negative_cache_version(self, value): + if self.__origin__ is None: + if isinstance(self.__extra__, abc.ABCMeta): + self.__extra__._abc_negative_cache_version = value + else: + self._abc_generic_negative_cache_version = value + + def _get_type_vars(self, tvars): + if self.__origin__ and self.__parameters__: + _get_type_vars(self.__parameters__, tvars) + + def _eval_type(self, globalns, localns): + ev_origin = (self.__origin__._eval_type(globalns, localns) + if self.__origin__ else None) + ev_args = tuple(_eval_type(a, globalns, localns) for a + in self.__args__) if self.__args__ else None + if ev_origin == self.__origin__ and ev_args == self.__args__: + return self + return self.__class__(self.__name__, + self.__bases__, + _no_slots_copy(self.__dict__), + tvars=_type_vars(ev_args) if ev_args else None, + args=ev_args, + origin=ev_origin, + extra=self.__extra__, + orig_bases=self.__orig_bases__) + + def __repr__(self): + if self.__origin__ is None: + return super().__repr__() + return self._tree_repr(self._subs_tree()) + + def _tree_repr(self, tree): + arg_list = [] + for arg in tree[1:]: + if arg == (): + arg_list.append('()') + elif not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + return super().__repr__() + '[%s]' % ', '.join(arg_list) + + def _subs_tree(self, tvars=None, args=None): + if self.__origin__ is None: + return self + tree_args = _subs_tree(self, tvars, args) + return (_gorg(self),) + tuple(tree_args) + + def __eq__(self, other): + if not isinstance(other, GenericMeta): + return NotImplemented + if self.__origin__ is None or other.__origin__ is None: + return self is other + return self.__tree_hash__ == other.__tree_hash__ + + def __hash__(self): + return self.__tree_hash__ + + @_tp_cache + def __getitem__(self, params): + if not isinstance(params, tuple): + params = (params,) + if not params and not _gorg(self) is Tuple: + raise TypeError( + "Parameter list to %s[...] cannot be empty" % _qualname(self)) + msg = "Parameters to generic types must be types." + params = tuple(_type_check(p, msg) for p in params) + if self is Generic: + # Generic can only be subscripted with unique type variables. + if not all(isinstance(p, TypeVar) for p in params): + raise TypeError( + "Parameters to Generic[...] must all be type variables") + if len(set(params)) != len(params): + raise TypeError( + "Parameters to Generic[...] must all be unique") + tvars = params + args = params + elif self in (Tuple, Callable): + tvars = _type_vars(params) + args = params + elif self is _Protocol: + # _Protocol is internal, don't check anything. + tvars = params + args = params + elif self.__origin__ in (Generic, _Protocol): + # Can't subscript Generic[...] or _Protocol[...]. 
+ raise TypeError("Cannot subscript already-subscripted %s" % + repr(self)) + else: + # Subscripting a regular Generic subclass. + _check_generic(self, params) + tvars = _type_vars(params) + args = params + + prepend = (self,) if self.__origin__ is None else () + return self.__class__(self.__name__, + prepend + self.__bases__, + _no_slots_copy(self.__dict__), + tvars=tvars, + args=args, + origin=self, + extra=self.__extra__, + orig_bases=self.__orig_bases__) + + def __subclasscheck__(self, cls): + if self.__origin__ is not None: + if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: + raise TypeError("Parameterized generics cannot be used with class " + "or instance checks") + return False + if self is Generic: + raise TypeError("Class %r cannot be used with class " + "or instance checks" % self) + return super().__subclasscheck__(cls) + + def __instancecheck__(self, instance): + # Since we extend ABC.__subclasscheck__ and + # ABC.__instancecheck__ inlines the cache checking done by the + # latter, we must extend __instancecheck__ too. For simplicity + # we just skip the cache check -- instance checks for generic + # classes are supposed to be rare anyways. + return issubclass(instance.__class__, self) + + def __copy__(self): + return self.__class__(self.__name__, self.__bases__, + _no_slots_copy(self.__dict__), + self.__parameters__, self.__args__, self.__origin__, + self.__extra__, self.__orig_bases__) + + def __setattr__(self, attr, value): + # We consider all the subscripted genrics as proxies for original class + if ( + attr.startswith('__') and attr.endswith('__') or + attr.startswith('_abc_') + ): + super(GenericMeta, self).__setattr__(attr, value) + else: + super(GenericMeta, _gorg(self)).__setattr__(attr, value) + + +# Prevent checks for Generic to crash when defining Generic. +Generic = None + + +def _generic_new(base_cls, cls, *args, **kwds): + # Assure type is erased on instantiation, + # but attempt to store it in __orig_class__ + if cls.__origin__ is None: + return base_cls.__new__(cls) + else: + origin = _gorg(cls) + obj = base_cls.__new__(origin) + try: + obj.__orig_class__ = cls + except AttributeError: + pass + obj.__init__(*args, **kwds) + return obj + + +class Generic(metaclass=GenericMeta): + """Abstract base class for generic types. + + A generic type is typically declared by inheriting from + this class parameterized with one or more type variables. + For example, a generic mapping type might be defined as:: + + class Mapping(Generic[KT, VT]): + def __getitem__(self, key: KT) -> VT: + ... + # Etc. + + This class can then be used as follows:: + + def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT: + try: + return mapping[key] + except KeyError: + return default + """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Generic): + raise TypeError("Type Generic cannot be instantiated; " + "it can be used only as a base class") + return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) + + +class _TypingEmpty: + """Internal placeholder for () or []. Used by TupleMeta and CallableMeta + to allow empty list/tuple in specific places, without allowing them + to sneak in where prohibited. + """ + + +class _TypingEllipsis: + """Internal placeholder for ... 
(ellipsis).""" + + +class TupleMeta(GenericMeta): + """Metaclass for Tuple (internal).""" + + @_tp_cache + def __getitem__(self, parameters): + if self.__origin__ is not None or not _geqv(self, Tuple): + # Normal generic rules apply if this is not the first subscription + # or a subscription of a subclass. + return super().__getitem__(parameters) + if parameters == (): + return super().__getitem__((_TypingEmpty,)) + if not isinstance(parameters, tuple): + parameters = (parameters,) + if len(parameters) == 2 and parameters[1] is ...: + msg = "Tuple[t, ...]: t must be a type." + p = _type_check(parameters[0], msg) + return super().__getitem__((p, _TypingEllipsis)) + msg = "Tuple[t0, t1, ...]: each t must be a type." + parameters = tuple(_type_check(p, msg) for p in parameters) + return super().__getitem__(parameters) + + def __instancecheck__(self, obj): + if self.__args__ is None: + return isinstance(obj, tuple) + raise TypeError("Parameterized Tuple cannot be used " + "with isinstance().") + + def __subclasscheck__(self, cls): + if self.__args__ is None: + return issubclass(cls, tuple) + raise TypeError("Parameterized Tuple cannot be used " + "with issubclass().") + + +class Tuple(tuple, extra=tuple, metaclass=TupleMeta): + """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. + + Example: Tuple[T1, T2] is a tuple of two elements corresponding + to type variables T1 and T2. Tuple[int, float, str] is a tuple + of an int, a float and a string. + + To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. + """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Tuple): + raise TypeError("Type Tuple cannot be instantiated; " + "use tuple() instead") + return _generic_new(tuple, cls, *args, **kwds) + + +class CallableMeta(GenericMeta): + """Metaclass for Callable (internal).""" + + def __repr__(self): + if self.__origin__ is None: + return super().__repr__() + return self._tree_repr(self._subs_tree()) + + def _tree_repr(self, tree): + if _gorg(self) is not Callable: + return super()._tree_repr(tree) + # For actual Callable (not its subclass) we override + # super()._tree_repr() for nice formatting. + arg_list = [] + for arg in tree[1:]: + if not isinstance(arg, tuple): + arg_list.append(_type_repr(arg)) + else: + arg_list.append(arg[0]._tree_repr(arg)) + if arg_list[0] == '...': + return repr(tree[0]) + '[..., %s]' % arg_list[1] + return (repr(tree[0]) + + '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1])) + + def __getitem__(self, parameters): + """A thin wrapper around __getitem_inner__ to provide the latter + with hashable arguments to improve speed. + """ + + if self.__origin__ is not None or not _geqv(self, Callable): + return super().__getitem__(parameters) + if not isinstance(parameters, tuple) or len(parameters) != 2: + raise TypeError("Callable must be used as " + "Callable[[arg, ...], result].") + args, result = parameters + if args is Ellipsis: + parameters = (Ellipsis, result) + else: + if not isinstance(args, list): + raise TypeError("Callable[args, result]: args must be a list." + " Got %.100r." % (args,)) + parameters = (tuple(args), result) + return self.__getitem_inner__(parameters) + + @_tp_cache + def __getitem_inner__(self, parameters): + args, result = parameters + msg = "Callable[args, result]: result must be a type." + result = _type_check(result, msg) + if args is Ellipsis: + return super().__getitem__((_TypingEllipsis, result)) + msg = "Callable[[arg, ...], result]: each arg must be a type." 
+ args = tuple(_type_check(arg, msg) for arg in args) + parameters = args + (result,) + return super().__getitem__(parameters) + + +class Callable(extra=collections_abc.Callable, metaclass=CallableMeta): + """Callable type; Callable[[int], str] is a function of (int) -> str. + + The subscription syntax must always be used with exactly two + values: the argument list and the return type. The argument list + must be a list of types or ellipsis; the return type must be a single type. + + There is no syntax to indicate optional or keyword arguments, + such function types are rarely used as callback types. + """ + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Callable): + raise TypeError("Type Callable cannot be instantiated; " + "use a non-abstract subclass instead") + return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) + + +class _ClassVar(_FinalTypingBase, _root=True): + """Special type construct to mark class variables. + + An annotation wrapped in ClassVar indicates that a given + attribute is intended to be used as a class variable and + should not be set on instances of that class. Usage:: + + class Starship: + stats: ClassVar[Dict[str, int]] = {} # class variable + damage: int = 10 # instance variable + + ClassVar accepts only types and cannot be further subscribed. + + Note that ClassVar is not a class itself, and should not + be used with isinstance() or issubclass(). + """ + + __slots__ = ('__type__',) + + def __init__(self, tp=None, **kwds): + self.__type__ = tp + + def __getitem__(self, item): + cls = type(self) + if self.__type__ is None: + return cls(_type_check(item, + '{} accepts only single type.'.format(cls.__name__[1:])), + _root=True) + raise TypeError('{} cannot be further subscripted' + .format(cls.__name__[1:])) + + def _eval_type(self, globalns, localns): + new_tp = _eval_type(self.__type__, globalns, localns) + if new_tp == self.__type__: + return self + return type(self)(new_tp, _root=True) + + def __repr__(self): + r = super().__repr__() + if self.__type__ is not None: + r += '[{}]'.format(_type_repr(self.__type__)) + return r + + def __hash__(self): + return hash((type(self).__name__, self.__type__)) + + def __eq__(self, other): + if not isinstance(other, _ClassVar): + return NotImplemented + if self.__type__ is not None: + return self.__type__ == other.__type__ + return self is other + + +ClassVar = _ClassVar(_root=True) + + +def cast(typ, val): + """Cast a value to a type. + + This returns the value unchanged. To the type checker this + signals that the return value has the designated type, but at + runtime we intentionally don't check anything (we want this + to be as fast as possible). + """ + return val + + +def _get_defaults(func): + """Internal helper to extract the default arguments, by name.""" + try: + code = func.__code__ + except AttributeError: + # Some built-in functions don't have __code__, __defaults__, etc. 
+ return {} + pos_count = code.co_argcount + arg_names = code.co_varnames + arg_names = arg_names[:pos_count] + defaults = func.__defaults__ or () + kwdefaults = func.__kwdefaults__ + res = dict(kwdefaults) if kwdefaults else {} + pos_offset = pos_count - len(defaults) + for name, value in zip(arg_names[pos_offset:], defaults): + assert name not in res + res[name] = value + return res + + +_allowed_types = (types.FunctionType, types.BuiltinFunctionType, + types.MethodType, types.ModuleType, + SlotWrapperType, MethodWrapperType, MethodDescriptorType) + + +def get_type_hints(obj, globalns=None, localns=None): + """Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, and if necessary + adds Optional[t] if a default value equal to None is set. + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj, and these are also used as the locals. If the + object does not appear to have globals, an exception is raised. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + + if getattr(obj, '__no_type_check__', None): + return {} + if globalns is None: + globalns = getattr(obj, '__globals__', {}) + if localns is None: + localns = globalns + elif localns is None: + localns = globalns + # Classes require a special treatment. + if isinstance(obj, type): + hints = {} + for base in reversed(obj.__mro__): + ann = base.__dict__.get('__annotations__', {}) + for name, value in ann.items(): + if value is None: + value = type(None) + if isinstance(value, str): + value = _ForwardRef(value) + value = _eval_type(value, globalns, localns) + hints[name] = value + return hints + hints = getattr(obj, '__annotations__', None) + if hints is None: + # Return empty annotations for something that _could_ have them. + if isinstance(obj, _allowed_types): + return {} + else: + raise TypeError('{!r} is not a module, class, method, ' + 'or function.'.format(obj)) + defaults = _get_defaults(obj) + hints = dict(hints) + for name, value in hints.items(): + if value is None: + value = type(None) + if isinstance(value, str): + value = _ForwardRef(value) + value = _eval_type(value, globalns, localns) + if name in defaults and defaults[name] is None: + value = Optional[value] + hints[name] = value + return hints + + +def no_type_check(arg): + """Decorator to indicate that annotations are not type hints. + + The argument must be a class or function; if it is a class, it + applies recursively to all methods and classes defined in that class + (but not to methods defined in its superclasses or subclasses). + + This mutates the function(s) or class(es) in place. 
+ """ + if isinstance(arg, type): + arg_attrs = arg.__dict__.copy() + for attr, val in arg.__dict__.items(): + if val in arg.__bases__: + arg_attrs.pop(attr) + for obj in arg_attrs.values(): + if isinstance(obj, types.FunctionType): + obj.__no_type_check__ = True + if isinstance(obj, type): + no_type_check(obj) + try: + arg.__no_type_check__ = True + except TypeError: # built-in classes + pass + return arg + + +def no_type_check_decorator(decorator): + """Decorator to give another decorator the @no_type_check effect. + + This wraps the decorator with something that wraps the decorated + function in @no_type_check. + """ + + @functools.wraps(decorator) + def wrapped_decorator(*args, **kwds): + func = decorator(*args, **kwds) + func = no_type_check(func) + return func + + return wrapped_decorator + + +def _overload_dummy(*args, **kwds): + """Helper for @overload to raise when called.""" + raise NotImplementedError( + "You should not call an overloaded function. " + "A series of @overload-decorated functions " + "outside a stub module should always be followed " + "by an implementation that is not @overload-ed.") + + +def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + """ + return _overload_dummy + + +class _ProtocolMeta(GenericMeta): + """Internal metaclass for _Protocol. + + This exists so _Protocol classes can be generic without deriving + from Generic. + """ + + def __instancecheck__(self, obj): + if _Protocol not in self.__bases__: + return super().__instancecheck__(obj) + raise TypeError("Protocols cannot be used with isinstance().") + + def __subclasscheck__(self, cls): + if not self._is_protocol: + # No structural checks since this isn't a protocol. + return NotImplemented + + if self is _Protocol: + # Every class is a subclass of the empty protocol. + return True + + # Find all attributes defined in the protocol. + attrs = self._get_protocol_attrs() + + for attr in attrs: + if not any(attr in d.__dict__ for d in cls.__mro__): + return False + return True + + def _get_protocol_attrs(self): + # Get all Protocol base classes. + protocol_bases = [] + for c in self.__mro__: + if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': + protocol_bases.append(c) + + # Get attributes included in protocol. + attrs = set() + for base in protocol_bases: + for attr in base.__dict__.keys(): + # Include attributes not defined in any non-protocol bases. 
+ for c in self.__mro__: + if (c is not base and attr in c.__dict__ and + not getattr(c, '_is_protocol', False)): + break + else: + if (not attr.startswith('_abc_') and + attr != '__abstractmethods__' and + attr != '__annotations__' and + attr != '__weakref__' and + attr != '_is_protocol' and + attr != '__dict__' and + attr != '__args__' and + attr != '__slots__' and + attr != '_get_protocol_attrs' and + attr != '__next_in_mro__' and + attr != '__parameters__' and + attr != '__origin__' and + attr != '__orig_bases__' and + attr != '__extra__' and + attr != '__tree_hash__' and + attr != '__module__'): + attrs.add(attr) + + return attrs + + +class _Protocol(metaclass=_ProtocolMeta): + """Internal base class for protocol classes. + + This implements a simple-minded structural issubclass check + (similar but more general than the one-offs in collections.abc + such as Hashable). + """ + + __slots__ = () + + _is_protocol = True + + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. + +Hashable = collections_abc.Hashable # Not generic. + + +if hasattr(collections_abc, 'Awaitable'): + class Awaitable(Generic[T_co], extra=collections_abc.Awaitable): + __slots__ = () + + __all__.append('Awaitable') + + +if hasattr(collections_abc, 'Coroutine'): + class Coroutine(Awaitable[V_co], Generic[T_co, T_contra, V_co], + extra=collections_abc.Coroutine): + __slots__ = () + + __all__.append('Coroutine') + + +if hasattr(collections_abc, 'AsyncIterable'): + + class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable): + __slots__ = () + + class AsyncIterator(AsyncIterable[T_co], + extra=collections_abc.AsyncIterator): + __slots__ = () + + __all__.append('AsyncIterable') + __all__.append('AsyncIterator') + + +class Iterable(Generic[T_co], extra=collections_abc.Iterable): + __slots__ = () + + +class Iterator(Iterable[T_co], extra=collections_abc.Iterator): + __slots__ = () + + +class SupportsInt(_Protocol): + __slots__ = () + + @abstractmethod + def __int__(self) -> int: + pass + + +class SupportsFloat(_Protocol): + __slots__ = () + + @abstractmethod + def __float__(self) -> float: + pass + + +class SupportsComplex(_Protocol): + __slots__ = () + + @abstractmethod + def __complex__(self) -> complex: + pass + + +class SupportsBytes(_Protocol): + __slots__ = () + + @abstractmethod + def __bytes__(self) -> bytes: + pass + + +class SupportsAbs(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __abs__(self) -> T_co: + pass + + +class SupportsRound(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __round__(self, ndigits: int = 0) -> T_co: + pass + + +if hasattr(collections_abc, 'Reversible'): + class Reversible(Iterable[T_co], extra=collections_abc.Reversible): + __slots__ = () +else: + class Reversible(_Protocol[T_co]): + __slots__ = () + + @abstractmethod + def __reversed__(self) -> 'Iterator[T_co]': + pass + + +Sized = collections_abc.Sized # Not generic. + + +class Container(Generic[T_co], extra=collections_abc.Container): + __slots__ = () + + +if hasattr(collections_abc, 'Collection'): + class Collection(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Collection): + __slots__ = () + + __all__.append('Collection') + + +# Callable was defined earlier. 
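A minimal sketch, assuming this bundled module resolves as `typing` on sys.path, of how the Supports* protocols and the collections.abc-backed generic wrappers defined above behave at runtime (the concrete values are illustrative only):

from typing import Iterable, SupportsInt

# Unparameterized generics delegate class/instance checks to collections.abc.
assert isinstance([1, 2, 3], Iterable)

# Supports* protocols are structural: int qualifies because it defines __int__.
assert issubclass(int, SupportsInt)

# Parameterized generics refuse isinstance()/issubclass() by design.
try:
    isinstance([1, 2, 3], Iterable[int])
except TypeError:
    pass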
+ +if hasattr(collections_abc, 'Collection'): + class AbstractSet(Collection[T_co], + extra=collections_abc.Set): + __slots__ = () +else: + class AbstractSet(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Set): + __slots__ = () + + +class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): + __slots__ = () + + +# NOTE: It is only covariant in the value type. +if hasattr(collections_abc, 'Collection'): + class Mapping(Collection[KT], Generic[KT, VT_co], + extra=collections_abc.Mapping): + __slots__ = () +else: + class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co], + extra=collections_abc.Mapping): + __slots__ = () + + +class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping): + __slots__ = () + + +if hasattr(collections_abc, 'Reversible'): + if hasattr(collections_abc, 'Collection'): + class Sequence(Reversible[T_co], Collection[T_co], + extra=collections_abc.Sequence): + __slots__ = () + else: + class Sequence(Sized, Reversible[T_co], Container[T_co], + extra=collections_abc.Sequence): + __slots__ = () +else: + class Sequence(Sized, Iterable[T_co], Container[T_co], + extra=collections_abc.Sequence): + __slots__ = () + + +class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence): + __slots__ = () + + +class ByteString(Sequence[int], extra=collections_abc.ByteString): + __slots__ = () + + +class List(list, MutableSequence[T], extra=list): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, List): + raise TypeError("Type List cannot be instantiated; " + "use list() instead") + return _generic_new(list, cls, *args, **kwds) + + +class Deque(collections.deque, MutableSequence[T], extra=collections.deque): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Deque): + return collections.deque(*args, **kwds) + return _generic_new(collections.deque, cls, *args, **kwds) + + +class Set(set, MutableSet[T], extra=set): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Set): + raise TypeError("Type Set cannot be instantiated; " + "use set() instead") + return _generic_new(set, cls, *args, **kwds) + + +class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, FrozenSet): + raise TypeError("Type FrozenSet cannot be instantiated; " + "use frozenset() instead") + return _generic_new(frozenset, cls, *args, **kwds) + + +class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView): + __slots__ = () + + +class KeysView(MappingView[KT], AbstractSet[KT], + extra=collections_abc.KeysView): + __slots__ = () + + +class ItemsView(MappingView[Tuple[KT, VT_co]], + AbstractSet[Tuple[KT, VT_co]], + Generic[KT, VT_co], + extra=collections_abc.ItemsView): + __slots__ = () + + +class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView): + __slots__ = () + + +if hasattr(contextlib, 'AbstractContextManager'): + class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager): + __slots__ = () + __all__.append('ContextManager') + + +class Dict(dict, MutableMapping[KT, VT], extra=dict): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Dict): + raise TypeError("Type Dict cannot be instantiated; " + "use dict() instead") + return _generic_new(dict, cls, *args, **kwds) + + +class DefaultDict(collections.defaultdict, MutableMapping[KT, VT], + extra=collections.defaultdict): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, DefaultDict): 
+ return collections.defaultdict(*args, **kwds) + return _generic_new(collections.defaultdict, cls, *args, **kwds) + + +class Counter(collections.Counter, Dict[T, int], extra=collections.Counter): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Counter): + return collections.Counter(*args, **kwds) + return _generic_new(collections.Counter, cls, *args, **kwds) + + +if hasattr(collections, 'ChainMap'): + # ChainMap only exists in 3.3+ + __all__.append('ChainMap') + + class ChainMap(collections.ChainMap, MutableMapping[KT, VT], + extra=collections.ChainMap): + + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, ChainMap): + return collections.ChainMap(*args, **kwds) + return _generic_new(collections.ChainMap, cls, *args, **kwds) + + +# Determine what base class to use for Generator. +if hasattr(collections_abc, 'Generator'): + # Sufficiently recent versions of 3.5 have a Generator ABC. + _G_base = collections_abc.Generator +else: + # Fall back on the exact type. + _G_base = types.GeneratorType + + +class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co], + extra=_G_base): + __slots__ = () + + def __new__(cls, *args, **kwds): + if _geqv(cls, Generator): + raise TypeError("Type Generator cannot be instantiated; " + "create a subclass instead") + return _generic_new(_G_base, cls, *args, **kwds) + + +if hasattr(collections_abc, 'AsyncGenerator'): + class AsyncGenerator(AsyncIterator[T_co], Generic[T_co, T_contra], + extra=collections_abc.AsyncGenerator): + __slots__ = () + + __all__.append('AsyncGenerator') + + +# Internal type variable used for Type[]. +CT_co = TypeVar('CT_co', covariant=True, bound=type) + + +# This is not a real generic class. Don't use outside annotations. +class Type(Generic[CT_co], extra=type): + """A special construct usable to annotate class objects. + + For example, suppose we have the following classes:: + + class User: ... # Abstract base for User classes + class BasicUser(User): ... + class ProUser(User): ... + class TeamUser(User): ... + + And a function that takes a class argument that's a subclass of + User and returns an instance of the corresponding class:: + + U = TypeVar('U', bound=User) + def new_user(user_class: Type[U]) -> U: + user = user_class() + # (Here we could write the user object to a database) + return user + + joe = new_user(BasicUser) + + At this point the type checker knows that joe has type BasicUser. + """ + + __slots__ = () + + +def _make_nmtuple(name, types): + msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type" + types = [(n, _type_check(t, msg)) for n, t in types] + nm_tpl = collections.namedtuple(name, [n for n, t in types]) + # Prior to PEP 526, only _field_types attribute was assigned. + # Now, both __annotations__ and _field_types are used to maintain compatibility. 
+ nm_tpl.__annotations__ = nm_tpl._field_types = collections.OrderedDict(types) + try: + nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + pass + return nm_tpl + + +_PY36 = sys.version_info[:2] >= (3, 6) + +# attributes prohibited to set in NamedTuple class syntax +_prohibited = ('__new__', '__init__', '__slots__', '__getnewargs__', + '_fields', '_field_defaults', '_field_types', + '_make', '_replace', '_asdict') + +_special = ('__module__', '__name__', '__qualname__', '__annotations__') + + +class NamedTupleMeta(type): + + def __new__(cls, typename, bases, ns): + if ns.get('_root', False): + return super().__new__(cls, typename, bases, ns) + if not _PY36: + raise TypeError("Class syntax for NamedTuple is only supported" + " in Python 3.6+") + types = ns.get('__annotations__', {}) + nm_tpl = _make_nmtuple(typename, types.items()) + defaults = [] + defaults_dict = {} + for field_name in types: + if field_name in ns: + default_value = ns[field_name] + defaults.append(default_value) + defaults_dict[field_name] = default_value + elif defaults: + raise TypeError("Non-default namedtuple field {field_name} cannot " + "follow default field(s) {default_names}" + .format(field_name=field_name, + default_names=', '.join(defaults_dict.keys()))) + nm_tpl.__new__.__defaults__ = tuple(defaults) + nm_tpl._field_defaults = defaults_dict + # update from user namespace without overriding special namedtuple attributes + for key in ns: + if key in _prohibited: + raise AttributeError("Cannot overwrite NamedTuple attribute " + key) + elif key not in _special and key not in nm_tpl._fields: + setattr(nm_tpl, key, ns[key]) + return nm_tpl + + +class NamedTuple(metaclass=NamedTupleMeta): + """Typed version of namedtuple. + + Usage in Python versions >= 3.6:: + + class Employee(NamedTuple): + name: str + id: int + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has extra __annotations__ and _field_types + attributes, giving an ordered dict mapping field names to types. + __annotations__ should be preferred, while _field_types + is kept to maintain pre PEP 526 compatibility. (The field names + are in the _fields attribute, which is part of the namedtuple + API.) Alternative equivalent keyword syntax is also accepted:: + + Employee = NamedTuple('Employee', name=str, id=int) + + In Python versions <= 3.5 use:: + + Employee = NamedTuple('Employee', [('name', str), ('id', int)]) + """ + _root = True + + def __new__(self, typename, fields=None, **kwargs): + if kwargs and not _PY36: + raise TypeError("Keyword syntax for NamedTuple is only supported" + " in Python 3.6+") + if fields is None: + fields = kwargs.items() + elif kwargs: + raise TypeError("Either list of fields or keywords" + " can be provided to NamedTuple, not both") + return _make_nmtuple(typename, fields) + + +def NewType(name, tp): + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy function that simply returns its argument. Usage:: + + UserId = NewType('UserId', int) + + def name_by_id(user_id: UserId) -> str: + ... 
+ + UserId('user') # Fails type check + + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + + num = UserId(5) + 1 # type: int + """ + + def new_type(x): + return x + + new_type.__name__ = name + new_type.__supertype__ = tp + return new_type + + +# Python-version-specific alias (Python 2: unicode; Python 3: str) +Text = str + + +# Constant that's True when type checking, but False here. +TYPE_CHECKING = False + + +class IO(Generic[AnyStr]): + """Generic base class for TextIO and BinaryIO. + + This is an abstract, generic version of the return of open(). + + NOTE: This does not distinguish between the different possible + classes (text vs. binary, read vs. write vs. read/write, + append-only, unbuffered). The TextIO and BinaryIO subclasses + below capture the distinctions between text vs. binary, which is + pervasive in the interface; however we currently do not offer a + way to track the other distinctions in the type system. + """ + + __slots__ = () + + @abstractproperty + def mode(self) -> str: + pass + + @abstractproperty + def name(self) -> str: + pass + + @abstractmethod + def close(self) -> None: + pass + + @abstractmethod + def closed(self) -> bool: + pass + + @abstractmethod + def fileno(self) -> int: + pass + + @abstractmethod + def flush(self) -> None: + pass + + @abstractmethod + def isatty(self) -> bool: + pass + + @abstractmethod + def read(self, n: int = -1) -> AnyStr: + pass + + @abstractmethod + def readable(self) -> bool: + pass + + @abstractmethod + def readline(self, limit: int = -1) -> AnyStr: + pass + + @abstractmethod + def readlines(self, hint: int = -1) -> List[AnyStr]: + pass + + @abstractmethod + def seek(self, offset: int, whence: int = 0) -> int: + pass + + @abstractmethod + def seekable(self) -> bool: + pass + + @abstractmethod + def tell(self) -> int: + pass + + @abstractmethod + def truncate(self, size: int = None) -> int: + pass + + @abstractmethod + def writable(self) -> bool: + pass + + @abstractmethod + def write(self, s: AnyStr) -> int: + pass + + @abstractmethod + def writelines(self, lines: List[AnyStr]) -> None: + pass + + @abstractmethod + def __enter__(self) -> 'IO[AnyStr]': + pass + + @abstractmethod + def __exit__(self, type, value, traceback) -> None: + pass + + +class BinaryIO(IO[bytes]): + """Typed version of the return of open() in binary mode.""" + + __slots__ = () + + @abstractmethod + def write(self, s: Union[bytes, bytearray]) -> int: + pass + + @abstractmethod + def __enter__(self) -> 'BinaryIO': + pass + + +class TextIO(IO[str]): + """Typed version of the return of open() in text mode.""" + + __slots__ = () + + @abstractproperty + def buffer(self) -> BinaryIO: + pass + + @abstractproperty + def encoding(self) -> str: + pass + + @abstractproperty + def errors(self) -> Optional[str]: + pass + + @abstractproperty + def line_buffering(self) -> bool: + pass + + @abstractproperty + def newlines(self) -> Any: + pass + + @abstractmethod + def __enter__(self) -> 'TextIO': + pass + + +class io: + """Wrapper namespace for IO generic classes.""" + + __all__ = ['IO', 'TextIO', 'BinaryIO'] + IO = IO + TextIO = TextIO + BinaryIO = BinaryIO + + +io.__name__ = __name__ + '.io' +sys.modules[io.__name__] = io + + +Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), + lambda p: p.pattern) +Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), + lambda m: m.re.pattern) + + +class re: + """Wrapper namespace for re type aliases.""" + + __all__ = ['Pattern', 'Match'] + Pattern = Pattern + Match = Match + + 
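With the IO hierarchy and the Pattern/Match aliases in place, a brief usage sketch (assuming the bundled module resolves as `typing`; the helper names below are hypothetical): these types appear only in annotations, while the runtime values remain ordinary io streams and re match objects.

import re as stdlib_re
from typing import IO, Match, Optional

def count_lines(stream: IO[str]) -> int:
    # Any text stream works: an open file, io.StringIO, sys.stdin, ...
    return sum(1 for _ in stream)

def first_word(line: str) -> Optional[Match[str]]:
    # The annotation uses typing.Match; the returned value is a plain re match.
    return stdlib_re.match(r'\w+', line)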
+re.__name__ = __name__ + '.re' +sys.modules[re.__name__] = re diff --git a/mypy/__main__.py b/mypy/__main__.py index 625242d100be..0a6f79261a53 100644 --- a/mypy/__main__.py +++ b/mypy/__main__.py @@ -2,10 +2,4 @@ from mypy.main import main - -def console_entry() -> None: - main(None) - - -if __name__ == '__main__': - main(None) +main(None) diff --git a/mypy/checker.py b/mypy/checker.py index fcd334edbcce..0a08aca51cb4 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -616,8 +616,7 @@ def is_implicit_any(t: Type) -> bool: self.check_reverse_op_method(item, typ, name) elif name in ('__getattr__', '__getattribute__'): self.check_getattr_method(typ, defn) - elif name == '__setattr__': - self.check_setattr_method(typ, defn) + # Refuse contravariant return type variable if isinstance(typ.ret_type, TypeVarType): if typ.ret_type.variance == CONTRAVARIANT: @@ -917,15 +916,6 @@ def check_getattr_method(self, typ: CallableType, context: Context) -> None: if not is_subtype(typ, method_type): self.msg.invalid_signature(typ, context) - def check_setattr_method(self, typ: CallableType, context: Context) -> None: - method_type = CallableType([AnyType(), self.named_type('builtins.str'), AnyType()], - [nodes.ARG_POS, nodes.ARG_POS, nodes.ARG_POS], - [None, None, None], - NoneTyp(), - self.named_type('builtins.function')) - if not is_subtype(typ, method_type): - self.msg.invalid_signature(typ, context) - def expand_typevars(self, defn: FuncItem, typ: CallableType) -> List[Tuple[FuncItem, CallableType]]: # TODO use generator @@ -1874,8 +1864,7 @@ def check_return_stmt(self, s: ReturnStmt) -> None: if isinstance(typ, AnyType): # (Unless you asked to be warned in that case, and the # function is not declared to return Any) - if (self.options.warn_return_any and - not is_proper_subtype(AnyType(), return_type)): + if not isinstance(return_type, AnyType) and self.options.warn_return_any: self.warn(messages.RETURN_ANY.format(return_type), s) return diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 074438e53761..016668b46105 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -45,7 +45,6 @@ from mypy.typevars import fill_typevars from mypy.visitor import ExpressionVisitor from mypy.funcplugins import get_function_plugin_callbacks, PluginCallback -from mypy.typeanal import make_optional_type from mypy import experiments @@ -154,13 +153,7 @@ def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type: result = type_object_type(node, self.named_type) elif isinstance(node, MypyFile): # Reference to a module object. - try: - result = self.named_type('types.ModuleType') - except KeyError: - # In test cases might 'types' may not be available. - # Fall back to a dummy 'object' type instead to - # avoid a crash. 
- result = self.named_type('builtins.object') + result = self.named_type('types.ModuleType') elif isinstance(node, Decorator): result = self.analyze_var_ref(node.var, e) else: @@ -1935,11 +1928,10 @@ def analyze_super(self, e: SuperExpr, is_lvalue: bool) -> Type: return AnyType() def visit_slice_expr(self, e: SliceExpr) -> Type: - expected = make_optional_type(self.named_type('builtins.int')) for index in [e.begin_index, e.end_index, e.stride]: if index: t = self.accept(index) - self.chk.check_subtype(t, expected, + self.chk.check_subtype(t, self.named_type('builtins.int'), index, messages.INVALID_SLICE_INDEX) return self.named_type('builtins.slice') diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 4525b1446bda..d4dca6b6441d 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -245,15 +245,6 @@ def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo, getattr_type = expand_type_by_instance(bound_method, typ) if isinstance(getattr_type, CallableType): return getattr_type.ret_type - else: - setattr_meth = info.get_method('__setattr__') - if setattr_meth and setattr_meth.info.fullname() != 'builtins.object': - setattr_func = function_type(setattr_meth, builtin_type('builtins.function')) - bound_type = bind_self(setattr_func, original_type) - typ = map_instance_to_supertype(itype, setattr_meth.info) - setattr_type = expand_type_by_instance(bound_type, typ) - if isinstance(setattr_type, CallableType) and len(setattr_type.arg_types) > 0: - return setattr_type.arg_types[-1] if itype.type.fallback_to_any: return AnyType() diff --git a/mypy/fastparse.py b/mypy/fastparse.py index 10ad642dcdf0..bbf20c14be16 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -30,7 +30,6 @@ from mypy import experiments from mypy import messages from mypy.errors import Errors -from mypy.options import Options try: from typed_ast import ast3 @@ -61,12 +60,14 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None, - options: Options = Options()) -> MypyFile: - + pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION, + custom_typing_module: str = None) -> MypyFile: """Parse a source file, without doing any semantic analysis. Return the parse tree. If errors is not provided, raise ParseError on failure. Otherwise, use the errors object to report parse errors. + + The pyversion (major, minor) argument determines the Python syntax variant. 
""" raise_on_error = False if errors is None: @@ -75,16 +76,14 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None, errors.set_file('' if fnam is None else fnam, None) is_stub_file = bool(fnam) and fnam.endswith('.pyi') try: - if is_stub_file: - feature_version = defaults.PYTHON3_VERSION[1] - else: - assert options.python_version[0] >= 3 - feature_version = options.python_version[1] + assert pyversion[0] >= 3 or is_stub_file + feature_version = pyversion[1] if not is_stub_file else defaults.PYTHON3_VERSION[1] ast = ast3.parse(source, fnam, 'exec', feature_version=feature_version) - tree = ASTConverter(options=options, + tree = ASTConverter(pyversion=pyversion, is_stub=is_stub_file, errors=errors, + custom_typing_module=custom_typing_module, ).visit(ast) tree.path = fnam tree.is_stub = is_stub_file @@ -139,15 +138,17 @@ def is_no_type_check_decorator(expr: ast3.expr) -> bool: class ASTConverter(ast3.NodeTransformer): # type: ignore # typeshed PR #931 def __init__(self, - options: Options, + pyversion: Tuple[int, int], is_stub: bool, - errors: Errors) -> None: + errors: Errors, + custom_typing_module: str = None) -> None: self.class_nesting = 0 self.imports = [] # type: List[ImportBase] - self.options = options + self.pyversion = pyversion self.is_stub = is_stub self.errors = errors + self.custom_typing_module = custom_typing_module def fail(self, msg: str, line: int, column: int) -> None: self.errors.report(line, column, msg) @@ -261,9 +262,9 @@ def translate_module_id(self, id: str) -> str: For example, translate '__builtin__' in Python 2 to 'builtins'. """ - if id == self.options.custom_typing_module: + if id == self.custom_typing_module: return 'typing' - elif id == '__builtin__' and self.options.python_version[0] == 2: + elif id == '__builtin__' and self.pyversion[0] == 2: # HACK: __builtin__ in Python 2 is aliases to builtins. However, the implementation # is named __builtin__.py (there is another layer of translation elsewhere). return 'builtins' @@ -390,7 +391,7 @@ def do_func_def(self, n: Union[ast3.FunctionDef, ast3.AsyncFunctionDef], return func_def def set_type_optional(self, type: Type, initializer: Expression) -> None: - if self.options.no_implicit_optional or not experiments.STRICT_OPTIONAL: + if not experiments.STRICT_OPTIONAL: return # Indicate that type should be wrapped in an Optional if arg is initialized to None. optional = isinstance(initializer, NameExpr) and initializer.name == 'None' @@ -845,56 +846,50 @@ def visit_Num(self, n: ast3.Num) -> Union[IntExpr, FloatExpr, ComplexExpr]: # Str(string s) @with_line def visit_Str(self, n: ast3.Str) -> Union[UnicodeExpr, StrExpr]: - # Hack: assume all string literals in Python 2 stubs are normal - # strs (i.e. not unicode). All stubs are parsed with the Python 3 - # parser, which causes unprefixed string literals to be interpreted - # as unicode instead of bytes. This hack is generally okay, - # because mypy considers str literals to be compatible with - # unicode. - return StrExpr(n.s) + if self.pyversion[0] >= 3 or self.is_stub: + # Hack: assume all string literals in Python 2 stubs are normal + # strs (i.e. not unicode). All stubs are parsed with the Python 3 + # parser, which causes unprefixed string literals to be interpreted + # as unicode instead of bytes. This hack is generally okay, + # because mypy considers str literals to be compatible with + # unicode. 
+ return StrExpr(n.s) + else: + return UnicodeExpr(n.s) # Only available with typed_ast >= 0.6.2 if hasattr(ast3, 'JoinedStr'): # JoinedStr(expr* values) @with_line def visit_JoinedStr(self, n: ast3.JoinedStr) -> Expression: - # Each of n.values is a str or FormattedValue; we just concatenate - # them all using ''.join. - empty_string = StrExpr('') - empty_string.set_line(n.lineno, n.col_offset) - strs_to_join = ListExpr(self.translate_expr_list(n.values)) - strs_to_join.set_line(empty_string) - join_method = MemberExpr(empty_string, 'join') - join_method.set_line(empty_string) - result_expression = CallExpr(join_method, - [strs_to_join], - [ARG_POS]) - return result_expression - - # FormattedValue(expr value) - @with_line - def visit_FormattedValue(self, n: ast3.FormattedValue) -> Expression: - # A FormattedValue is a component of a JoinedStr, or it can exist - # on its own. We translate them to individual '{}'.format(value) - # calls -- we don't bother with the conversion/format_spec fields. - exp = self.visit(n.value) - exp.set_line(n.lineno, n.col_offset) - format_string = StrExpr('{}') + arg_count = len(n.values) + format_string = StrExpr('{}' * arg_count) format_string.set_line(n.lineno, n.col_offset) format_method = MemberExpr(format_string, 'format') format_method.set_line(format_string) + format_args = self.translate_expr_list(n.values) + format_arg_kinds = [ARG_POS] * arg_count result_expression = CallExpr(format_method, - [exp], - [ARG_POS]) + format_args, + format_arg_kinds) return result_expression + # FormattedValue(expr value) + @with_line + def visit_FormattedValue(self, n: ast3.FormattedValue) -> Expression: + return self.visit(n.value) + # Bytes(bytes s) @with_line def visit_Bytes(self, n: ast3.Bytes) -> Union[BytesExpr, StrExpr]: # The following line is a bit hacky, but is the best way to maintain # compatibility with how mypy currently parses the contents of bytes literals. contents = str(n.s)[2:-1] - return BytesExpr(contents) + + if self.pyversion[0] >= 3: + return BytesExpr(contents) + else: + return StrExpr(contents) # NameConstant(singleton value) def visit_NameConstant(self, n: ast3.NameConstant) -> NameExpr: diff --git a/mypy/fastparse2.py b/mypy/fastparse2.py index 109dfe407cf2..b7d5e9d400db 100644 --- a/mypy/fastparse2.py +++ b/mypy/fastparse2.py @@ -38,11 +38,11 @@ from mypy.types import ( Type, CallableType, AnyType, UnboundType, EllipsisType ) +from mypy import defaults from mypy import experiments from mypy import messages from mypy.errors import Errors from mypy.fastparse import TypeConverter, parse_type_comment -from mypy.options import Options try: from typed_ast import ast27 @@ -74,11 +74,14 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None, - options: Options = Options()) -> MypyFile: + pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION, + custom_typing_module: str = None) -> MypyFile: """Parse a source file, without doing any semantic analysis. Return the parse tree. If errors is not provided, raise ParseError on failure. Otherwise, use the errors object to report parse errors. + + The pyversion (major, minor) argument determines the Python syntax variant. 
""" raise_on_error = False if errors is None: @@ -87,11 +90,12 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None, errors.set_file('' if fnam is None else fnam, None) is_stub_file = bool(fnam) and fnam.endswith('.pyi') try: - assert options.python_version[0] < 3 and not is_stub_file + assert pyversion[0] < 3 and not is_stub_file ast = ast27.parse(source, fnam, 'exec') - tree = ASTConverter(options=options, + tree = ASTConverter(pyversion=pyversion, is_stub=is_stub_file, errors=errors, + custom_typing_module=custom_typing_module, ).visit(ast) assert isinstance(tree, MypyFile) tree.path = fnam @@ -133,15 +137,17 @@ def is_no_type_check_decorator(expr: ast27.expr) -> bool: class ASTConverter(ast27.NodeTransformer): def __init__(self, - options: Options, + pyversion: Tuple[int, int], is_stub: bool, - errors: Errors) -> None: + errors: Errors, + custom_typing_module: str = None) -> None: self.class_nesting = 0 self.imports = [] # type: List[ImportBase] - self.options = options + self.pyversion = pyversion self.is_stub = is_stub self.errors = errors + self.custom_typing_module = custom_typing_module def fail(self, msg: str, line: int, column: int) -> None: self.errors.report(line, column, msg) @@ -256,9 +262,9 @@ def translate_module_id(self, id: str) -> str: For example, translate '__builtin__' in Python 2 to 'builtins'. """ - if id == self.options.custom_typing_module: + if id == self.custom_typing_module: return 'typing' - elif id == '__builtin__': + elif id == '__builtin__' and self.pyversion[0] == 2: # HACK: __builtin__ in Python 2 is aliases to builtins. However, the implementation # is named __builtin__.py (there is another layer of translation elsewhere). return 'builtins' @@ -364,7 +370,7 @@ def visit_FunctionDef(self, n: ast27.FunctionDef) -> Statement: return func_def def set_type_optional(self, type: Type, initializer: Expression) -> None: - if self.options.no_implicit_optional or not experiments.STRICT_OPTIONAL: + if not experiments.STRICT_OPTIONAL: return # Indicate that type should be wrapped in an Optional if arg is initialized to None. optional = isinstance(initializer, NameExpr) and initializer.name == 'None' @@ -864,9 +870,16 @@ def visit_Str(self, s: ast27.Str) -> Expression: # The following line is a bit hacky, but is the best way to maintain # compatibility with how mypy currently parses the contents of bytes literals. contents = str(n)[2:-1] - return StrExpr(contents) + + if self.pyversion[0] >= 3: + return BytesExpr(contents) + else: + return StrExpr(contents) else: - return UnicodeExpr(s.s) + if self.pyversion[0] >= 3 or self.is_stub: + return StrExpr(s.s) + else: + return UnicodeExpr(s.s) # Ellipsis def visit_Ellipsis(self, n: ast27.Ellipsis) -> EllipsisExpr: diff --git a/mypy/funcplugins.py b/mypy/funcplugins.py index b1113ab30ae9..7d5c25248e0a 100644 --- a/mypy/funcplugins.py +++ b/mypy/funcplugins.py @@ -44,7 +44,7 @@ def open_callback( named_generic_type: Callable[[str, List[Type]], Type]) -> Type: """Infer a better return type for 'open'. - Infer TextIO or BinaryIO as the return value if the mode argument is not + Infer IO[str] or IO[bytes] as the return value if the mode argument is not given or is a literal. 
""" mode = None @@ -55,9 +55,10 @@ def open_callback( if mode is not None: assert isinstance(inferred_return_type, Instance) if 'b' in mode: - return named_generic_type('typing.BinaryIO', []) + arg = named_generic_type('builtins.bytes', []) else: - return named_generic_type('typing.TextIO', []) + arg = named_generic_type('builtins.str', []) + return Instance(inferred_return_type.type, [arg]) return inferred_return_type diff --git a/mypy/main.py b/mypy/main.py index f73cbeb99972..a5511671c966 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -24,10 +24,6 @@ PY_EXTENSIONS = tuple(PYTHON_EXTENSIONS) -class InvalidPackageName(Exception): - """Exception indicating that a package name was invalid.""" - - def main(script_path: str, args: List[str] = None) -> None: """Main entry point to the type checker. @@ -251,8 +247,6 @@ def add_invertible_flag(flag: str, add_invertible_flag('--show-error-context', default=False, dest='show_error_context', help='Precede errors with "note:" messages explaining context') - add_invertible_flag('--no-implicit-optional', default=False, strict_flag=True, - help="don't assume arguments with default values of None are Optional") parser.add_argument('-i', '--incremental', action='store_true', help="enable module cache") parser.add_argument('--quick-and-dirty', action='store_true', @@ -463,15 +457,9 @@ def add_invertible_flag(flag: str, targets = [] for f in special_opts.files: if f.endswith(PY_EXTENSIONS): - try: - targets.append(BuildSource(f, crawl_up(f)[1], None)) - except InvalidPackageName as e: - fail(str(e)) + targets.append(BuildSource(f, crawl_up(f)[1], None)) elif os.path.isdir(f): - try: - sub_targets = expand_dir(f) - except InvalidPackageName as e: - fail(str(e)) + sub_targets = expand_dir(f) if not sub_targets: fail("There are no .py[i] files in directory '{}'" .format(f)) @@ -538,14 +526,10 @@ def crawl_up(arg: str) -> Tuple[str, str]: dir, base = os.path.split(dir) if not base: break - # Ensure that base is a valid python module name - if not base.isidentifier(): - raise InvalidPackageName('{} is not a valid Python package name'.format(base)) if mod == '__init__' or not mod: mod = base else: mod = base + '.' 
+ mod - return dir, mod diff --git a/mypy/options.py b/mypy/options.py index 5e841bee0c6e..8c8764200800 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -29,7 +29,6 @@ class Options: "warn_return_any", "ignore_errors", "strict_boolean", - "no_implicit_optional", } OPTIONS_AFFECTING_CACHE = PER_MODULE_OPTIONS | {"strict_optional", "quick_and_dirty"} @@ -93,9 +92,6 @@ def __init__(self) -> None: # Alternate way to show/hide strict-None-checking related errors self.show_none_errors = True - # Don't assume arguments with default values of None are Optional - self.no_implicit_optional = False - # Use script name instead of __main__ self.scripts_are_modules = False diff --git a/mypy/parse.py b/mypy/parse.py index 2e02269f5e46..13fd58be3f60 100644 --- a/mypy/parse.py +++ b/mypy/parse.py @@ -22,10 +22,12 @@ def parse(source: Union[str, bytes], return mypy.fastparse.parse(source, fnam=fnam, errors=errors, - options=options) + pyversion=options.python_version, + custom_typing_module=options.custom_typing_module) else: import mypy.fastparse2 return mypy.fastparse2.parse(source, fnam=fnam, errors=errors, - options=options) + pyversion=options.python_version, + custom_typing_module=options.custom_typing_module) diff --git a/mypy/report.py b/mypy/report.py index 157aa1caab6f..74b44ac1f995 100644 --- a/mypy/report.py +++ b/mypy/report.py @@ -334,7 +334,7 @@ def on_file(self, etree.SubElement(root, 'line', number=str(lineno), precision=stats.precision_names[status], - content=line_text.rstrip('\n')) + content=line_text[:-1]) # Assumes a layout similar to what XmlReporter uses. xslt_path = os.path.relpath('mypy-html.xslt', path) transform_pi = etree.ProcessingInstruction('xml-stylesheet', diff --git a/mypy/semanal.py b/mypy/semanal.py index b4adfb85f3cb..d16be31e8e18 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -163,21 +163,6 @@ '_make', '_replace', '_asdict', '_source', '__annotations__') -# Map from the full name of a missing definition to the test fixture (under -# test-data/unit/fixtures/) that provides the definition. This is used for -# generating better error messages when running mypy tests only. -SUGGESTED_TEST_FIXTURES = { - 'typing.List': 'list.pyi', - 'typing.Dict': 'dict.pyi', - 'typing.Set': 'set.pyi', - 'builtins.bool': 'bool.pyi', - 'builtins.Exception': 'exception.pyi', - 'builtins.BaseException': 'exception.pyi', - 'builtins.isinstance': 'isinstancelist.pyi', - 'builtins.property': 'property.pyi', - 'builtins.classmethod': 'classmethod.pyi', -} - class SemanticAnalyzer(NodeVisitor): """Semantically analyze parsed mypy files. @@ -564,10 +549,9 @@ def analyze_property_with_multi_part_definition(self, defn: OverloadedFuncDef) - first_item.var.is_settable_property = True # Get abstractness from the original definition. item.func.is_abstract = first_item.func.is_abstract + item.func.accept(self) else: self.fail("Decorated property not supported", item) - if isinstance(item, Decorator): - item.func.accept(self) def analyze_function(self, defn: FuncItem) -> None: is_method = self.is_class_scope() @@ -1389,31 +1373,20 @@ def process_import_over_existing_name(self, def normalize_type_alias(self, node: SymbolTableNode, ctx: Context) -> SymbolTableNode: normalized = False - fullname = node.fullname - if fullname in type_aliases: + if node.fullname in type_aliases: # Node refers to an aliased type such as typing.List; normalize. 
- node = self.lookup_qualified(type_aliases[fullname], ctx) - if node is None: - self.add_fixture_note(fullname, ctx) - return None + node = self.lookup_qualified(type_aliases[node.fullname], ctx) normalized = True - if fullname in collections_type_aliases: + if node.fullname in collections_type_aliases: # Similar, but for types from the collections module like typing.DefaultDict self.add_module_symbol('collections', '__mypy_collections__', False, ctx) - node = self.lookup_qualified(collections_type_aliases[fullname], ctx) + node = self.lookup_qualified(collections_type_aliases[node.fullname], ctx) normalized = True if normalized: node = SymbolTableNode(node.kind, node.node, node.mod_id, node.type_override, normalized=True) return node - def add_fixture_note(self, fullname: str, ctx: Context) -> None: - self.note('Maybe your test fixture does not define "{}"?'.format(fullname), ctx) - if fullname in SUGGESTED_TEST_FIXTURES: - self.note( - 'Consider adding [builtins fixtures/{}] to your test description'.format( - SUGGESTED_TEST_FIXTURES[fullname]), ctx) - def correct_relative_import(self, node: Union[ImportFrom, ImportAll]) -> str: if node.relative == 0: return node.id @@ -1547,8 +1520,6 @@ def visit_assignment_stmt(self, s: AssignmentStmt) -> None: self.process_namedtuple_definition(s) self.process_typeddict_definition(s) self.process_enum_call(s) - if not s.type: - self.process_module_assignment(s.lvalues, s.rvalue, s) if (len(s.lvalues) == 1 and isinstance(s.lvalues[0], NameExpr) and s.lvalues[0].name == '__all__' and s.lvalues[0].kind == GDEF and @@ -1851,6 +1822,10 @@ def check_newtype_args(self, name: str, call: CallExpr, context: Context) -> Opt return None old_type = self.anal_type(unanalyzed_type) + if isinstance(old_type, Instance) and old_type.type.is_newtype: + self.fail("Argument 2 to NewType(...) cannot be another NewType", context) + has_failed = True + return None if has_failed else old_type def build_newtype_typeinfo(self, name: str, old_type: Type, base_type: Instance) -> TypeInfo: @@ -2385,66 +2360,6 @@ def is_classvar(self, typ: Type) -> bool: def fail_invalid_classvar(self, context: Context) -> None: self.fail('ClassVar can only be used for assignments in class body', context) - def process_module_assignment(self, lvals: List[Expression], rval: Expression, - ctx: AssignmentStmt) -> None: - """Propagate module references across assignments. - - Recursively handles the simple form of iterable unpacking; doesn't - handle advanced unpacking with *rest, dictionary unpacking, etc. - - In an expression like x = y = z, z is the rval and lvals will be [x, - y]. - - """ - if all(isinstance(v, (TupleExpr, ListExpr)) for v in lvals + [rval]): - # rval and all lvals are either list or tuple, so we are dealing - # with unpacking assignment like `x, y = a, b`. Mypy didn't - # understand our all(isinstance(...)), so cast them as - # Union[TupleExpr, ListExpr] so mypy knows it is safe to access - # their .items attribute. - seq_lvals = cast(List[Union[TupleExpr, ListExpr]], lvals) - seq_rval = cast(Union[TupleExpr, ListExpr], rval) - # given an assignment like: - # (x, y) = (m, n) = (a, b) - # we now have: - # seq_lvals = [(x, y), (m, n)] - # seq_rval = (a, b) - # We now zip this into: - # elementwise_assignments = [(a, x, m), (b, y, n)] - # where each elementwise assignment includes one element of rval and the - # corresponding element of each lval. 
Basically we unpack - # (x, y) = (m, n) = (a, b) - # into elementwise assignments - # x = m = a - # y = n = b - # and then we recursively call this method for each of those assignments. - # If the rval and all lvals are not all of the same length, zip will just ignore - # extra elements, so no error will be raised here; mypy will later complain - # about the length mismatch in type-checking. - elementwise_assignments = zip(seq_rval.items, *[v.items for v in seq_lvals]) - for rv, *lvs in elementwise_assignments: - self.process_module_assignment(lvs, rv, ctx) - elif isinstance(rval, NameExpr): - rnode = self.lookup(rval.name, ctx) - if rnode and rnode.kind == MODULE_REF: - for lval in lvals: - if not isinstance(lval, NameExpr): - continue - # respect explicitly annotated type - if (isinstance(lval.node, Var) and lval.node.type is not None): - continue - lnode = self.lookup(lval.name, ctx) - if lnode: - if lnode.kind == MODULE_REF and lnode.node is not rnode.node: - self.fail( - "Cannot assign multiple modules to name '{}' " - "without explicit 'types.ModuleType' annotation".format(lval.name), - ctx) - # never create module alias except on initial var definition - elif lval.is_def: - lnode.kind = MODULE_REF - lnode.node = rnode.node - def process_enum_call(self, s: AssignmentStmt) -> None: """Check if s defines an Enum; if yes, store the definition in symbol table.""" if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], NameExpr): @@ -3016,34 +2931,21 @@ def visit_member_expr(self, expr: MemberExpr) -> None: if full_name in obsolete_name_mapping: self.fail("Module%s has no attribute %r (it's now called %r)" % ( mod_name, expr.name, obsolete_name_mapping[full_name]), expr) - elif isinstance(base, RefExpr): - # This branch handles the case C.bar (or cls.bar or self.bar inside - # a classmethod/method), where C is a class and bar is a type - # definition or a module resulting from `import bar` (or a module - # assignment) inside class C. We look up bar in the class' TypeInfo - # namespace. This is done only when bar is a module or a type; - # other things (e.g. methods) are handled by other code in - # checkmember. - type_info = None - if isinstance(base.node, TypeInfo): - # C.bar where C is a class - type_info = base.node - elif isinstance(base.node, Var) and self.type and self.function_stack: - # check for self.bar or cls.bar in method/classmethod - func_def = self.function_stack[-1] - if not func_def.is_static and isinstance(func_def.type, CallableType): - formal_arg = func_def.type.argument_by_name(base.node.name()) - if formal_arg and formal_arg.pos == 0: - type_info = self.type - if type_info: - n = type_info.names.get(expr.name) - if n is not None and (n.kind == MODULE_REF or isinstance(n.node, TypeInfo)): - n = self.normalize_type_alias(n, expr) - if not n: - return - expr.kind = n.kind - expr.fullname = n.fullname - expr.node = n.node + elif isinstance(base, RefExpr) and isinstance(base.node, TypeInfo): + n = base.node.names.get(expr.name) + if n is not None and (n.kind == MODULE_REF or isinstance(n.node, TypeInfo)): + # This branch handles the case C.bar where C is a class and + # bar is a type definition or a module resulting from + # `import bar` inside class C. Here base.node is a TypeInfo, + # and again we look up the name in its namespace. + # This is done only when bar is a module or a type; other + # things (e.g. methods) are handled by other code in checkmember. 
+ n = self.normalize_type_alias(n, expr) + if not n: + return + expr.kind = n.kind + expr.fullname = n.fullname + expr.node = n.node def visit_op_expr(self, expr: OpExpr) -> None: expr.left.accept(self) @@ -3426,12 +3328,6 @@ def name_not_defined(self, name: str, ctx: Context) -> None: if extra: message += ' {}'.format(extra) self.fail(message, ctx) - if 'builtins.{}'.format(name) in SUGGESTED_TEST_FIXTURES: - # The user probably has a missing definition in a test fixture. Let's verify. - fullname = 'builtins.{}'.format(name) - if self.lookup_fully_qualified_or_none(fullname) is None: - # Yes. Generate a helpful note. - self.add_fixture_note(fullname, ctx) def name_already_defined(self, name: str, ctx: Context) -> None: self.fail("Name '{}' already defined".format(name), ctx) diff --git a/mypy/subtypes.py b/mypy/subtypes.py index b03843fba9a4..8ca6421a0a91 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -69,6 +69,11 @@ def is_subtype(left: Type, right: Type, elif is_subtype_of_item: return True # otherwise, fall through + # Treat builtins.type the same as Type[Any] + elif is_named_instance(left, 'builtins.type'): + return is_subtype(TypeType(AnyType()), right) + elif is_named_instance(right, 'builtins.type'): + return is_subtype(left, TypeType(AnyType())) return left.accept(SubtypeVisitor(right, type_parameter_checker, ignore_pos_arg_names=ignore_pos_arg_names)) @@ -153,18 +158,16 @@ def visit_instance(self, left: Instance) -> bool: item = right.item if isinstance(item, TupleType): item = item.fallback - if is_named_instance(left, 'builtins.type'): - return is_subtype(TypeType(AnyType()), right) - if left.type.is_metaclass(): - if isinstance(item, AnyType): - return True - if isinstance(item, Instance): - # Special-case enum since we don't have better way of expressing it - if (is_named_instance(left, 'enum.EnumMeta') - and is_named_instance(item, 'enum.Enum')): - return True - return is_named_instance(item, 'builtins.object') - return False + if isinstance(item, Instance): + return is_subtype(left, item.type.metaclass_type) + elif isinstance(item, AnyType): + # Special case: all metaclasses are subtypes of Type[Any] + mro = left.type.mro or [] + return any(base.fullname() == 'builtins.type' for base in mro) + else: + return False + else: + return False def visit_type_var(self, left: TypeVarType) -> bool: right = self.right @@ -260,8 +263,8 @@ def visit_overloaded(self, left: Overloaded) -> bool: elif isinstance(right, TypeType): # All the items must have the same type object status, so # it's sufficient to query only (any) one of them. - # This is unsound, we don't check all the __init__ signatures. - return left.is_type_obj() and is_subtype(left.items()[0], right) + # This is unsound, we don't check the __init__ signature. + return left.is_type_obj() and is_subtype(left.items()[0].ret_type, right.item) else: return False @@ -281,14 +284,11 @@ def visit_type_type(self, left: TypeType) -> bool: # This is unsound, we don't check the __init__ signature. return is_subtype(left.item, right.ret_type) if isinstance(right, Instance): - if right.type.fullname() in ['builtins.object', 'builtins.type']: + if right.type.fullname() == 'builtins.object': + # treat builtins.object the same as Any. 
return True item = left.item - if isinstance(item, TypeVarType): - item = item.upper_bound - if isinstance(item, Instance): - metaclass = item.type.metaclass_type - return metaclass is not None and is_subtype(metaclass, right) + return isinstance(item, Instance) and is_subtype(item, right.type.metaclass_type) return False diff --git a/mypy/test/data.py b/mypy/test/data.py index ccee92eac276..b72d9c5ea5ac 100644 --- a/mypy/test/data.py +++ b/mypy/test/data.py @@ -54,7 +54,6 @@ def parse_test_cases( output_files = [] # type: List[Tuple[str, str]] # path and contents for output files tcout = [] # type: List[str] # Regular output errors tcout2 = {} # type: Dict[int, List[str]] # Output errors for incremental, runs 2+ - deleted_paths = {} # type: Dict[int, Set[str]] # from run number of paths stale_modules = {} # type: Dict[int, Set[str]] # from run number to module names rechecked_modules = {} # type: Dict[ int, Set[str]] # from run number module names while i < len(p) and p[i].id != 'case': @@ -68,7 +67,7 @@ def parse_test_cases( elif p[i].id == 'outfile': output_files.append(file_entry) elif p[i].id in ('builtins', 'builtins_py2'): - # Use an alternative stub file for the builtins module. + # Use a custom source file for the std module. arg = p[i].arg assert arg is not None mpath = join(os.path.dirname(path), arg) @@ -79,13 +78,6 @@ def parse_test_cases( fnam = '__builtin__.pyi' with open(mpath) as f: files.append((join(base_path, fnam), f.read())) - elif p[i].id == 'typing': - # Use an alternative stub file for the typing module. - arg = p[i].arg - assert arg is not None - src_path = join(os.path.dirname(path), arg) - with open(src_path) as f: - files.append((join(base_path, 'typing.pyi'), f.read())) elif re.match(r'stale[0-9]*$', p[i].id): if p[i].id == 'stale': passnum = 1 @@ -107,16 +99,6 @@ def parse_test_cases( rechecked_modules[passnum] = set() else: rechecked_modules[passnum] = {item.strip() for item in arg.split(',')} - elif p[i].id == 'delete': - # File to delete during a multi-step test case - arg = p[i].arg - assert arg is not None - m = re.match(r'(.*)\.([0-9]+)$', arg) - assert m, 'Invalid delete section: {}'.format(arg) - num = int(m.group(2)) - assert num >= 2, "Can't delete during step {}".format(num) - full = join(base_path, m.group(1)) - deleted_paths.setdefault(num, set()).add(full) elif p[i].id == 'out' or p[i].id == 'out1': tcout = p[i].data if native_sep and os.path.sep == '\\': @@ -160,7 +142,7 @@ def parse_test_cases( tc = DataDrivenTestCase(p[i0].arg, input, tcout, tcout2, path, p[i0].line, lastline, perform, files, output_files, stale_modules, - rechecked_modules, deleted_paths, native_sep) + rechecked_modules, native_sep) out.append(tc) if not ok: raise ValueError( @@ -198,7 +180,6 @@ def __init__(self, output_files: List[Tuple[str, str]], expected_stale_modules: Dict[int, Set[str]], expected_rechecked_modules: Dict[int, Set[str]], - deleted_paths: Dict[int, Set[str]], native_sep: bool = False, ) -> None: super().__init__(name) @@ -213,30 +194,24 @@ def __init__(self, self.output_files = output_files self.expected_stale_modules = expected_stale_modules self.expected_rechecked_modules = expected_rechecked_modules - self.deleted_paths = deleted_paths self.native_sep = native_sep def set_up(self) -> None: super().set_up() encountered_files = set() self.clean_up = [] - all_deleted = [] # type: List[str] - for paths in self.deleted_paths.values(): - all_deleted += paths for path, content in self.files: dir = os.path.dirname(path) for d in self.add_dirs(dir): 
self.clean_up.append((True, d)) with open(path, 'w') as f: f.write(content) - if path not in all_deleted: - # TODO: Don't assume that deleted files don't get reintroduced. - self.clean_up.append((False, path)) + self.clean_up.append((False, path)) encountered_files.add(path) if re.search(r'\.[2-9]$', path): # Make sure new files introduced in the second and later runs are accounted for renamed_path = path[:-2] - if renamed_path not in encountered_files and renamed_path not in all_deleted: + if renamed_path not in encountered_files: encountered_files.add(renamed_path) self.clean_up.append((False, renamed_path)) for path, _ in self.output_files: diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py index 2f28ab1e47a5..32d889bd20cf 100644 --- a/mypy/test/helpers.py +++ b/mypy/test/helpers.py @@ -1,9 +1,8 @@ -import os -import re import sys -import time +import re +import os -from typing import List, Dict, Tuple, Callable, Any +from typing import List, Dict, Tuple from mypy import defaults from mypy.myunit import AssertionFailure @@ -284,26 +283,3 @@ def normalize_error_messages(messages: List[str]) -> List[str]: for m in messages: a.append(m.replace(os.sep, '/')) return a - - -def retry_on_error(func: Callable[[], Any], max_wait: float = 1.0) -> None: - """Retry callback with exponential backoff when it raises OSError. - - If the function still generates an error after max_wait seconds, propagate - the exception. - - This can be effective against random file system operation failures on - Windows. - """ - t0 = time.time() - wait_time = 0.01 - while True: - try: - func() - return - except OSError: - wait_time = min(wait_time * 2, t0 + max_wait - time.time()) - if wait_time <= 0.01: - # Done enough waiting, the error seems persistent. - raise - time.sleep(wait_time) diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index 91a818ac0f01..df28afcaff85 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -17,7 +17,7 @@ from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite from mypy.test.helpers import ( assert_string_arrays_equal, normalize_error_messages, - retry_on_error, testcase_pyversion, update_testcase_output, + testcase_pyversion, update_testcase_output, ) from mypy.errors import CompileError from mypy.options import Options @@ -75,7 +75,6 @@ 'check-underscores.test', 'check-classvar.test', 'check-enum.test', - 'check-incomplete-fixture.test', ] @@ -148,18 +147,13 @@ def run_case_once(self, testcase: DataDrivenTestCase, incremental_step: int = 0) if file.endswith('.' + str(incremental_step)): full = os.path.join(dn, file) target = full[:-2] - # Use retries to work around potential flakiness on Windows (AppVeyor). - retry_on_error(lambda: shutil.copy(full, target)) + shutil.copy(full, target) # In some systems, mtime has a resolution of 1 second which can cause # annoying-to-debug issues when a file has the same size after a # change. We manually set the mtime to circumvent this. new_time = os.stat(target).st_mtime + 1 os.utime(target, times=(new_time, new_time)) - # Delete files scheduled to be deleted in [delete .num] sections. - for path in testcase.deleted_paths.get(incremental_step, set()): - # Use retries to work around potential flakiness on Windows (AppVeyor). - retry_on_error(lambda: os.remove(path)) # Parse options after moving files (in case mypy.ini is being moved). 
options = self.parse_options(original_program_text, testcase, incremental_step) diff --git a/mypy/test/testpythoneval.py b/mypy/test/testpythoneval.py index 602692e073e4..85e9aa3751d7 100644 --- a/mypy/test/testpythoneval.py +++ b/mypy/test/testpythoneval.py @@ -84,8 +84,14 @@ def test_python_evaluation(testcase: DataDrivenTestCase) -> None: # This uses the same PYTHONPATH as the current process. returncode, out = run(mypy_cmdline) if returncode == 0: - # Execute the program. - returncode, interp_out = run([interpreter, program]) + # Set up module path for the execution. + # This needs the typing module but *not* the mypy module. + vers_dir = '2.7' if py2 else '3.2' + typing_path = os.path.join(testcase.old_cwd, 'lib-typing', vers_dir) + assert os.path.isdir(typing_path) + env = os.environ.copy() + env['PYTHONPATH'] = typing_path + returncode, interp_out = run([interpreter, program], env=env) out += interp_out # Remove temp file. os.remove(program_path) diff --git a/mypy/waiter.py b/mypy/waiter.py index e8ba99d4efd8..0f1759fefab8 100644 --- a/mypy/waiter.py +++ b/mypy/waiter.py @@ -9,7 +9,7 @@ from multiprocessing import cpu_count import pipes import re -from subprocess import Popen, STDOUT, DEVNULL +from subprocess import Popen, STDOUT import sys import tempfile import time @@ -25,22 +25,16 @@ class LazySubprocess: """Wrapper around a subprocess that runs a test task.""" def __init__(self, name: str, args: List[str], *, cwd: str = None, - env: Dict[str, str] = None, passthrough: Optional[int] = None) -> None: + env: Dict[str, str] = None) -> None: self.name = name self.args = args self.cwd = cwd self.env = env self.start_time = None # type: float self.end_time = None # type: float - # None means no passthrough - # otherwise, it represents verbosity level - self.passthrough = passthrough def start(self) -> None: - if self.passthrough is None or self.passthrough < 0: - self.outfile = tempfile.TemporaryFile() - else: - self.outfile = None + self.outfile = tempfile.TemporaryFile() self.start_time = time.perf_counter() self.process = Popen(self.args, cwd=self.cwd, env=self.env, stdout=self.outfile, stderr=STDOUT) @@ -53,8 +47,6 @@ def status(self) -> Optional[int]: return self.process.returncode def read_output(self) -> str: - if not self.outfile: - return '' file = self.outfile file.seek(0) # Assume it's ascii to avoid unicode headaches (and portability issues). diff --git a/runtests.py b/runtests.py index 634b4ce83f2e..83a6ffa0d3da 100755 --- a/runtests.py +++ b/runtests.py @@ -1,6 +1,29 @@ #!/usr/bin/env python3 """Mypy test runner.""" +if False: + import typing + +if True: + # When this is run as a script, `typing` is not available yet. + import sys + from os.path import join, isdir + + def get_versions(): # type: () -> typing.List[str] + major = sys.version_info[0] + minor = sys.version_info[1] + if major == 2: + return ['2.7'] + else: + # generates list of python versions to use. + # For Python2, this is only [2.7]. + # Otherwise, it is [3.4, 3.3, 3.2, 3.1, 3.0]. + return ['%d.%d' % (major, i) for i in range(minor, -1, -1)] + + sys.path[0:0] = [v for v in [join('lib-typing', v) for v in get_versions()] if isdir(v)] + # Now `typing` is available. 
+ + from typing import Dict, List, Optional, Set, Iterable from mypy.waiter import Waiter, LazySubprocess @@ -10,21 +33,8 @@ import itertools import os -from os.path import join, isdir import re -import sys - - -def get_versions(): # type: () -> List[str] - major = sys.version_info[0] - minor = sys.version_info[1] - if major == 2: - return ['2.7'] - else: - # generates list of python versions to use. - # For Python2, this is only [2.7]. - # Otherwise, it is [3.4, 3.3, 3.2, 3.1, 3.0]. - return ['%d.%d' % (major, i) for i in range(minor, -1, -1)] +import json # Ideally, all tests would be `discover`able so that they can be driven @@ -101,8 +111,7 @@ def add_pytest(self, name: str, pytest_args: List[str], coverage: bool = False) else: args = [sys.executable, '-m', 'pytest'] + pytest_args - self.waiter.add(LazySubprocess(full_name, args, env=self.env, passthrough=self.verbosity), - sequential=True) + self.waiter.add(LazySubprocess(full_name, args, env=self.env), sequential=True) def add_python(self, name: str, *args: str, cwd: Optional[str] = None) -> None: name = 'run %s' % name @@ -412,13 +421,6 @@ def main() -> None: pyt_arglist.append('--lf') if ff: pyt_arglist.append('--ff') - if verbosity >= 1: - pyt_arglist.extend(['-v'] * verbosity) - elif verbosity < 0: - pyt_arglist.extend(['-q'] * (-verbosity)) - if parallel_limit: - if '-n' not in pyt_arglist: - pyt_arglist.append('-n{}'.format(parallel_limit)) driver = Driver(whitelist=whitelist, blacklist=blacklist, lf=lf, ff=ff, arglist=arglist, pyt_arglist=pyt_arglist, verbosity=verbosity, @@ -427,6 +429,7 @@ def main() -> None: driver.prepend_path('PATH', [join(driver.cwd, 'scripts')]) driver.prepend_path('MYPYPATH', [driver.cwd]) driver.prepend_path('PYTHONPATH', [driver.cwd]) + driver.prepend_path('PYTHONPATH', [join(driver.cwd, 'lib-typing', v) for v in driver.versions]) driver.add_flake8() add_pytest(driver) diff --git a/scripts/stubgen b/scripts/stubgen old mode 100644 new mode 100755 diff --git a/setup.cfg b/setup.cfg index 27244e880337..0ec9131aa413 100644 --- a/setup.cfg +++ b/setup.cfg @@ -13,6 +13,8 @@ exclude = .cache, # Sphinx configuration is irrelevant docs/source/conf.py, + # external library with incompatible style + lib-typing/*, # conflicting styles misc/*, # external library with incompatible style diff --git a/setup.py b/setup.py index efc23f4a6fe4..78c6a639ad0a 100644 --- a/setup.py +++ b/setup.py @@ -94,6 +94,9 @@ def run(self): package_dir = {'mypy': 'mypy'} +scripts = ['scripts/mypy', 'scripts/stubgen'] +if os.name == 'nt': + scripts.append('scripts/mypy.bat') # These requirements are used when installing by other means than bdist_wheel. # E.g. "pip3 install ." 
or @@ -116,8 +119,7 @@ def run(self): package_dir=package_dir, py_modules=[], packages=['mypy'], - entry_points={'console_scripts': ['mypy=mypy.__main__:console_entry', - 'stubgen=mypy.stubgen:main']}, + scripts=scripts, data_files=data_files, classifiers=classifiers, cmdclass={'build_py': CustomPythonBuild}, diff --git a/test-data/stdlib-samples/3.2/test/test_genericpath.py b/test-data/stdlib-samples/3.2/test/test_genericpath.py index df0e10701d39..43b78e77db61 100644 --- a/test-data/stdlib-samples/3.2/test/test_genericpath.py +++ b/test-data/stdlib-samples/3.2/test/test_genericpath.py @@ -23,7 +23,7 @@ def safe_rmdir(dirname: str) -> None: class GenericTest(unittest.TestCase): # The path module to be tested - pathmodule = genericpath # type: Any + pathmodule = genericpath # type: Any common_attributes = ['commonprefix', 'getsize', 'getatime', 'getctime', 'getmtime', 'exists', 'isdir', 'isfile'] attributes = [] # type: List[str] diff --git a/test-data/unit/README.md b/test-data/unit/README.md index 693e7f4d8719..64737e8ca3b4 100644 --- a/test-data/unit/README.md +++ b/test-data/unit/README.md @@ -61,12 +61,9 @@ Where the stubs for builtins come from for a given test: - The builtins used by default in unit tests live in `test-data/unit/lib-stub`. -- Individual test cases can override the builtins stubs by using - `[builtins fixtures/foo.pyi]`; this targets files in `test-data/unit/fixtures`. - Feel free to modify existing files there or create new ones as you deem fit. - -- Test cases can also use `[typing fixtures/typing-full.pyi]` to use a more - complete stub for `typing` that contains the async types, among other things. +- Individual test cases can override the stubs by using `[builtins fixtures/foo.pyi]`; + this targets files in `test-data/unit/fixtures`. Feel free to modify existing files + there or create new ones as you deem fit. - Feel free to add additional stubs to that `fixtures` directory, but generally don't expand files in `lib-stub` without first discussing the @@ -81,11 +78,6 @@ First install any additional dependencies needed for testing: $ python3 -m pip install -U -r test-requirements.txt -You must also have a Python 2.7 binary installed that can import the `typing` -module: - - $ python2 -m pip install -U typing - To run all tests, run the script `runtests.py` in the mypy repository: $ ./runtests.py @@ -118,14 +110,13 @@ finer control over which unit tests are run and how, you can run `py.test` or $ ./runtests.py mypy.test.testlex -a -v -a '*backslash*' You can also run the type checker for manual testing without -installing it by setting up the Python module search path suitably: +installing anything by setting up the Python module search path +suitably (the lib-typing/3.2 path entry is not needed for Python 3.5 +or when you have manually installed the `typing` module): - $ export PYTHONPATH=$PWD + $ export PYTHONPATH=$PWD:$PWD/lib-typing/3.2 $ python -m mypy PROGRAM.py -You will have to manually install the `typing` module if you're running Python -3.4 or earlier. 
- You can add the entry scripts to PATH for a single python3 version: $ export PATH=$PWD/scripts diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test index f8ac01d8c830..672bf2b408b8 100644 --- a/test-data/unit/check-async-await.test +++ b/test-data/unit/check-async-await.test @@ -6,7 +6,6 @@ async def f() -> int: pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] [case testAsyncDefReturn] @@ -14,14 +13,12 @@ async def f() -> int: return 0 reveal_type(f()) # E: Revealed type is 'typing.Awaitable[builtins.int]' [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] [case testAsyncDefMissingReturn] # flags: --warn-no-return async def f() -> int: make_this_not_trivial = 1 [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] [out] main:2: error: Missing return statement @@ -31,7 +28,6 @@ async def f() -> int: make_this_not_trivial = 1 return [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] [out] main:4: error: Return value expected @@ -42,7 +38,6 @@ async def f() -> int: reveal_type(x) # E: Revealed type is 'builtins.int*' return x [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] [out] [case testAwaitDefaultContext] @@ -53,7 +48,6 @@ async def f(x: T) -> T: y = await f(x) reveal_type(y) return y -[typing fixtures/typing-full.pyi] [out] main:6: error: Revealed type is 'T`-1' @@ -65,7 +59,6 @@ async def f(x: T) -> T: y = await f(x) # type: Any reveal_type(y) return y -[typing fixtures/typing-full.pyi] [out] main:6: error: Revealed type is 'Any' @@ -77,7 +70,6 @@ async def f(x: T) -> T: y = await f(x) # type: int reveal_type(y) return x -[typing fixtures/typing-full.pyi] [out] main:5: error: Argument 1 to "f" has incompatible type "T"; expected "int" main:6: error: Revealed type is 'builtins.int' @@ -91,7 +83,6 @@ def g() -> Generator[int, None, str]: async def f() -> int: x = await g() return x -[typing fixtures/typing-full.pyi] [out] main:7: error: Incompatible types in await (actual type Generator[int, None, str], expected type Awaitable[Any]) @@ -103,7 +94,6 @@ def g() -> Iterator[Any]: async def f() -> int: x = await g() return x -[typing fixtures/typing-full.pyi] [out] main:6: error: Incompatible types in await (actual type Iterator[Any], expected type Awaitable[Any]) @@ -115,7 +105,6 @@ async def f() -> int: x = await g() return x [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] [out] main:5: error: Incompatible types in await (actual type "int", expected type Awaitable[Any]) @@ -127,7 +116,6 @@ async def f() -> str: x = await g() # type: str return x [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] [out] main:5: error: Incompatible types in assignment (expression has type "int", variable has type "str") @@ -139,7 +127,6 @@ async def f() -> str: x = await g() return x [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] [out] main:6: error: Incompatible return value type (got "int", expected "str") @@ -152,7 +139,7 @@ async def f() -> None: async for x in C(): reveal_type(x) # E: Revealed type is 'builtins.int*' [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[out] [case testAsyncForError] @@ -161,7 +148,6 @@ async def f() -> None: async for x in [1]: pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] [out] main:4: error: AsyncIterable expected main:4: error: List[int] has no attribute "__aiter__" @@ -181,7 +167,6 @@ async def f() -> None: 
async for z in C(): # type: Union[int, str] reveal_type(z) # E: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] [case testAsyncForComprehension] # flags: --fast-parser --python-version 3.6 @@ -221,7 +206,6 @@ async def generatorexp(obj: Iterable[int]): reveal_type(lst2) # E: Revealed type is 'typing.AsyncIterator[builtins.int*]' [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] [case testAsyncForComprehensionErrors] # flags: --fast-parser --python-version 3.6 @@ -256,7 +240,6 @@ main:20: error: Iterable[int] has no attribute "__aiter__"; maybe "__iter__"? main:21: error: Iterable expected main:21: error: asyncify[int] has no attribute "__iter__"; maybe "__aiter__"? [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] [case testAsyncWith] @@ -267,7 +250,6 @@ async def f() -> None: async with C() as x: reveal_type(x) # E: Revealed type is 'builtins.int*' [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] [case testAsyncWithError] @@ -279,7 +261,6 @@ async def f() -> None: async with C() as x: pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] [out] main:6: error: "C" has no attribute "__aenter__"; maybe "__enter__"? main:6: error: "C" has no attribute "__aexit__"; maybe "__exit__"? @@ -293,7 +274,7 @@ async def f() -> None: async with C() as x: # E: Incompatible types in "async with" for __aenter__ (actual type "int", expected type Awaitable[Any]) pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[out] [case testAsyncWithErrorBadAenter2] @@ -304,7 +285,7 @@ async def f() -> None: async with C() as x: # E: None has no attribute "__await__" pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[out] [case testAsyncWithErrorBadAexit] @@ -315,7 +296,7 @@ async def f() -> None: async with C() as x: # E: Incompatible types in "async with" for __aexit__ (actual type "int", expected type Awaitable[Any]) pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[out] [case testAsyncWithErrorBadAexit2] @@ -326,7 +307,7 @@ async def f() -> None: async with C() as x: # E: None has no attribute "__await__" pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[out] [case testAsyncWithTypeComments] @@ -343,7 +324,6 @@ async def f() -> None: async with C() as a: # type: int, int # E: Invalid tuple literal type pass [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] [case testNoYieldInAsyncDef] # flags: --python-version 3.5 @@ -381,7 +361,6 @@ def g() -> Generator[Any, None, str]: x = yield from f() return x [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] [out] main:6: error: "yield from" can't be applied to Awaitable[str] @@ -410,7 +389,7 @@ async def main() -> None: async for z in I(): reveal_type(z) # E: Revealed type is 'builtins.int' [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[out] [case testYieldTypeCheckInDecoratedCoroutine] @@ -426,7 +405,7 @@ def f() -> Generator[int, str, int]: else: return '' # E: Incompatible return value type (got "str", expected "int") [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] +[out] -- Async generators (PEP 525), some test cases adapted from the PEP text -- --------------------------------------------------------------------- @@ -457,7 +436,6 @@ async def wrong_return() -> Generator[int, None, None]: # E: The return type of yield 
3 [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] [case testAsyncGeneratorReturnIterator] # flags: --python-version 3.6 @@ -473,7 +451,6 @@ async def use_gen() -> None: reveal_type(item) # E: Revealed type is 'builtins.int*' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] [case testAsyncGeneratorManualIter] # flags: --python-version 3.6 @@ -491,7 +468,6 @@ async def user() -> None: reveal_type(await gen.__anext__()) # E: Revealed type is 'builtins.int*' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] [case testAsyncGeneratorAsend] # flags: --fast-parser --python-version 3.6 @@ -512,7 +488,6 @@ async def h() -> None: reveal_type(await g.asend('hello')) # E: Revealed type is 'builtins.int*' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] [case testAsyncGeneratorAthrow] # flags: --fast-parser --python-version 3.6 @@ -531,7 +506,6 @@ async def h() -> None: reveal_type(await g.athrow(BaseException)) # E: Revealed type is 'builtins.str*' [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] [case testAsyncGeneratorNoSyncIteration] # flags: --fast-parser --python-version 3.6 @@ -546,7 +520,6 @@ def h() -> None: pass [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] [out] main:9: error: Iterable expected @@ -563,7 +536,6 @@ async def gen() -> AsyncGenerator[int, None]: yield from f() # E: 'yield from' in async function [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] [case testAsyncGeneratorNoReturnWithValue] # flags: --fast-parser --python-version 3.6 @@ -585,7 +557,6 @@ async def return_f() -> AsyncGenerator[int, None]: return f() # E: 'return' with value in async generator is not allowed [builtins fixtures/dict.pyi] -[typing fixtures/typing-full.pyi] -- The full matrix of coroutine compatibility -- ------------------------------------------ @@ -673,5 +644,4 @@ async def decorated_host_coroutine() -> None: x = await other_coroutine() [builtins fixtures/async_await.pyi] -[typing fixtures/typing-full.pyi] [out] diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test index ffb727972c90..7a81adb0a672 100644 --- a/test-data/unit/check-class-namedtuple.test +++ b/test-data/unit/check-class-namedtuple.test @@ -507,7 +507,6 @@ reveal_type(XMeth(1).asyncdouble()) # E: Revealed type is 'typing.Awaitable[bui reveal_type(XMeth(42).x) # E: Revealed type is 'builtins.int' reveal_type(XRepr(42).__str__()) # E: Revealed type is 'builtins.str' reveal_type(XRepr(1, 2).__add__(XRepr(3))) # E: Revealed type is 'builtins.int' -[typing fixtures/typing-full.pyi] [case testNewNamedTupleOverloading] from typing import NamedTuple, overload diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index f2b83290cb60..0b6bb6873d62 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -1651,6 +1651,7 @@ b = a.bar [out] main:9: error: Incompatible types in assignment (expression has type "A", variable has type "B") + [case testGetAttrSignature] class A: def __getattr__(self, x: str) -> A: pass @@ -1664,86 +1665,6 @@ class D: main:4: error: Invalid signature "def (__main__.B, __main__.A) -> __main__.B" main:6: error: Invalid signature "def (__main__.C, builtins.str, builtins.str) -> __main__.C" -[case testSetAttr] -from typing import Union, Any -class A: - def __setattr__(self, name: str, value: Any) -> None: ... 
- -a = A() -a.test = 'hello' - -class B: - def __setattr__(self, name: str, value: Union[int, str]) -> None: ... - -b = B() -b.both = 1 -b.work = '2' - -class C: - def __setattr__(self, name: str, value: str) -> None: ... - -c = C() -c.fail = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "str") - -class D: - __setattr__ = 'hello' - -d = D() -d.crash = 4 # E: "D" has no attribute "crash" - -class Ex: - def __setattr__(self, name: str, value: int) -> None:... - test = '42' # type: str -e = Ex() -e.test = 'hello' -e.t = 4 - -class Super: - def __setattr__(self, name: str, value: int) -> None: ... - -class Sub(Super): - ... -s = Sub() -s.success = 4 -s.fail = 'fail' # E: Incompatible types in assignment (expression has type "str", variable has type "int") - -[case testSetAttrSignature] -class Test: - def __setattr__() -> None: ... # E: Method must have at least one argument # E: Invalid signature "def ()" -t = Test() -t.crash = 'test' # E: "Test" has no attribute "crash" - -class A: - def __setattr__(self): ... # E: Invalid signature "def (self: Any) -> Any" -a = A() -a.test = 4 # E: "A" has no attribute "test" - -class B: - def __setattr__(self, name, value: int): ... -b = B() -b.integer = 5 - -class C: - def __setattr__(self, name: int, value: int) -> None: ... # E: Invalid signature "def (__main__.C, builtins.int, builtins.int)" -c = C() -c.check = 13 - -[case testGetAttrAndSetattr] -from typing import Any -class A: - def __setattr__(self, name: str, value: Any) -> None: ... - def __getattr__(self, name: str) -> Any: ... -a = A() -a.test = 4 -t = a.test - -class B: - def __setattr__(self, name: str, value: int) -> None: ... - def __getattr__(self, name: str) -> str: ... -integer = 0 -b = B() -b.at = '3' # E: Incompatible types in assignment (expression has type "str", variable has type "int") -integer = b.at # E: Incompatible types in assignment (expression has type "str", variable has type "int") -- CallableType objects -- ---------------- @@ -3146,11 +3067,11 @@ class A(metaclass=M): pass reveal_type(A[M]) # E: Revealed type is 'builtins.int' -[case testMetaclassSelfType] +[case testMetaclassSelftype] from typing import TypeVar, Type class M(type): pass -T = TypeVar('T') +T = TypeVar('T', bound='A') class M1(M): def foo(cls: Type[T]) -> T: ... 
@@ -3216,80 +3137,6 @@ class M(type): class A(metaclass=M): pass reveal_type(type(A).x) # E: Revealed type is 'builtins.int' -[case testMetaclassStrictSupertypeOfTypeWithClassmethods] -from typing import Type, TypeVar -TA = TypeVar('TA', bound='A') -TTA = TypeVar('TTA', bound='Type[A]') -TM = TypeVar('TM', bound='M') - -class M(type): - def g1(cls: 'Type[A]') -> A: pass # E: The erased type of self 'Type[__main__.A]' is not a supertype of its class '__main__.M' - def g2(cls: Type[TA]) -> TA: pass # E: The erased type of self 'Type[__main__.A]' is not a supertype of its class '__main__.M' - def g3(cls: TTA) -> TTA: pass # E: The erased type of self 'Type[__main__.A]' is not a supertype of its class '__main__.M' - def g4(cls: TM) -> TM: pass -m: M - -class A(metaclass=M): - def foo(self): pass - -reveal_type(A.g1) # E: Revealed type is 'def () -> __main__.A' -reveal_type(A.g2) # E: Revealed type is 'def () -> __main__.A*' -reveal_type(A.g3) # E: Revealed type is 'def () -> def () -> __main__.A' -reveal_type(A.g4) # E: Revealed type is 'def () -> def () -> __main__.A' - -class B(metaclass=M): - def foo(self): pass - -B.g1 # Should be error: Argument 0 to "g1" of "M" has incompatible type "B"; expected Type[A] -B.g2 # Should be error: Argument 0 to "g2" of "M" has incompatible type "B"; expected Type[TA] -B.g3 # Should be error: Argument 0 to "g3" of "M" has incompatible type "B"; expected "TTA" -reveal_type(B.g4) # E: Revealed type is 'def () -> def () -> __main__.B' - -# 4 examples of unsoundness - instantiation, classmethod, staticmethod and ClassVar: - -ta: Type[A] = m # E: Incompatible types in assignment (expression has type "M", variable has type Type[A]) -a: A = ta() -reveal_type(ta.g1) # E: Revealed type is 'def () -> __main__.A' -reveal_type(ta.g2) # E: Revealed type is 'def () -> __main__.A*' -reveal_type(ta.g3) # E: Revealed type is 'def () -> Type[__main__.A]' -reveal_type(ta.g4) # E: Revealed type is 'def () -> Type[__main__.A]' - -x: M = ta -x.g1 # should be error: Argument 0 to "g1" of "M" has incompatible type "M"; expected Type[A] -x.g2 # should be error: Argument 0 to "g2" of "M" has incompatible type "M"; expected Type[TA] -x.g3 # should be error: Argument 0 to "g3" of "M" has incompatible type "M"; expected "TTA" -reveal_type(x.g4) # E: Revealed type is 'def () -> __main__.M*' - -def r(ta: Type[TA], tta: TTA) -> None: - x: M = ta - y: M = tta - -class Class(metaclass=M): - @classmethod - def f1(cls: Type[Class]) -> None: pass - @classmethod - def f2(cls: M) -> None: pass -cl: Type[Class] = m # E: Incompatible types in assignment (expression has type "M", variable has type Type[Class]) -reveal_type(cl.f1) # E: Revealed type is 'def ()' -reveal_type(cl.f2) # E: Revealed type is 'def ()' -x1: M = cl - -class Static(metaclass=M): - @staticmethod - def f() -> None: pass -s: Type[Static] = m # E: Incompatible types in assignment (expression has type "M", variable has type Type[Static]) -reveal_type(s.f) # E: Revealed type is 'def ()' -x2: M = s - -from typing import ClassVar -class Cvar(metaclass=M): - x = 1 # type: ClassVar[int] -cv: Type[Cvar] = m # E: Incompatible types in assignment (expression has type "M", variable has type Type[Cvar]) -cv.x -x3: M = cv - -[builtins fixtures/classmethod.pyi] - -- Synthetic types crashes -- ----------------------- diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index ae1498acdadd..1d840269da42 100644 --- a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -1006,16 
+1006,6 @@ a[None:] a[:None] [builtins fixtures/slice.pyi] -[case testNoneSliceBoundsWithStrictOptional] -# flags: --strict-optional -from typing import Any -a = None # type: Any -a[None:1] -a[1:None] -a[None:] -a[:None] -[builtins fixtures/slice.pyi] - -- String interpolation -- -------------------- diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index e2ddade9887f..290d9bea7524 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -88,7 +88,6 @@ def r(x) -> None: ... r = l # E: Incompatible types in assignment (expression has type Callable[[Any, Any], None], variable has type Callable[[Any], None]) [case testSubtypingFunctionsImplicitNames] -from typing import Any def f(a, b): pass def g(c: Any, d: Any) -> Any: pass @@ -1826,7 +1825,6 @@ class A(Generic[t]): [case testRedefineFunction] -from typing import Any def f(x) -> Any: pass def g(x, y): pass def h(x): pass diff --git a/test-data/unit/check-incomplete-fixture.test b/test-data/unit/check-incomplete-fixture.test deleted file mode 100644 index 68c7c6c9aa0f..000000000000 --- a/test-data/unit/check-incomplete-fixture.test +++ /dev/null @@ -1,98 +0,0 @@ --- Test cases for reporting errors when a test case uses a fixture with --- missing definitions. At least in the most common cases this should not --- result in an uncaught exception. These tests make sure that this behavior --- does not regress. --- --- NOTE: These tests do NOT test behavior of mypy outside tests. - -[case testVariableUndefinedUsingDefaultFixture] -import m -# This used to cause a crash since types.ModuleType is not available -# by default. We fall back to 'object' now. -m.x # E: "object" has no attribute "x" -[file m.py] - -[case testListMissingFromStubs] -from typing import List -def f(x: List[int]) -> None: pass -[out] -main:1: error: Name '__builtins__.list' is not defined -main:1: note: Maybe your test fixture does not define "typing.List"? -main:1: note: Consider adding [builtins fixtures/list.pyi] to your test description - -[case testDictMissingFromStubs] -from typing import Dict -def f(x: Dict[int]) -> None: pass -[out] -main:1: error: Name '__builtins__.dict' is not defined -main:1: note: Maybe your test fixture does not define "typing.Dict"? -main:1: note: Consider adding [builtins fixtures/dict.pyi] to your test description - -[case testSetMissingFromStubs] -from typing import Set -def f(x: Set[int]) -> None: pass -[out] -main:1: error: Name '__builtins__.set' is not defined -main:1: note: Maybe your test fixture does not define "typing.Set"? -main:1: note: Consider adding [builtins fixtures/set.pyi] to your test description - -[case testBoolMissingFromStubs] -x: bool -[out] -main:1: error: Name 'bool' is not defined -main:1: note: Maybe your test fixture does not define "builtins.bool"? -main:1: note: Consider adding [builtins fixtures/bool.pyi] to your test description - -[case testBaseExceptionMissingFromStubs] -e: BaseException -[out] -main:1: error: Name 'BaseException' is not defined -main:1: note: Maybe your test fixture does not define "builtins.BaseException"? -main:1: note: Consider adding [builtins fixtures/exception.pyi] to your test description - -[case testExceptionMissingFromStubs] -e: Exception -[out] -main:1: error: Name 'Exception' is not defined -main:1: note: Maybe your test fixture does not define "builtins.Exception"? 
-main:1: note: Consider adding [builtins fixtures/exception.pyi] to your test description - -[case testIsinstanceMissingFromStubs] -if isinstance(1, int): - pass -[out] -main:1: error: Name 'isinstance' is not defined -main:1: note: Maybe your test fixture does not define "builtins.isinstance"? -main:1: note: Consider adding [builtins fixtures/isinstancelist.pyi] to your test description - -[case testInvalidTupleDefinitionFromStubs] -from typing import Tuple -x: Tuple[int, ...] -x[0] -for y in x: - pass -[out] --- These errors are pretty bad, but keeping this test anyway to --- avoid things getting worse. -main:2: error: "tuple" expects no type arguments, but 1 given -main:3: error: Value of type "tuple" is not indexable -main:4: error: Iterable expected -main:4: error: "tuple" has no attribute "__iter__" - -[case testClassmethodMissingFromStubs] -class A: - @classmethod - def f(cls): pass -[out] -main:2: error: Name 'classmethod' is not defined -main:2: note: Maybe your test fixture does not define "builtins.classmethod"? -main:2: note: Consider adding [builtins fixtures/classmethod.pyi] to your test description - -[case testPropertyMissingFromStubs] -class A: - @property - def f(self): pass -[out] -main:2: error: Name 'property' is not defined -main:2: note: Maybe your test fixture does not define "builtins.property"? -main:2: note: Consider adding [builtins fixtures/property.pyi] to your test description diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index d28cb1acf512..23fafc24edf5 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -3,9 +3,6 @@ -- Before the tests are run again, in step N any *.py.N files are copied to -- *.py. -- --- You can add an empty section like `[delete mod.py.2]` to delete `mod.py` --- before the second run. --- -- Errors expected in the first run should be in the `[out1]` section, and -- errors expected in the second run should be in the `[out2]` section, and so on. -- If a section is omitted, it is expected there are no errors on that run. @@ -1948,35 +1945,7 @@ main:3: error: Revealed type is 'builtins.int' main:5: error: Revealed type is 'builtins.int' -- TODO: Add another test for metaclass in import cycle (reversed from the above test). --- This currently does not work. - -[case testDeleteFile] -import n -[file n.py] -import m -[file m.py] -x = 1 -[delete m.py.2] -[rechecked n] -[stale] -[out2] -tmp/n.py:1: error: Cannot find module named 'm' -tmp/n.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) - -[case testDeleteFileWithinCycle] -import a -[file a.py] -import b -[file b.py] -import c -[file c.py] -import a -[file a.py.2] -import c -[delete b.py.2] -[rechecked a, c] -[stale a] -[out2] +-- This currently doesn't work. 
[case testThreePassesBasic] import m @@ -2035,11 +2004,7 @@ def foo(x) -> int: tmp/n.py:2: error: Too many arguments for "foo" [out3] --- --- Quick mode --- - -[case testQuickAndDirtyInterfaceChangeDoesNotPropagate] +[case testQuickAndDirty1] # flags: --quick-and-dirty import b, c [file a.py] @@ -2055,7 +2020,7 @@ def a(x): pass [rechecked a] [stale a] -[case testQuickAndDirtyDoesNotInvalidateImportCycle] +[case testQuickAndDirty2] # flags: --quick-and-dirty import b, c [file a.py] @@ -2073,7 +2038,7 @@ x = 0 [rechecked b] [stale b] -[case testQuickAndDirtySwitchToIncrementalMode] +[case testQuickAndDirty3] # flags: --quick-and-dirty # flags2: --incremental import a, b @@ -2084,7 +2049,7 @@ import a [rechecked a, b, builtins] [stale a, b, builtins] -[case testQuickAndDirtyFixErrorInExistingFunction] +[case testQuickAndDirty4] # flags: --quick-and-dirty import a, b [file a.py] @@ -2100,7 +2065,7 @@ tmp/a.py:2: error: Incompatible return value type (got "str", expected "int") [rechecked a] [stale a] -[case testQuickAndDirtyIntroduceErrorInNewFunction] +[case testQuickAndDirty5] # flags: --quick-and-dirty import a, b [file a.py] @@ -2116,7 +2081,7 @@ tmp/a.py:2: error: Incompatible return value type (got "str", expected "int") [rechecked a] [stale] -[case testQuickAndDirtyPersistingError] +[case testQuickAndDirty6] # flags: --quick-and-dirty import a, b [file a.py] @@ -2134,7 +2099,7 @@ tmp/a.py:2: error: Incompatible return value type (got "float", expected "int") [rechecked a] [stale] -[case testQuickAndDirtyIntroduceReferencesWithinCycle] +[case testQuickAndDirty7] # flags: --quick-and-dirty import a, b [file a.py] @@ -2154,7 +2119,7 @@ tmp/a.py:3: error: Revealed type is 'def () -> builtins.int' [rechecked a] [stale] -[case testQuickAndDirtyIntroduceReferencesWithinCycle2] +[case testQuickAndDirty8] # flags: --quick-and-dirty import a, b [file a.py] @@ -2174,70 +2139,14 @@ tmp/b.py:3: error: Revealed type is 'def () -> builtins.int' [rechecked b] [stale] -[case testQuickAndDirtyIntroduceReferencesWithinCycleNoError] -# flags: --quick-and-dirty -import a, b, c -[file a.py] -import b -[file b.py] -import a -class C: pass -def f() -> int: pass -[file c.py] -[file a.py.2] -import b -def g() -> b.C: pass -h = b.f -[file c.py.3] -import a -reveal_type(a.g) -reveal_type(a.h) -[out1] -[out2] -[out3] -tmp/c.py:2: error: Revealed type is 'def () -> b.C' -tmp/c.py:3: error: Revealed type is 'def () -> builtins.int' -[rechecked a] -[stale a] -[rechecked2 c] -[stale2] - -[case testQuickAndDirtyIntroduceReferencesWithinCycleNoError2] -# flags: --quick-and-dirty -import a, b, c -[file a.py] -import b -class C: pass -def f() -> int: pass -[file b.py] -import a -[file c.py] -[file b.py.2] -import a -def g() -> a.C: pass -h = a.f -[file c.py.3] -import b -reveal_type(b.g) -reveal_type(b.h) -[out1] -[out2] -[out3] -tmp/c.py:2: error: Revealed type is 'def () -> a.C' -tmp/c.py:3: error: Revealed type is 'def () -> builtins.int' -[rechecked b] -[stale b] -[rechecked2 c] -[stale2] - -- (The behavior for blockers is actually no different than in regular incremental mode) -[case testQuickAndDirtyBlockerOnFirstRound] +[case testQuickAndDirty9] # flags: --quick-and-dirty import a, b [file a.py] import b -class B(C): pass -class C(B): pass # blocker +class B: pass +class C(B, B): pass # blocker [file b.py] import a [file a.py.2] @@ -2245,12 +2154,12 @@ import b class B: pass class C(B): pass [out1] -tmp/a.py:3: error: Cycle in inheritance hierarchy +tmp/a.py:3: error: Duplicate base class "B" [out2] [rechecked a, 
b] [stale a, b] -[case testQuickAndDirtyBlockerOnSecondRound] +[case testQuickAndDirty10] # flags: --quick-and-dirty import a, b [file a.py] @@ -2261,15 +2170,15 @@ class C(B): pass import a [file a.py.2] import b -class B(C): pass -class C(B): pass # blocker +class B: pass +class C(B, B): pass # blocker [out1] [out2] -tmp/a.py:3: error: Cycle in inheritance hierarchy +tmp/a.py:3: error: Duplicate base class "B" [rechecked a, b] [stale a, b] -[case testQuickAndDirtyRenameFunctionInTwoModules] +[case testQuickAndDirty11] # flags: --quick-and-dirty import a, b, c, d [file a.py] @@ -2287,7 +2196,7 @@ def g(): pass # renamed f to g [file c.py.2] from a import g -[case testQuickAndDirtyUnmodifiedModuleDoesNotGenerateError] +[case testQuickAndDirty12] # flags: --quick-and-dirty import a, b, c, d [file a.py] @@ -2300,7 +2209,7 @@ from a import C from b import C [file d.py] from c import C -C().f() # no error because unmodified +C().f() [file a.py.2] import d class C: @@ -2310,7 +2219,7 @@ from a import C [out1] [out2] -[case testQuickAndDirtyUnmodifiedModuleDoesNotGenerateError2] +[case testQuickAndDirty13] # flags: --quick-and-dirty import a, b, c [file a.py] @@ -2332,7 +2241,7 @@ class C: [rechecked a] [stale a] -[case testQuickAndDirtyTypeAliasReference] +[case testQuickAndDirty14] # flags: --quick-and-dirty import a, b [file a.py] @@ -2345,7 +2254,7 @@ S = str import b def f(x: b.S) -> int: return 0 -[case testQuickAndDirtyNamedTupleReference] +[case testQuickAndDirty15] # flags: --quick-and-dirty import a, b [file a.py] @@ -2359,7 +2268,7 @@ P = NamedTuple('P', (('x', int),)) import b def f(x: b.P) -> int: return 0 -[case testQuickAndDirtyTypeVarReference] +[case testQuickAndDirty16] # flags: --quick-and-dirty import a, b [file a.py] @@ -2372,344 +2281,3 @@ T = TypeVar('T') [file a.py.2] import b def f(x: b.T) -> int: return 0 - -[case testQuickAndDirtyDeleteFunctionUsedByOtherModule] -# flags: --quick-and-dirty -import a -[file a.py] -from b import f -[file b.py] -import a -def f() -> int: pass -a.f() -[file b.py.2] -import a -reveal_type(a.f) -[out2] -tmp/b.py:2: error: Revealed type is 'Any' - -[case testQuickAndDirtyDeleteClassUsedInAnnotation] -# flags: --quick-and-dirty -import a -[file a.py] -import b -def f() -> b.C: pass -[file b.py] -import a -class C: pass -[file b.py.2] -import a -reveal_type(a.f) -a.f().x -[out2] -tmp/b.py:2: error: Revealed type is 'def () -> ' -tmp/b.py:3: error: "" has no attribute "x" - -[case testQuickAndDirtyDeleteClassUsedAsBase] -# flags: --quick-and-dirty -import a -[file a.py] -import b -class D(b.C): pass -[file b.py] -import a -class C: pass -[file b.py.2] -import a -reveal_type(a.D) -a.D().x -[out2] -tmp/b.py:2: error: Revealed type is 'Any' - -[case testQuickAndDirtyDeleteNestedClassUsedInAnnotation] -# flags: --quick-and-dirty -import a -[file a.py] -import b -def f() -> b.C.D: pass -[file b.py] -import a -class C: - class D: pass -[file b.py.2] -import a -class C: - pass -reveal_type(a.f) -a.f().x -[out2] -tmp/b.py:4: error: Revealed type is 'def () -> ' -tmp/b.py:5: error: "" has no attribute "x" - -[case testQuickAndDirtyTurnGenericClassIntoNonGeneric-skip] -# flags: --quick-and-dirty -import a -[file a.py] -import b -def f() -> b.C[int]: pass -[file b.py] -from typing import TypeVar, Generic -import a -T = TypeVar('T') -class C(Generic[T]): pass -[file b.py.2] -import a -class C: pass -reveal_type(a.f) -c: C -d = a.f() -c = d -d = c -[out2] -# TODO: Crashes (https://github.com/python/mypy/issues/3279) - -[case 
testQuickAndDirtyTurnClassIntoGenericOne-skip] -# flags: --quick-and-dirty -import a -[file a.py] -import b -def f() -> b.C: pass -[file b.py] -import a -class C: pass -[file b.py.2] -from typing import TypeVar, Generic -import a -T = TypeVar('T') -class C(Generic[T]): pass -reveal_type(a.f) -c: C[int] -d = a.f() -d = c -c = d -[out2] -# TODO: Crashes (https://github.com/python/mypy/issues/3279) - -[case testQuickAndDirtyDeleteTypeVarUsedInAnnotation] -# flags: --quick-and-dirty -import a -[file a.py] -import b -def f(x: b.T) -> b.T: return x -[file b.py] -from typing import TypeVar -import a -T = TypeVar('T') -[file b.py.2] -import a -reveal_type(a.f) -reveal_type(a.f(1)) -[out2] -tmp/b.py:2: error: Revealed type is 'def [b.T] (x: b.T`-1) -> b.T`-1' -tmp/b.py:3: error: Revealed type is 'builtins.int*' - -[case testQuickAndDirtyDeleteNewTypeUsedInAnnotation] -# flags: --quick-and-dirty -import a -[file a.py] -import b -def f() -> b.C: pass -[file b.py] -from typing import NewType -import a -C = NewType('C', int) -[file b.py.2] -import a -reveal_type(a.f) -a.f().x -[out2] -tmp/b.py:2: error: Revealed type is 'def () -> ' -tmp/b.py:3: error: "" has no attribute "x" - -[case testQuickAndDirtyChangeClassIntoFunction] -# flags: --quick-and-dirty -import a -[file a.py] -import b -def f() -> b.C: pass -[file b.py] -import a -class C: pass -[file b.py.2] -import a -def C() -> None: pass -reveal_type(a.f) -a.f().x -[out2] -tmp/b.py:3: error: Revealed type is 'def () -> ' -tmp/b.py:4: error: "" has no attribute "x" - -[case testQuickAndDirtyChangeClassIntoVariable] -# flags: --quick-and-dirty -import a -[file a.py] -import b -def f() -> b.C: pass -[file b.py] -import a -class C: pass -[file b.py.2] -import a -C = 0 -reveal_type(a.f) -a.f().x -[out2] -tmp/b.py:3: error: Revealed type is 'def () -> ' -tmp/b.py:4: error: "" has no attribute "x" - -[case testQuickAndDirtyAddFile] -# flags: --quick-and-dirty -import a -[file a.py] -import b -x = '' -[file b.py] -import a -[file b.py.2] -import c -reveal_type(c.x) -[file c.py.2] -import a -x = 1 -reveal_type(a.x) -[rechecked b, c] -[stale] -[out2] -tmp/c.py:3: error: Revealed type is 'builtins.str' -tmp/b.py:2: error: Revealed type is 'builtins.int' - -[case testQuickAndDirtyDeleteFile] -# flags: --quick-and-dirty -import b -[file a.py] -def f() -> None: pass -[file b.py] -import a -a.f() -[delete a.py.2] -[file b.py.3] -import a -a.f() # Comment change -[file b.py.4] -# Remove import -[rechecked b] -[stale] -[rechecked2 b] -[stale2] -[rechecked3 b] -[stale3 b] -[out2] -tmp/b.py:1: error: Cannot find module named 'a' -tmp/b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) -[out3] -tmp/b.py:1: error: Cannot find module named 'a' -tmp/b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) -[out4] - -[case testQuickAndDirtyRenameModule] -# flags: --quick-and-dirty -import a -[file a.py] -import b -b.f() -[file b.py] -def f() -> None: pass -[delete b.py.2] -[file c.py.2] -def f() -> None: pass -[file a.py.2] -import c -c.f(1) -[file c.py.3] -def f() -> None: pass # comment change -[file c.py.4] -def f(x) -> None: pass -[out] -[out2] -tmp/a.py:2: error: Too many arguments for "f" -[out3] -tmp/a.py:2: error: Too many arguments for "f" -[out4] -[rechecked a, c] -[stale c] -[rechecked2 a, c] -[stale2] -[rechecked3 a, c] -[stale3 a, c] - -[case testQuickAndDirtyMultiplePasses] -# flags: --quick-and-dirty -import a -[file a.py] -import b -b.f() -[file b.py] -def f() -> None: 
pass -[file b.py.2] -# Write cache file but the error in a is not caught yet. -def f(x) -> None: pass -[file a.py.3] -# Editing a triggers the error. -import b -b.f() -[rechecked b] -[rechecked2 a] -[out2] -[out3] -tmp/a.py:3: error: Too few arguments for "f" - -[case testQuickAndDirtySerializeStaleType] -# flags: --quick-and-dirty -import a, c -[file a.py] -import b -def f() -> b.C: pass -[file b.py] -import a -class C: pass -[file c.py] -[file b.py.2] -import a -x = a.f() -[file c.py.3] -import b -reveal_type(b.x) -def g(x: object) -> None: pass -g(b.x) -b.x.y -[rechecked b] -[stale b] -[rechecked2 c] -[stale2] -[out3] -tmp/c.py:2: error: Revealed type is '' -tmp/c.py:5: error: "" has no attribute "y" - -[case testSerializeAbstractPropertyIncremental] -from abc import abstractmethod -import typing -class A: - @property - def f(self) -> int: - return 1 - @f.setter # type: ignore - @abstractmethod - def f(self, x: int) -> None: - pass -a = A() -[builtins fixtures/property.pyi] - -[case testSerializeAbstractPropertyDisallowUntypedIncremental] -# flags: --disallow-untyped-defs -from abc import abstractmethod -import typing -class A: - @property - def f(self) -> int: - return 1 - @f.setter # type: ignore - @abstractmethod - def f(self, x: int) -> None: - pass -a = A() -[builtins fixtures/property.pyi] diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index b1b6857e5518..66050601d26f 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -1416,227 +1416,3 @@ reveal_type(f()) # E: Revealed type is 'types.ModuleType' reveal_type(types) # E: Revealed type is 'types.ModuleType' [builtins fixtures/module.pyi] - -[case testClassImportAccessedInMethod] -class C: - import m - def foo(self) -> None: - x = self.m.a - reveal_type(x) # E: Revealed type is 'builtins.str' - # ensure we distinguish self from other variables - y = 'hello' - z = y.m.a # E: "str" has no attribute "m" - @classmethod - def cmethod(cls) -> None: - y = cls.m.a - reveal_type(y) # E: Revealed type is 'builtins.str' - @staticmethod - def smethod(foo: int) -> None: - # we aren't confused by first arg of a staticmethod - y = foo.m.a # E: "int" has no attribute "m" - -[file m.py] -a = 'foo' - -[builtins fixtures/module.pyi] - -[case testModuleAlias] -import m -m2 = m -reveal_type(m2.a) # E: Revealed type is 'builtins.str' -m2.b # E: Module has no attribute "b" -m2.c = 'bar' # E: Module has no attribute "c" - -[file m.py] -a = 'foo' - -[builtins fixtures/module.pyi] - -[case testClassModuleAlias] -import m - -class C: - x = m - def foo(self) -> None: - reveal_type(self.x.a) # E: Revealed type is 'builtins.str' - -[file m.py] -a = 'foo' - -[builtins fixtures/module.pyi] - -[case testLocalModuleAlias] -import m - -def foo() -> None: - x = m - reveal_type(x.a) # E: Revealed type is 'builtins.str' - -class C: - def foo(self) -> None: - x = m - reveal_type(x.a) # E: Revealed type is 'builtins.str' - -[file m.py] -a = 'foo' - -[builtins fixtures/module.pyi] - -[case testChainedModuleAlias] -import m -m3 = m2 = m -m4 = m3 -m5 = m4 -reveal_type(m2.a) # E: Revealed type is 'builtins.str' -reveal_type(m3.a) # E: Revealed type is 'builtins.str' -reveal_type(m4.a) # E: Revealed type is 'builtins.str' -reveal_type(m5.a) # E: Revealed type is 'builtins.str' - -[file m.py] -a = 'foo' - -[builtins fixtures/module.pyi] - -[case testMultiModuleAlias] -import m, n -m2, n2, (m3, n3) = m, n, [m, n] -reveal_type(m2.a) # E: Revealed type is 'builtins.str' -reveal_type(n2.b) # E: Revealed type 
is 'builtins.str' -reveal_type(m3.a) # E: Revealed type is 'builtins.str' -reveal_type(n3.b) # E: Revealed type is 'builtins.str' - -x, y = m # E: 'types.ModuleType' object is not iterable -x, y, z = m, n # E: Need more than 2 values to unpack (3 expected) -x, y = m, m, m # E: Too many values to unpack (2 expected, 3 provided) -x, (y, z) = m, n # E: 'types.ModuleType' object is not iterable -x, (y, z) = m, (n, n, n) # E: Too many values to unpack (2 expected, 3 provided) - -[file m.py] -a = 'foo' - -[file n.py] -b = 'bar' - -[builtins fixtures/module.pyi] - -[case testModuleAliasWithExplicitAnnotation] -from typing import Any -import types -import m -mod_mod: types.ModuleType = m -mod_mod2: types.ModuleType -mod_mod2 = m -mod_mod3 = m # type: types.ModuleType -mod_any: Any = m -mod_int: int = m # E: Incompatible types in assignment (expression has type Module, variable has type "int") - -reveal_type(mod_mod) # E: Revealed type is 'types.ModuleType' -mod_mod.a # E: Module has no attribute "a" -reveal_type(mod_mod2) # E: Revealed type is 'types.ModuleType' -mod_mod2.a # E: Module has no attribute "a" -reveal_type(mod_mod3) # E: Revealed type is 'types.ModuleType' -mod_mod3.a # E: Module has no attribute "a" -reveal_type(mod_any) # E: Revealed type is 'Any' - -[file m.py] -a = 'foo' - -[builtins fixtures/module.pyi] - -[case testModuleAliasPassedToFunction] -import types -import m - -def takes_module(x: types.ModuleType): - reveal_type(x.__file__) # E: Revealed type is 'builtins.str' - -n = m -takes_module(m) -takes_module(n) - -[file m.py] -a = 'foo' - -[builtins fixtures/module.pyi] - -[case testModuleAliasRepeated] -import m, n - -if bool(): - x = m -else: - x = 3 # E: Incompatible types in assignment (expression has type "int", variable has type Module) - -if bool(): - y = 3 -else: - y = m # E: Incompatible types in assignment (expression has type Module, variable has type "int") - -if bool(): - z = m -else: - z = n # E: Cannot assign multiple modules to name 'z' without explicit 'types.ModuleType' annotation - -[file m.py] -a = 'foo' - -[file n.py] -a = 3 - -[builtins fixtures/module.pyi] - -[case testModuleAliasRepeatedWithAnnotation] -import types -import m, n - -x: types.ModuleType -if bool(): - x = m -else: - x = n - -x.a # E: Module has no attribute "a" -reveal_type(x.__file__) # E: Revealed type is 'builtins.str' - -[file m.py] -a = 'foo' - -[file n.py] -a = 3 - -[builtins fixtures/module.pyi] - -[case testModuleAliasRepeatedComplex] -import m, n, o - -x = m -x = n # E: Cannot assign multiple modules to name 'x' without explicit 'types.ModuleType' annotation -x = o # E: Cannot assign multiple modules to name 'x' without explicit 'types.ModuleType' annotation - -y = o -y, z = m, n # E: Cannot assign multiple modules to name 'y' without explicit 'types.ModuleType' annotation - -xx = m -xx = m -reveal_type(xx.a) # E: Revealed type is 'builtins.str' - -[file m.py] -a = 'foo' - -[file n.py] -a = 3 - -[file o.py] -a = 'bar' - -[builtins fixtures/module.pyi] - -[case testModuleAliasToOtherModule] -import m, n -m = n # E: Cannot assign multiple modules to name 'm' without explicit 'types.ModuleType' annotation - -[file m.py] - -[file n.py] - -[builtins fixtures/module.pyi] diff --git a/test-data/unit/check-newsyntax.test b/test-data/unit/check-newsyntax.test index 645fbe525358..9f3f87853e63 100644 --- a/test-data/unit/check-newsyntax.test +++ b/test-data/unit/check-newsyntax.test @@ -119,19 +119,19 @@ f'{type(1)}' a: str a = f'foobar' a = f'{"foobar"}' -[builtins fixtures/f_string.pyi] 
+[builtins fixtures/primitives.pyi] [case testNewSyntaxFStringExpressionsOk] # flags: --python-version 3.6 f'.{1 + 1}.' f'.{1 + 1}.{"foo" + "bar"}' -[builtins fixtures/f_string.pyi] +[builtins fixtures/primitives.pyi] [case testNewSyntaxFStringExpressionsErrors] # flags: --python-version 3.6 f'{1 + ""}' f'.{1 + ""}' -[builtins fixtures/f_string.pyi] +[builtins fixtures/primitives.pyi] [out] main:2: error: Unsupported operand types for + ("int" and "str") main:3: error: Unsupported operand types for + ("int" and "str") @@ -142,12 +142,4 @@ value = 10.5142 width = 10 precision = 4 f'result: {value:{width}.{precision}}' -[builtins fixtures/f_string.pyi] - -[case testNewSyntaxFStringSingleField] -# flags: --python-version 3.6 -v = 1 -reveal_type(f'{v}') # E: Revealed type is 'builtins.str' -reveal_type(f'{1}') # E: Revealed type is 'builtins.str' -[builtins fixtures/f_string.pyi] - +[builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-newtype.test b/test-data/unit/check-newtype.test index 32b25558c84f..144c8fba04c3 100644 --- a/test-data/unit/check-newtype.test +++ b/test-data/unit/check-newtype.test @@ -155,29 +155,6 @@ y = Bar2(42) y = func3(x) [out] -[case testNewTypeWithNewType] -from typing import NewType -A = NewType('A', int) -B = NewType('B', A) -C = A -D = C -E = NewType('E', D) - -a = A(1) -b = B(a) -e = E(a) - -def funca(a: A) -> None: ... -def funcb(b: B) -> None: ... - -funca(a) -funca(b) -funca(e) -funcb(a) # E: Argument 1 to "funcb" has incompatible type "A"; expected "B" -funcb(b) -funcb(e) # E: Argument 1 to "funcb" has incompatible type "E"; expected "B" - -[out] -- Make sure NewType works as expected in a variety of different scopes/across files @@ -302,6 +279,15 @@ main:3: error: Argument 2 to NewType(...) must be subclassable (got T?) main:3: error: Invalid type "__main__.T" main:4: error: Invalid type "__main__.T" +[case testNewTypeWithNewTypeFails] +from typing import NewType +A = NewType('A', int) +B = NewType('B', A) # E: Argument 2 to NewType(...) cannot be another NewType +C = A +D = C +E = NewType('E', D) # E: Argument 2 to NewType(...) 
cannot be another NewType +[out] + [case testNewTypeRedefiningVariablesFails] from typing import NewType diff --git a/test-data/unit/check-optional.test b/test-data/unit/check-optional.test index 3cde2f99a1fb..6ae6df914376 100644 --- a/test-data/unit/check-optional.test +++ b/test-data/unit/check-optional.test @@ -125,10 +125,11 @@ def f(x: int = None) -> None: f(None) [out] -[case testNoInferOptionalFromDefaultNone] -# flags: --no-implicit-optional -def f(x: int = None) -> None: # E: Incompatible types in assignment (expression has type None, variable has type "int") - pass +[case testInferOptionalFromDefaultNoneWithFastParser] + +def f(x: int = None) -> None: + x + 1 # E: Unsupported left operand type for + (some union) +f(None) [out] [case testInferOptionalFromDefaultNoneComment] @@ -138,11 +139,12 @@ def f(x=None): f(None) [out] -[case testNoInferOptionalFromDefaultNoneComment] -# flags: --no-implicit-optional -def f(x=None): # E: Incompatible types in assignment (expression has type None, variable has type "int") +[case testInferOptionalFromDefaultNoneCommentWithFastParser] + +def f(x=None): # type: (int) -> None - pass + x + 1 # E: Unsupported left operand type for + (some union) +f(None) [out] [case testInferOptionalType] diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test index 6ab2d0fed017..37a170e7367e 100644 --- a/test-data/unit/check-unions.test +++ b/test-data/unit/check-unions.test @@ -335,13 +335,13 @@ t_a = None # type: Type[Any] reveal_type(u(t_o, t_o)) # E: Revealed type is 'Type[builtins.object]' reveal_type(u(t_s, t_s)) # E: Revealed type is 'Type[builtins.str]' reveal_type(u(t_a, t_a)) # E: Revealed type is 'Type[Any]' -reveal_type(u(type, type)) # E: Revealed type is 'def (x: builtins.object) -> builtins.type' +reveal_type(u(type, type)) # E: Revealed type is 'def (x: Any) -> builtins.type' # One type, other non-type reveal_type(u(t_s, 1)) # E: Revealed type is 'Union[builtins.int*, Type[builtins.str]]' reveal_type(u(1, t_s)) # E: Revealed type is 'Union[Type[builtins.str], builtins.int*]' -reveal_type(u(type, 1)) # E: Revealed type is 'Union[builtins.int*, def (x: builtins.object) -> builtins.type]' -reveal_type(u(1, type)) # E: Revealed type is 'Union[def (x: builtins.object) -> builtins.type, builtins.int*]' +reveal_type(u(type, 1)) # E: Revealed type is 'Union[builtins.int*, def (x: Any) -> builtins.type]' +reveal_type(u(1, type)) # E: Revealed type is 'Union[def (x: Any) -> builtins.type, builtins.int*]' reveal_type(u(t_a, 1)) # E: Revealed type is 'Union[builtins.int*, Type[Any]]' reveal_type(u(1, t_a)) # E: Revealed type is 'Union[Type[Any], builtins.int*]' reveal_type(u(t_o, 1)) # E: Revealed type is 'Union[builtins.int*, Type[builtins.object]]' diff --git a/test-data/unit/check-warnings.test b/test-data/unit/check-warnings.test index c95baec1cc93..2f2d592b13fd 100644 --- a/test-data/unit/check-warnings.test +++ b/test-data/unit/check-warnings.test @@ -165,20 +165,3 @@ from typing import Any def g() -> Any: pass def f() -> Any: return g() [out] - -[case testOKReturnAnyIfProperSubtype] -# flags: --warn-return-any --strict-optional -from typing import Any, Optional - -class Test(object): - - def __init__(self) -> None: - self.attr = "foo" # type: Any - - def foo(self, do_it: bool) -> Optional[Any]: - if do_it: - return self.attr # Should not warn here - else: - return None -[builtins fixtures/list.pyi] -[out] diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test index a9ba3c28950b..d0648844daaa 100644 --- 
a/test-data/unit/cmdline.test +++ b/test-data/unit/cmdline.test @@ -75,14 +75,6 @@ undef dir/subpkg/a.py:1: error: Name 'undef' is not defined dir/a.py:1: error: Name 'undef' is not defined -[case testCmdlineInvalidPackageName] -# cmd: mypy dir/sub.pkg/a.py -[file dir/sub.pkg/__init__.py] -[file dir/sub.pkg/a.py] -undef -[out] -sub.pkg is not a valid Python package name - [case testBadFileEncoding] # cmd: mypy a.py [file a.py] diff --git a/test-data/unit/fixtures/f_string.pyi b/test-data/unit/fixtures/f_string.pyi deleted file mode 100644 index 78d39aee85b8..000000000000 --- a/test-data/unit/fixtures/f_string.pyi +++ /dev/null @@ -1,36 +0,0 @@ -# Builtins stub used for format-string-related test cases. -# We need str and list, and str needs join and format methods. - -from typing import TypeVar, Generic, Iterable, Iterator, List, overload - -T = TypeVar('T') - -class object: - def __init__(self): pass - -class type: - def __init__(self, x) -> None: pass - -class ellipsis: pass - -class list(Iterable[T], Generic[T]): - @overload - def __init__(self) -> None: pass - @overload - def __init__(self, x: Iterable[T]) -> None: pass - def append(self, x: T) -> None: pass - -class tuple(Generic[T]): pass - -class function: pass -class int: - def __add__(self, i: int) -> int: pass - -class float: pass -class bool(int): pass - -class str: - def __add__(self, s: str) -> str: pass - def format(self, *args) -> str: pass - def join(self, l: List[str]) -> str: pass - diff --git a/test-data/unit/fixtures/module.pyi b/test-data/unit/fixtures/module.pyi index 44a4dfe0c277..b130d795d25c 100644 --- a/test-data/unit/fixtures/module.pyi +++ b/test-data/unit/fixtures/module.pyi @@ -17,5 +17,3 @@ class tuple: pass class dict(Generic[T, S]): pass class ellipsis: pass -classmethod = object() -staticmethod = object() diff --git a/test-data/unit/fixtures/property.pyi b/test-data/unit/fixtures/property.pyi index 929317e2ef66..994874b93b79 100644 --- a/test-data/unit/fixtures/property.pyi +++ b/test-data/unit/fixtures/property.pyi @@ -6,7 +6,7 @@ class object: def __init__(self) -> None: pass class type: - def __init__(self, x: typing.Any) -> None: pass + def __init__(self, x) -> None: pass class function: pass diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi deleted file mode 100644 index 87b51cd0d340..000000000000 --- a/test-data/unit/fixtures/typing-full.pyi +++ /dev/null @@ -1,110 +0,0 @@ -# More complete stub for typing module. -# -# Use [typing fixtures/typing-full.pyi] to use this instead of lib-stub/typing.pyi -# in a particular test case. -# -# Many of the definitions have special handling in the type checker, so they -# can just be initialized to anything. - -from abc import abstractmethod - -class GenericMeta(type): pass - -cast = 0 -overload = 0 -Any = 0 -Union = 0 -Optional = 0 -TypeVar = 0 -Generic = 0 -Tuple = 0 -Callable = 0 -_promote = 0 -NamedTuple = 0 -Type = 0 -no_type_check = 0 -ClassVar = 0 -NoReturn = 0 -NewType = 0 - -# Type aliases. 
-List = 0 -Dict = 0 -Set = 0 - -T = TypeVar('T') -U = TypeVar('U') -V = TypeVar('V') -S = TypeVar('S') - -class Container(Generic[T]): - @abstractmethod - # Use int because bool isn't in the default test builtins - def __contains__(self, arg: T) -> int: pass - -class Sized: - @abstractmethod - def __len__(self) -> int: pass - -class Iterable(Generic[T]): - @abstractmethod - def __iter__(self) -> 'Iterator[T]': pass - -class Iterator(Iterable[T], Generic[T]): - @abstractmethod - def __next__(self) -> T: pass - -class Generator(Iterator[T], Generic[T, U, V]): - @abstractmethod - def send(self, value: U) -> T: pass - - @abstractmethod - def throw(self, typ: Any, val: Any=None, tb=None) -> None: pass - - @abstractmethod - def close(self) -> None: pass - - @abstractmethod - def __iter__(self) -> 'Generator[T, U, V]': pass - -class AsyncGenerator(AsyncIterator[T], Generic[T, U]): - @abstractmethod - def __anext__(self) -> Awaitable[T]: pass - - @abstractmethod - def asend(self, value: U) -> Awaitable[T]: pass - - @abstractmethod - def athrow(self, typ: Any, val: Any=None, tb: Any=None) -> Awaitable[T]: pass - - @abstractmethod - def aclose(self) -> Awaitable[T]: pass - - @abstractmethod - def __aiter__(self) -> 'AsyncGenerator[T, U]': pass - -class Awaitable(Generic[T]): - @abstractmethod - def __await__(self) -> Generator[Any, Any, T]: pass - -class AwaitableGenerator(Generator[T, U, V], Awaitable[V], Generic[T, U, V, S]): - pass - -class AsyncIterable(Generic[T]): - @abstractmethod - def __aiter__(self) -> 'AsyncIterator[T]': pass - -class AsyncIterator(AsyncIterable[T], Generic[T]): - def __aiter__(self) -> 'AsyncIterator[T]': return self - @abstractmethod - def __anext__(self) -> Awaitable[T]: pass - -class Sequence(Iterable[T], Generic[T]): - @abstractmethod - def __getitem__(self, n: Any) -> T: pass - -class Mapping(Generic[T, U]): pass - -class MutableMapping(Generic[T, U]): pass - -TYPE_CHECKING = 1 diff --git a/test-data/unit/lib-stub/builtins.pyi b/test-data/unit/lib-stub/builtins.pyi index 457bea0e9020..5010235a53ab 100644 --- a/test-data/unit/lib-stub/builtins.pyi +++ b/test-data/unit/lib-stub/builtins.pyi @@ -1,8 +1,10 @@ +Any = 0 + class object: def __init__(self) -> None: pass class type: - def __init__(self, x: object) -> None: pass + def __init__(self, x: Any) -> None: pass # These are provided here for convenience. class int: diff --git a/test-data/unit/lib-stub/types.pyi b/test-data/unit/lib-stub/types.pyi index 02113aea3834..b118000e688c 100644 --- a/test-data/unit/lib-stub/types.pyi +++ b/test-data/unit/lib-stub/types.pyi @@ -1,10 +1,9 @@ -from typing import TypeVar +from typing import TypeVar, Optional, List, Any, Generic, Sequence +T = TypeVar('T') -_T = TypeVar('_T') - -def coroutine(func: _T) -> _T: pass +def coroutine(func: T) -> T: + return func class bool: ... -class ModuleType: - __file__ = ... # type: str +class ModuleType: ... diff --git a/test-data/unit/lib-stub/typing.pyi b/test-data/unit/lib-stub/typing.pyi index 274f3da76164..754c32c8d23e 100644 --- a/test-data/unit/lib-stub/typing.pyi +++ b/test-data/unit/lib-stub/typing.pyi @@ -20,7 +20,6 @@ Type = 0 no_type_check = 0 ClassVar = 0 NoReturn = 0 -NewType = 0 # Type aliases. 
List = 0 @@ -54,7 +53,7 @@ class Generator(Iterator[T], Generic[T, U, V]): def send(self, value: U) -> T: pass @abstractmethod - def throw(self, typ: Any, val: Any = None, tb: Any = None) -> None: pass + def throw(self, typ: Any, val: Any=None, tb=None) -> None: pass @abstractmethod def close(self) -> None: pass @@ -62,6 +61,38 @@ class Generator(Iterator[T], Generic[T, U, V]): @abstractmethod def __iter__(self) -> 'Generator[T, U, V]': pass +class AsyncGenerator(AsyncIterator[T], Generic[T, U]): + @abstractmethod + def __anext__(self) -> Awaitable[T]: pass + + @abstractmethod + def asend(self, value: U) -> Awaitable[T]: pass + + @abstractmethod + def athrow(self, typ: Any, val: Any=None, tb: Any=None) -> Awaitable[T]: pass + + @abstractmethod + def aclose(self) -> Awaitable[T]: pass + + @abstractmethod + def __aiter__(self) -> 'AsyncGenerator[T, U]': pass + +class Awaitable(Generic[T]): + @abstractmethod + def __await__(self) -> Generator[Any, Any, T]: pass + +class AwaitableGenerator(Generator[T, U, V], Awaitable[V], Generic[T, U, V, S]): + pass + +class AsyncIterable(Generic[T]): + @abstractmethod + def __aiter__(self) -> 'AsyncIterator[T]': pass + +class AsyncIterator(AsyncIterable[T], Generic[T]): + def __aiter__(self) -> 'AsyncIterator[T]': return self + @abstractmethod + def __anext__(self) -> Awaitable[T]: pass + class Sequence(Iterable[T], Generic[T]): @abstractmethod def __getitem__(self, n: Any) -> T: pass @@ -70,4 +101,9 @@ class Mapping(Generic[T, U]): pass class MutableMapping(Generic[T, U]): pass +def NewType(name: str, tp: Type[T]) -> Callable[[T], T]: + def new_type(x): + return x + return new_type + TYPE_CHECKING = 1 diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index c38bc5cff9b8..d224fcc19a9c 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -400,7 +400,7 @@ f.write(b'x') f.foobar() [out] _program.py:3: error: Argument 1 to "write" of "IO" has incompatible type "bytes"; expected "str" -_program.py:4: error: "TextIO" has no attribute "foobar" +_program.py:4: error: IO[str] has no attribute "foobar" [case testOpenReturnTypeInference] reveal_type(open('x')) @@ -409,9 +409,9 @@ reveal_type(open('x', 'rb')) mode = 'rb' reveal_type(open('x', mode)) [out] -_program.py:1: error: Revealed type is 'typing.TextIO' -_program.py:2: error: Revealed type is 'typing.TextIO' -_program.py:3: error: Revealed type is 'typing.BinaryIO' +_program.py:1: error: Revealed type is 'typing.IO[builtins.str]' +_program.py:2: error: Revealed type is 'typing.IO[builtins.str]' +_program.py:3: error: Revealed type is 'typing.IO[builtins.bytes]' _program.py:5: error: Revealed type is 'typing.IO[Any]' [case testOpenReturnTypeInferenceSpecialCases] @@ -421,10 +421,10 @@ reveal_type(open(file='x', mode='rb')) mode = 'rb' reveal_type(open(mode=mode, file='r')) [out] -_testOpenReturnTypeInferenceSpecialCases.py:1: error: Revealed type is 'typing.TextIO' +_testOpenReturnTypeInferenceSpecialCases.py:1: error: Revealed type is 'typing.IO[builtins.str]' _testOpenReturnTypeInferenceSpecialCases.py:1: error: Too few arguments for "open" -_testOpenReturnTypeInferenceSpecialCases.py:2: error: Revealed type is 'typing.BinaryIO' -_testOpenReturnTypeInferenceSpecialCases.py:3: error: Revealed type is 'typing.BinaryIO' +_testOpenReturnTypeInferenceSpecialCases.py:2: error: Revealed type is 'typing.IO[builtins.bytes]' +_testOpenReturnTypeInferenceSpecialCases.py:3: error: Revealed type is 'typing.IO[builtins.bytes]' 
_testOpenReturnTypeInferenceSpecialCases.py:5: error: Revealed type is 'typing.IO[Any]' [case testGenericPatterns] diff --git a/test-data/unit/semanal-classvar.test b/test-data/unit/semanal-classvar.test index d2e474cd278f..677e1bd8cadc 100644 --- a/test-data/unit/semanal-classvar.test +++ b/test-data/unit/semanal-classvar.test @@ -95,7 +95,7 @@ def f(x: ClassVar, y: ClassVar) -> ClassVar: pass main:2: error: ClassVar can only be used for assignments in class body [case testClassVarInCallableArgs] -from typing import Callable, ClassVar, Any +from typing import Callable, ClassVar f = None # type: Callable[[int, ClassVar], Any] [out] main:2: error: Invalid type: ClassVar nested inside other type diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test index c1ec57f205de..4c3033191dfc 100644 --- a/test-data/unit/semanal-types.test +++ b/test-data/unit/semanal-types.test @@ -720,7 +720,7 @@ MypyFile:1( def ())) [case testOverloadedFunction] -from typing import overload, Any +from typing import overload @overload def f(a: object) -> int: a @overload @@ -730,7 +730,7 @@ def f(a: Any) -> Any: return a [out] MypyFile:1( - ImportFrom:1(typing, [overload, Any]) + ImportFrom:1(typing, [overload]) OverloadedFuncDef:2( FuncDef:7( f diff --git a/typeshed b/typeshed index be80c368161e..c2e6a6f670f5 160000 --- a/typeshed +++ b/typeshed @@ -1 +1 @@ -Subproject commit be80c368161eeace47d93eb9908ddda1aacf3b86 +Subproject commit c2e6a6f670f5d75c07eb31cf2509db302825b76d From 13dcb81311d1ecf9455c53649f72dbd636de1cf0 Mon Sep 17 00:00:00 2001 From: quartox Date: Sun, 4 Jun 2017 14:35:21 -0700 Subject: [PATCH 13/27] Fix typeshed --- typeshed | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/typeshed b/typeshed index c2e6a6f670f5..be80c368161e 160000 --- a/typeshed +++ b/typeshed @@ -1 +1 @@ -Subproject commit c2e6a6f670f5d75c07eb31cf2509db302825b76d +Subproject commit be80c368161eeace47d93eb9908ddda1aacf3b86 From 079bff693dd5a6d60f7a966e7ab9702b20b6b1b9 Mon Sep 17 00:00:00 2001 From: quartox Date: Sun, 4 Jun 2017 14:53:51 -0700 Subject: [PATCH 14/27] Fix spacing --- test-data/unit/check-typevar-values.test | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test-data/unit/check-typevar-values.test b/test-data/unit/check-typevar-values.test index e8d432268fd0..399f6e2ab4f7 100644 --- a/test-data/unit/check-typevar-values.test +++ b/test-data/unit/check-typevar-values.test @@ -7,7 +7,7 @@ T = TypeVar('T', int, str) def f(x: T) -> None: pass f(1) f('x') -f(object()) # E: Type argument 1 of "f" has incompatible value\ +f(object()) # E: Type argument 1 of "f" has incompatible value \ # N: "T" must be all one type: int or str [case testCallGenericFunctionWithTypeVarValueRestrictionUsingContext] @@ -19,7 +19,7 @@ s = ['x'] o = [object()] i = f(1) s = f('') -o = f(1) # E: Type argument 1 of "f" has incompatible value\ +o = f(1) # E: Type argument 1 of "f" has incompatible value \ # N: "T" must be all one type: int or str [builtins fixtures/list.pyi] @@ -241,7 +241,7 @@ class A(Generic[X]): A(1) A('x') A(cast(Any, object())) -A(object()) # E: Type argument 1 of "A" has incompatible value\ +A(object()) # E: Type argument 1 of "A" has incompatible value \ # N: "X" must be all one type: int or str [case testGenericTypeWithTypevarValuesAndTypevarArgument] From 5c2b024dd927f35894aafaabd20166788286a6b4 Mon Sep 17 00:00:00 2001 From: quartox Date: Sun, 11 Jun 2017 09:12:55 -0700 Subject: [PATCH 15/27] Test checkout upstream/master --- .travis.yml | 1 + 1 file changed, 
1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 56804c71bf50..2dffdb283666 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,6 +15,7 @@ python: install: - pip install -r test-requirements.txt + - python2 -m pip install --user typing - python setup.py install script: From 2bf23ca341632574d8aae25055e1cc1ae61cceef Mon Sep 17 00:00:00 2001 From: quartox Date: Sun, 11 Jun 2017 09:28:16 -0700 Subject: [PATCH 16/27] Fix inappropriate updates to files --- LICENSE | 3 +- ROADMAP.md | 96 + docs/source/cheat_sheet.rst | 12 + docs/source/cheat_sheet_py3.rst | 13 + docs/source/common_issues.rst | 2 +- docs/source/config_file.rst | 2 + docs/source/revision_history.rst | 2 +- extensions/setup.py | 2 +- lib-typing/2.7/mod_generics_cache.py | 14 - lib-typing/2.7/setup.py | 46 - lib-typing/2.7/test_typing.py | 1828 ------------- lib-typing/2.7/typing.py | 2140 --------------- lib-typing/3.2/mod_generics_cache.py | 14 - lib-typing/3.2/test_typing.py | 2422 ----------------- lib-typing/3.2/typing.py | 2335 ---------------- mypy/__main__.py | 8 +- mypy/checker.py | 15 +- mypy/checkexpr.py | 11 +- mypy/checkmember.py | 9 + mypy/fastparse.py | 89 +- mypy/fastparse2.py | 37 +- mypy/main.py | 20 +- mypy/options.py | 4 + mypy/parse.py | 6 +- mypy/report.py | 2 +- mypy/semanal.py | 152 +- mypy/subtypes.py | 40 +- mypy/test/data.py | 33 +- mypy/test/helpers.py | 30 +- mypy/test/testcheck.py | 10 +- mypy/test/testpythoneval.py | 10 +- mypy/waiter.py | 14 +- runtests.py | 49 +- scripts/stubgen | 0 setup.cfg | 2 - setup.py | 6 +- .../3.2/test/test_genericpath.py | 2 +- test-data/unit/README.md | 23 +- test-data/unit/check-async-await.test | 44 +- test-data/unit/check-class-namedtuple.test | 1 + test-data/unit/check-classes.test | 159 +- test-data/unit/check-expressions.test | 10 + test-data/unit/check-incomplete-fixture.test | 98 + test-data/unit/check-incremental.test | 480 +++- test-data/unit/check-modules.test | 224 ++ test-data/unit/check-newsyntax.test | 16 +- test-data/unit/check-newtype.test | 32 +- test-data/unit/check-optional.test | 18 +- test-data/unit/check-unions.test | 6 +- test-data/unit/check-warnings.test | 17 + test-data/unit/cmdline.test | 8 + test-data/unit/fixtures/f_string.pyi | 36 + test-data/unit/fixtures/module.pyi | 2 + test-data/unit/fixtures/property.pyi | 2 +- test-data/unit/lib-stub/builtins.pyi | 4 +- test-data/unit/lib-stub/types.pyi | 11 +- test-data/unit/lib-stub/typing.pyi | 40 +- test-data/unit/semanal-classvar.test | 2 +- test-data/unit/semanal-types.test | 4 +- 59 files changed, 1620 insertions(+), 9097 deletions(-) create mode 100644 ROADMAP.md delete mode 100644 lib-typing/2.7/mod_generics_cache.py delete mode 100644 lib-typing/2.7/setup.py delete mode 100644 lib-typing/2.7/test_typing.py delete mode 100644 lib-typing/2.7/typing.py delete mode 100644 lib-typing/3.2/mod_generics_cache.py delete mode 100644 lib-typing/3.2/test_typing.py delete mode 100644 lib-typing/3.2/typing.py mode change 100755 => 100644 scripts/stubgen create mode 100644 test-data/unit/check-incomplete-fixture.test create mode 100644 test-data/unit/fixtures/f_string.pyi diff --git a/LICENSE b/LICENSE index 8145cc386a7b..afddd48c889e 100644 --- a/LICENSE +++ b/LICENSE @@ -27,8 +27,7 @@ DEALINGS IN THE SOFTWARE. = = = = = Portions of mypy are licensed under different licenses. The files -under stdlib-samples and lib-typing are licensed under the PSF 2 -License, reproduced below. +under stdlib-samples are licensed under the PSF 2 License, reproduced below. 
= = = = = diff --git a/ROADMAP.md b/ROADMAP.md new file mode 100644 index 000000000000..132d53c2c581 --- /dev/null +++ b/ROADMAP.md @@ -0,0 +1,96 @@ +# Mypy Roadmap + +The goal of the roadmap is to document areas the mypy core team is +planning to work on in the future or is currently working on. PRs +targeting these areas are very welcome, but please check first with a +core team member that nobody else is working on the same thing. + +**Note:** This doesn’t include everything that the core team will work +on, and everything is subject to change. Near-term plans are likely +more accurate. + +## April-June 2017 + +- Add more comprehensive testing for `--incremental` and `--quick` + modes to improve reliability. At least write more unit tests with + focus on areas that have previously had bugs. + ([issue](https://github.com/python/mypy/issues/3455)) + +- Speed up `--quick` mode to better support million+ line codebases + through some of these: + + - Make it possible to use remote caching for incremental cache + files. This would speed up a cold run with no local cache data. + We need to update incremental cache to use hashes to determine + whether files have changes to allow + [sharing cache data](https://github.com/python/mypy/issues/3403). + + - See if we can speed up deserialization of incremental cache + files. Initial experiments aren’t very promising though so there + might not be any easy wins left. + ([issue](https://github.com/python/mypy/issues/3456)) + +- Improve support for complex signatures such as `open(fn, 'rb')` and + specific complex decorators such as `contextlib.contextmanager` + through type checker plugins/hooks. + ([issue](https://github.com/python/mypy/issues/1240)) + +- Document basic properties of all type operations used within mypy, + including compatibility, proper subtyping, joins and meets. + ([issue](https://github.com/python/mypy/issues/3454)) + +- Make TypedDict an officially supported mypy feature. This makes it + possible to give precise types for dictionaries that represent JSON + objects, such as `{"path": "/dir/fnam.ext", "size": 1234}`. + ([issue](https://github.com/python/mypy/issues/3453)) + +- Make error messages more useful and informative. + ([issue](https://github.com/python/mypy/labels/topic-usability)) + +- Resolve [#2008](https://github.com/python/mypy/issues/2008) (we are + converging on approach 4). + +## July-December 2017 + +- Invest some effort into systematically filling in missing + annotations and stubs in typeshed, with focus on features heavily + used at Dropbox. Better support for ORMs will be a separate + project. + +- Improve opt-in warnings about `Any` types to make it easier to keep + code free from unwanted `Any` types. For example, warn about using + `list` (instead of `List[x]`) and calling `open` if we can’t infer a + precise return type, or using types imported from ignored modules + (they are implicitly `Any`). + +- Add support for protocols and structural subtyping (PEP 544). + +- Switch completely to pytest and remove the custom testing framework. + ([issue](https://github.com/python/mypy/issues/1673)) + +- Make it possible to run mypy as a daemon to avoid reprocessing the + entire program on each run. This will improve performance + significantly. Even when using the incremental mode, processing a + large number of files is not cheap. 
+ +- Refactor and simplify specific tricky parts of mypy internals, such + as the [conditional type binder](https://github.com/python/mypy/issues/3457), + [symbol tables](https://github.com/python/mypy/issues/3458) or + the various [semantic analysis passes](https://github.com/python/mypy/issues/3459). + +- Implement a general type system plugin architecture. It should be + able to support some typical ORM features at least, such as + metaclasses that add methods with automatically inferred signatures + and complex descriptors such as those used by Django models. + ([issue](https://github.com/python/mypy/issues/1240)) + +- Add support for statically typed + [protobufs](https://developers.google.com/protocol-buffers/). + +- Provide much faster, reliable interactive feedback through + fine-grained incremental type checking, built on top the daemon + mode. + +- Start work on editor plugins and support for selected IDE features. + +- Turn on `--strict-optional` by default. diff --git a/docs/source/cheat_sheet.rst b/docs/source/cheat_sheet.rst index 49919a56831c..f8e7146c65f4 100644 --- a/docs/source/cheat_sheet.rst +++ b/docs/source/cheat_sheet.rst @@ -149,6 +149,18 @@ When you're puzzled or when things are complicated reveal_type(c) # -> error: Revealed type is 'builtins.list[builtins.str]' print(c) # -> [4] the object is not cast + # if you want dynamic attributes on your class, have it override __setattr__ or __getattr__ + # in a stub or in your source code. + # __setattr__ allows for dynamic assignment to names + # __getattr__ allows for dynamic access to names + class A: + # this will allow assignment to any A.x, if x is the same type as `value` + def __setattr__(self, name, value): + # type: (str, int) -> None + ... + a.foo = 42 # works + a.bar = 'Ex-parrot' # fails type checking + # TODO: explain "Need type annotation for variable" when # initializing with None or an empty container diff --git a/docs/source/cheat_sheet_py3.rst b/docs/source/cheat_sheet_py3.rst index adeab7d734d4..5ef62b28134f 100644 --- a/docs/source/cheat_sheet_py3.rst +++ b/docs/source/cheat_sheet_py3.rst @@ -142,6 +142,19 @@ When you're puzzled or when things are complicated reveal_type(c) # -> error: Revealed type is 'builtins.list[builtins.str]' print(c) # -> [4] the object is not cast + # if you want dynamic attributes on your class, have it override __setattr__ or __getattr__ + # in a stub or in your source code. + # __setattr__ allows for dynamic assignment to names + # __getattr__ allows for dynamic access to names + class A: + # this will allow assignment to any A.x, if x is the same type as `value` + def __setattr__(self, name: str, value: int) -> None: ... + # this will allow access to any A.x, if x is compatible with the return type + def __getattr__(self, name: str) -> int: ... + a.foo = 42 # works + a.bar = 'Ex-parrot' # fails type checking + + # TODO: explain "Need type annotation for variable" when # initializing with None or an empty container diff --git a/docs/source/common_issues.rst b/docs/source/common_issues.rst index 2501acd28fc0..0c8b500d8f06 100644 --- a/docs/source/common_issues.rst +++ b/docs/source/common_issues.rst @@ -180,7 +180,7 @@ not support ``sort()``) as a list and sort it in-place: # Type of x is List[int] here. x.sort() # Okay! -.. _invariance-vs-covariance: +.. 
_variance: Invariance vs covariance ------------------------ diff --git a/docs/source/config_file.rst b/docs/source/config_file.rst index 7e04286d6cd3..6fe139b1bb8c 100644 --- a/docs/source/config_file.rst +++ b/docs/source/config_file.rst @@ -178,6 +178,8 @@ overridden by the pattern sections matching the module name. - ``strict_boolean`` (Boolean, default False) makes using non-boolean expressions in conditions an error. +- ``no_implicit_optional`` (Boolean, default false) changes the treatment of + arguments with a default value of None by not implicitly making their type Optional Example ******* diff --git a/docs/source/revision_history.rst b/docs/source/revision_history.rst index dd74dadd244e..98f6495dd188 100644 --- a/docs/source/revision_history.rst +++ b/docs/source/revision_history.rst @@ -31,7 +31,7 @@ List of major changes: * Add :ref:`variance-of-generics`. - * Add :ref:`invariance-vs-covariance`. + * Add :ref:`variance`. * Updates to :ref:`python-36`. diff --git a/extensions/setup.py b/extensions/setup.py index 32741a0b67b6..59d634debc3f 100644 --- a/extensions/setup.py +++ b/extensions/setup.py @@ -4,7 +4,7 @@ from distutils.core import setup -version = '0.2.0' +version = '0.3.0-dev' description = 'Experimental type system extensions for programs checked with the mypy typechecker.' long_description = ''' Mypy Extensions diff --git a/lib-typing/2.7/mod_generics_cache.py b/lib-typing/2.7/mod_generics_cache.py deleted file mode 100644 index d9a60b4b28c3..000000000000 --- a/lib-typing/2.7/mod_generics_cache.py +++ /dev/null @@ -1,14 +0,0 @@ -"""Module for testing the behavior of generics across different modules.""" - -from typing import TypeVar, Generic - -T = TypeVar('T') - - -class A(Generic[T]): - pass - - -class B(Generic[T]): - class A(Generic[T]): - pass diff --git a/lib-typing/2.7/setup.py b/lib-typing/2.7/setup.py deleted file mode 100644 index 18c34d84be64..000000000000 --- a/lib-typing/2.7/setup.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python - -"""setup.py for Python 2.x typing module""" - -import glob -import os -import os.path -import sys - -from distutils.core import setup - -if sys.version_info >= (3, 0, 0): - sys.stderr.write("ERROR: You need Python 2.x to install this module.\n") - exit(1) - -version = '0.0.1.dev1' -description = 'typing (Python 2.x)' -long_description = ''' -typing (Python 2.x) -=================== - -This module is part of mypy, a static type checker for Python. 
-'''.lstrip() - -classifiers = [ - 'Development Status :: 2 - Pre-Alpha', - 'Environment :: Console', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: MIT License', - 'Operating System :: POSIX', - 'Programming Language :: Python :: 2.7', - 'Topic :: Software Development', -] - -setup(name='typing', - version=version, - description=description, - long_description=long_description, - author='Jukka Lehtosalo', - author_email='jukka.lehtosalo@iki.fi', - url='http://www.mypy-lang.org/', - license='MIT License', - platforms=['POSIX'], - py_modules=['typing'], - classifiers=classifiers, - ) diff --git a/lib-typing/2.7/test_typing.py b/lib-typing/2.7/test_typing.py deleted file mode 100644 index 2ea954fe30f4..000000000000 --- a/lib-typing/2.7/test_typing.py +++ /dev/null @@ -1,1828 +0,0 @@ -from __future__ import absolute_import, unicode_literals - -import collections -import pickle -import re -import sys -from unittest import TestCase, main, SkipTest -from copy import copy, deepcopy - -from typing import Any -from typing import TypeVar, AnyStr -from typing import T, KT, VT # Not in __all__. -from typing import Union, Optional -from typing import Tuple, List, MutableMapping -from typing import Callable -from typing import Generic, ClassVar, GenericMeta -from typing import cast -from typing import Type -from typing import NewType -from typing import NamedTuple -from typing import Pattern, Match -import typing -import weakref -try: - import collections.abc as collections_abc -except ImportError: - import collections as collections_abc # Fallback for PY3.2. - - -class BaseTestCase(TestCase): - - def assertIsSubclass(self, cls, class_or_tuple, msg=None): - if not issubclass(cls, class_or_tuple): - message = '%r is not a subclass of %r' % (cls, class_or_tuple) - if msg is not None: - message += ' : %s' % msg - raise self.failureException(message) - - def assertNotIsSubclass(self, cls, class_or_tuple, msg=None): - if issubclass(cls, class_or_tuple): - message = '%r is a subclass of %r' % (cls, class_or_tuple) - if msg is not None: - message += ' : %s' % msg - raise self.failureException(message) - - def clear_caches(self): - for f in typing._cleanups: - f() - - -class Employee(object): - pass - - -class Manager(Employee): - pass - - -class Founder(Employee): - pass - - -class ManagingFounder(Manager, Founder): - pass - - -class AnyTests(BaseTestCase): - - def test_any_instance_type_error(self): - with self.assertRaises(TypeError): - isinstance(42, Any) - - def test_any_subclass_type_error(self): - with self.assertRaises(TypeError): - issubclass(Employee, Any) - with self.assertRaises(TypeError): - issubclass(Any, Employee) - - def test_repr(self): - self.assertEqual(repr(Any), 'typing.Any') - - def test_errors(self): - with self.assertRaises(TypeError): - issubclass(42, Any) - with self.assertRaises(TypeError): - Any[int] # Any is not a generic type. - - def test_cannot_subclass(self): - with self.assertRaises(TypeError): - class A(Any): - pass - with self.assertRaises(TypeError): - class A(type(Any)): - pass - - def test_cannot_instantiate(self): - with self.assertRaises(TypeError): - Any() - with self.assertRaises(TypeError): - type(Any)() - - def test_any_is_subclass(self): - # These expressions must simply not fail. - typing.Match[Any] - typing.Pattern[Any] - typing.IO[Any] - - -class TypeVarTests(BaseTestCase): - - def test_basic_plain(self): - T = TypeVar('T') - # T equals itself. 
- self.assertEqual(T, T) - # T is an instance of TypeVar - self.assertIsInstance(T, TypeVar) - - def test_typevar_instance_type_error(self): - T = TypeVar('T') - with self.assertRaises(TypeError): - isinstance(42, T) - - def test_typevar_subclass_type_error(self): - T = TypeVar('T') - with self.assertRaises(TypeError): - issubclass(int, T) - with self.assertRaises(TypeError): - issubclass(T, int) - - def test_constrained_error(self): - with self.assertRaises(TypeError): - X = TypeVar('X', int) - X - - def test_union_unique(self): - X = TypeVar('X') - Y = TypeVar('Y') - self.assertNotEqual(X, Y) - self.assertEqual(Union[X], X) - self.assertNotEqual(Union[X], Union[X, Y]) - self.assertEqual(Union[X, X], X) - self.assertNotEqual(Union[X, int], Union[X]) - self.assertNotEqual(Union[X, int], Union[int]) - self.assertEqual(Union[X, int].__args__, (X, int)) - self.assertEqual(Union[X, int].__parameters__, (X,)) - self.assertIs(Union[X, int].__origin__, Union) - - def test_union_constrained(self): - A = TypeVar('A', str, bytes) - self.assertNotEqual(Union[A, str], Union[A]) - - def test_repr(self): - self.assertEqual(repr(T), '~T') - self.assertEqual(repr(KT), '~KT') - self.assertEqual(repr(VT), '~VT') - self.assertEqual(repr(AnyStr), '~AnyStr') - T_co = TypeVar('T_co', covariant=True) - self.assertEqual(repr(T_co), '+T_co') - T_contra = TypeVar('T_contra', contravariant=True) - self.assertEqual(repr(T_contra), '-T_contra') - - def test_no_redefinition(self): - self.assertNotEqual(TypeVar('T'), TypeVar('T')) - self.assertNotEqual(TypeVar('T', int, str), TypeVar('T', int, str)) - - def test_cannot_subclass_vars(self): - with self.assertRaises(TypeError): - class V(TypeVar('T')): - pass - - def test_cannot_subclass_var_itself(self): - with self.assertRaises(TypeError): - class V(TypeVar): - pass - - def test_cannot_instantiate_vars(self): - with self.assertRaises(TypeError): - TypeVar('A')() - - def test_bound_errors(self): - with self.assertRaises(TypeError): - TypeVar('X', bound=42) - with self.assertRaises(TypeError): - TypeVar('X', str, float, bound=Employee) - - def test_no_bivariant(self): - with self.assertRaises(ValueError): - TypeVar('T', covariant=True, contravariant=True) - - -class UnionTests(BaseTestCase): - - def test_basics(self): - u = Union[int, float] - self.assertNotEqual(u, Union) - - def test_subclass_error(self): - with self.assertRaises(TypeError): - issubclass(int, Union) - with self.assertRaises(TypeError): - issubclass(Union, int) - with self.assertRaises(TypeError): - issubclass(int, Union[int, str]) - with self.assertRaises(TypeError): - issubclass(Union[int, str], int) - - def test_union_any(self): - u = Union[Any] - self.assertEqual(u, Any) - u1 = Union[int, Any] - u2 = Union[Any, int] - u3 = Union[Any, object] - self.assertEqual(u1, u2) - self.assertNotEqual(u1, Any) - self.assertNotEqual(u2, Any) - self.assertNotEqual(u3, Any) - - def test_union_object(self): - u = Union[object] - self.assertEqual(u, object) - u = Union[int, object] - self.assertEqual(u, object) - u = Union[object, int] - self.assertEqual(u, object) - - def test_unordered(self): - u1 = Union[int, float] - u2 = Union[float, int] - self.assertEqual(u1, u2) - - def test_single_class_disappears(self): - t = Union[Employee] - self.assertIs(t, Employee) - - def test_base_class_disappears(self): - u = Union[Employee, Manager, int] - self.assertEqual(u, Union[int, Employee]) - u = Union[Manager, int, Employee] - self.assertEqual(u, Union[int, Employee]) - u = Union[Employee, Manager] - self.assertIs(u, 
Employee) - - def test_union_union(self): - u = Union[int, float] - v = Union[u, Employee] - self.assertEqual(v, Union[int, float, Employee]) - - def test_repr(self): - self.assertEqual(repr(Union), 'typing.Union') - u = Union[Employee, int] - self.assertEqual(repr(u), 'typing.Union[%s.Employee, int]' % __name__) - u = Union[int, Employee] - self.assertEqual(repr(u), 'typing.Union[int, %s.Employee]' % __name__) - T = TypeVar('T') - u = Union[T, int][int] - self.assertEqual(repr(u), repr(int)) - u = Union[List[int], int] - self.assertEqual(repr(u), 'typing.Union[typing.List[int], int]') - - def test_cannot_subclass(self): - with self.assertRaises(TypeError): - class C(Union): - pass - with self.assertRaises(TypeError): - class C(type(Union)): - pass - with self.assertRaises(TypeError): - class C(Union[int, str]): - pass - - def test_cannot_instantiate(self): - with self.assertRaises(TypeError): - Union() - u = Union[int, float] - with self.assertRaises(TypeError): - u() - with self.assertRaises(TypeError): - type(u)() - - def test_union_generalization(self): - self.assertFalse(Union[str, typing.Iterable[int]] == str) - self.assertFalse(Union[str, typing.Iterable[int]] == typing.Iterable[int]) - self.assertTrue(Union[str, typing.Iterable] == typing.Iterable) - - def test_union_compare_other(self): - self.assertNotEqual(Union, object) - self.assertNotEqual(Union, Any) - self.assertNotEqual(ClassVar, Union) - self.assertNotEqual(Optional, Union) - self.assertNotEqual([None], Optional) - self.assertNotEqual(Optional, typing.Mapping) - self.assertNotEqual(Optional[typing.MutableMapping], Union) - - def test_optional(self): - o = Optional[int] - u = Union[int, None] - self.assertEqual(o, u) - - def test_empty(self): - with self.assertRaises(TypeError): - Union[()] - - def test_union_instance_type_error(self): - with self.assertRaises(TypeError): - isinstance(42, Union[int, str]) - - def test_no_eval_union(self): - u = Union[int, str] - self.assertIs(u._eval_type({}, {}), u) - - def test_function_repr_union(self): - def fun(): pass - self.assertEqual(repr(Union[fun, int]), 'typing.Union[fun, int]') - - def test_union_str_pattern(self): - # Shouldn't crash; see http://bugs.python.org/issue25390 - A = Union[str, Pattern] - A - - def test_etree(self): - # See https://github.com/python/typing/issues/229 - # (Only relevant for Python 2.) 
- try: - from xml.etree.cElementTree import Element - except ImportError: - raise SkipTest("cElementTree not found") - Union[Element, str] # Shouldn't crash - - def Elem(*args): - return Element(*args) - - Union[Elem, str] # Nor should this - - -class TupleTests(BaseTestCase): - - def test_basics(self): - with self.assertRaises(TypeError): - issubclass(Tuple, Tuple[int, str]) - with self.assertRaises(TypeError): - issubclass(tuple, Tuple[int, str]) - - class TP(tuple): pass - self.assertTrue(issubclass(tuple, Tuple)) - self.assertTrue(issubclass(TP, Tuple)) - - def test_equality(self): - self.assertEqual(Tuple[int], Tuple[int]) - self.assertEqual(Tuple[int, ...], Tuple[int, ...]) - self.assertNotEqual(Tuple[int], Tuple[int, int]) - self.assertNotEqual(Tuple[int], Tuple[int, ...]) - - def test_tuple_subclass(self): - class MyTuple(tuple): - pass - self.assertTrue(issubclass(MyTuple, Tuple)) - - def test_tuple_instance_type_error(self): - with self.assertRaises(TypeError): - isinstance((0, 0), Tuple[int, int]) - isinstance((0, 0), Tuple) - - def test_repr(self): - self.assertEqual(repr(Tuple), 'typing.Tuple') - self.assertEqual(repr(Tuple[()]), 'typing.Tuple[()]') - self.assertEqual(repr(Tuple[int, float]), 'typing.Tuple[int, float]') - self.assertEqual(repr(Tuple[int, ...]), 'typing.Tuple[int, ...]') - - def test_errors(self): - with self.assertRaises(TypeError): - issubclass(42, Tuple) - with self.assertRaises(TypeError): - issubclass(42, Tuple[int]) - - -class CallableTests(BaseTestCase): - - def test_self_subclass(self): - with self.assertRaises(TypeError): - self.assertTrue(issubclass(type(lambda x: x), Callable[[int], int])) - self.assertTrue(issubclass(type(lambda x: x), Callable)) - - def test_eq_hash(self): - self.assertEqual(Callable[[int], int], Callable[[int], int]) - self.assertEqual(len({Callable[[int], int], Callable[[int], int]}), 1) - self.assertNotEqual(Callable[[int], int], Callable[[int], str]) - self.assertNotEqual(Callable[[int], int], Callable[[str], int]) - self.assertNotEqual(Callable[[int], int], Callable[[int, int], int]) - self.assertNotEqual(Callable[[int], int], Callable[[], int]) - self.assertNotEqual(Callable[[int], int], Callable) - - def test_cannot_instantiate(self): - with self.assertRaises(TypeError): - Callable() - with self.assertRaises(TypeError): - type(Callable)() - c = Callable[[int], str] - with self.assertRaises(TypeError): - c() - with self.assertRaises(TypeError): - type(c)() - - def test_callable_wrong_forms(self): - with self.assertRaises(TypeError): - Callable[(), int] - with self.assertRaises(TypeError): - Callable[[()], int] - with self.assertRaises(TypeError): - Callable[[int, 1], 2] - with self.assertRaises(TypeError): - Callable[int] - - def test_callable_instance_works(self): - def f(): - pass - self.assertIsInstance(f, Callable) - self.assertNotIsInstance(None, Callable) - - def test_callable_instance_type_error(self): - def f(): - pass - with self.assertRaises(TypeError): - self.assertIsInstance(f, Callable[[], None]) - with self.assertRaises(TypeError): - self.assertIsInstance(f, Callable[[], Any]) - with self.assertRaises(TypeError): - self.assertNotIsInstance(None, Callable[[], None]) - with self.assertRaises(TypeError): - self.assertNotIsInstance(None, Callable[[], Any]) - - def test_repr(self): - ct0 = Callable[[], bool] - self.assertEqual(repr(ct0), 'typing.Callable[[], bool]') - ct2 = Callable[[str, float], int] - self.assertEqual(repr(ct2), 'typing.Callable[[str, float], int]') - ctv = Callable[..., str] - 
self.assertEqual(repr(ctv), 'typing.Callable[..., str]') - - def test_ellipsis_in_generic(self): - # Shouldn't crash; see https://github.com/python/typing/issues/259 - typing.List[Callable[..., str]] - - -XK = TypeVar('XK', unicode, bytes) -XV = TypeVar('XV') - - -class SimpleMapping(Generic[XK, XV]): - - def __getitem__(self, key): - pass - - def __setitem__(self, key, value): - pass - - def get(self, key, default=None): - pass - - -class MySimpleMapping(SimpleMapping[XK, XV]): - - def __init__(self): - self.store = {} - - def __getitem__(self, key): - return self.store[key] - - def __setitem__(self, key, value): - self.store[key] = value - - def get(self, key, default=None): - try: - return self.store[key] - except KeyError: - return default - - -class ProtocolTests(BaseTestCase): - - def test_supports_int(self): - self.assertIsSubclass(int, typing.SupportsInt) - self.assertNotIsSubclass(str, typing.SupportsInt) - - def test_supports_float(self): - self.assertIsSubclass(float, typing.SupportsFloat) - self.assertNotIsSubclass(str, typing.SupportsFloat) - - def test_supports_complex(self): - - # Note: complex itself doesn't have __complex__. - class C(object): - def __complex__(self): - return 0j - - self.assertIsSubclass(C, typing.SupportsComplex) - self.assertNotIsSubclass(str, typing.SupportsComplex) - - def test_supports_abs(self): - self.assertIsSubclass(float, typing.SupportsAbs) - self.assertIsSubclass(int, typing.SupportsAbs) - self.assertNotIsSubclass(str, typing.SupportsAbs) - - def test_reversible(self): - self.assertIsSubclass(list, typing.Reversible) - self.assertNotIsSubclass(int, typing.Reversible) - - def test_protocol_instance_type_error(self): - with self.assertRaises(TypeError): - isinstance(0, typing.SupportsAbs) - class C1(typing.SupportsInt): - def __int__(self): - return 42 - class C2(C1): - pass - c = C2() - self.assertIsInstance(c, C1) - - -class GenericTests(BaseTestCase): - - def test_basics(self): - X = SimpleMapping[str, Any] - self.assertEqual(X.__parameters__, ()) - with self.assertRaises(TypeError): - X[unicode] - with self.assertRaises(TypeError): - X[unicode, unicode] - Y = SimpleMapping[XK, unicode] - self.assertEqual(Y.__parameters__, (XK,)) - Y[unicode] - with self.assertRaises(TypeError): - Y[unicode, unicode] - self.assertIsSubclass(SimpleMapping[str, int], SimpleMapping) - - def test_generic_errors(self): - T = TypeVar('T') - S = TypeVar('S') - with self.assertRaises(TypeError): - Generic[T]() - with self.assertRaises(TypeError): - Generic[T][T] - with self.assertRaises(TypeError): - Generic[T][S] - with self.assertRaises(TypeError): - isinstance([], List[int]) - with self.assertRaises(TypeError): - issubclass(list, List[int]) - with self.assertRaises(TypeError): - class NewGeneric(Generic): pass - with self.assertRaises(TypeError): - class MyGeneric(Generic[T], Generic[S]): pass - with self.assertRaises(TypeError): - class MyGeneric(List[T], Generic[S]): pass - - def test_init(self): - T = TypeVar('T') - S = TypeVar('S') - with self.assertRaises(TypeError): - Generic[T, T] - with self.assertRaises(TypeError): - Generic[T, S, T] - - def test_repr(self): - self.assertEqual(repr(SimpleMapping), - __name__ + '.' + 'SimpleMapping') - self.assertEqual(repr(MySimpleMapping), - __name__ + '.' 
+ 'MySimpleMapping') - - def test_chain_repr(self): - T = TypeVar('T') - S = TypeVar('S') - - class C(Generic[T]): - pass - - X = C[Tuple[S, T]] - self.assertEqual(X, C[Tuple[S, T]]) - self.assertNotEqual(X, C[Tuple[T, S]]) - - Y = X[T, int] - self.assertEqual(Y, X[T, int]) - self.assertNotEqual(Y, X[S, int]) - self.assertNotEqual(Y, X[T, str]) - - Z = Y[str] - self.assertEqual(Z, Y[str]) - self.assertNotEqual(Z, Y[int]) - self.assertNotEqual(Z, Y[T]) - - self.assertTrue(str(Z).endswith( - '.C[typing.Tuple[str, int]]')) - - def test_new_repr(self): - T = TypeVar('T') - U = TypeVar('U', covariant=True) - S = TypeVar('S') - - self.assertEqual(repr(List), 'typing.List') - self.assertEqual(repr(List[T]), 'typing.List[~T]') - self.assertEqual(repr(List[U]), 'typing.List[+U]') - self.assertEqual(repr(List[S][T][int]), 'typing.List[int]') - self.assertEqual(repr(List[int]), 'typing.List[int]') - - def test_new_repr_complex(self): - T = TypeVar('T') - TS = TypeVar('TS') - - self.assertEqual(repr(typing.Mapping[T, TS][TS, T]), 'typing.Mapping[~TS, ~T]') - self.assertEqual(repr(List[Tuple[T, TS]][int, T]), - 'typing.List[typing.Tuple[int, ~T]]') - self.assertEqual( - repr(List[Tuple[T, T]][List[int]]), - 'typing.List[typing.Tuple[typing.List[int], typing.List[int]]]' - ) - - def test_new_repr_bare(self): - T = TypeVar('T') - self.assertEqual(repr(Generic[T]), 'typing.Generic[~T]') - self.assertEqual(repr(typing._Protocol[T]), 'typing.Protocol[~T]') - class C(typing.Dict[Any, Any]): pass - # this line should just work - repr(C.__mro__) - - def test_dict(self): - T = TypeVar('T') - - class B(Generic[T]): - pass - - b = B() - b.foo = 42 - self.assertEqual(b.__dict__, {'foo': 42}) - - class C(B[int]): - pass - - c = C() - c.bar = 'abc' - self.assertEqual(c.__dict__, {'bar': 'abc'}) - - def test_subscripted_generics_as_proxies(self): - T = TypeVar('T') - class C(Generic[T]): - x = 'def' - self.assertEqual(C[int].x, 'def') - self.assertEqual(C[C[int]].x, 'def') - C[C[int]].x = 'changed' - self.assertEqual(C.x, 'changed') - self.assertEqual(C[str].x, 'changed') - C[List[str]].z = 'new' - self.assertEqual(C.z, 'new') - self.assertEqual(C[Tuple[int]].z, 'new') - - self.assertEqual(C().x, 'changed') - self.assertEqual(C[Tuple[str]]().z, 'new') - - class D(C[T]): - pass - self.assertEqual(D[int].x, 'changed') - self.assertEqual(D.z, 'new') - D.z = 'from derived z' - D[int].x = 'from derived x' - self.assertEqual(C.x, 'changed') - self.assertEqual(C[int].z, 'new') - self.assertEqual(D.x, 'from derived x') - self.assertEqual(D[str].z, 'from derived z') - - def test_abc_registry_kept(self): - T = TypeVar('T') - class C(Generic[T]): pass - C.register(int) - self.assertIsInstance(1, C) - C[int] - self.assertIsInstance(1, C) - - def test_false_subclasses(self): - class MyMapping(MutableMapping[str, str]): pass - self.assertNotIsInstance({}, MyMapping) - self.assertNotIsSubclass(dict, MyMapping) - - def test_abc_bases(self): - class MM(MutableMapping[str, str]): - def __getitem__(self, k): - return None - def __setitem__(self, k, v): - pass - def __delitem__(self, k): - pass - def __iter__(self): - return iter(()) - def __len__(self): - return 0 - # this should just work - MM().update() - self.assertIsInstance(MM(), collections_abc.MutableMapping) - self.assertIsInstance(MM(), MutableMapping) - self.assertNotIsInstance(MM(), List) - self.assertNotIsInstance({}, MM) - - def test_multiple_bases(self): - class MM1(MutableMapping[str, str], collections_abc.MutableMapping): - pass - with self.assertRaises(TypeError): - # 
consistent MRO not possible - class MM2(collections_abc.MutableMapping, MutableMapping[str, str]): - pass - - def test_orig_bases(self): - T = TypeVar('T') - class C(typing.Dict[str, T]): pass - self.assertEqual(C.__orig_bases__, (typing.Dict[str, T],)) - - def test_naive_runtime_checks(self): - def naive_dict_check(obj, tp): - # Check if a dictionary conforms to Dict type - if len(tp.__parameters__) > 0: - raise NotImplementedError - if tp.__args__: - KT, VT = tp.__args__ - return all( - isinstance(k, KT) and isinstance(v, VT) - for k, v in obj.items() - ) - self.assertTrue(naive_dict_check({'x': 1}, typing.Dict[typing.Text, int])) - self.assertFalse(naive_dict_check({1: 'x'}, typing.Dict[typing.Text, int])) - with self.assertRaises(NotImplementedError): - naive_dict_check({1: 'x'}, typing.Dict[typing.Text, T]) - - def naive_generic_check(obj, tp): - # Check if an instance conforms to the generic class - if not hasattr(obj, '__orig_class__'): - raise NotImplementedError - return obj.__orig_class__ == tp - class Node(Generic[T]): pass - self.assertTrue(naive_generic_check(Node[int](), Node[int])) - self.assertFalse(naive_generic_check(Node[str](), Node[int])) - self.assertFalse(naive_generic_check(Node[str](), List)) - with self.assertRaises(NotImplementedError): - naive_generic_check([1, 2, 3], Node[int]) - - def naive_list_base_check(obj, tp): - # Check if list conforms to a List subclass - return all(isinstance(x, tp.__orig_bases__[0].__args__[0]) - for x in obj) - class C(List[int]): pass - self.assertTrue(naive_list_base_check([1, 2, 3], C)) - self.assertFalse(naive_list_base_check(['a', 'b'], C)) - - def test_multi_subscr_base(self): - T = TypeVar('T') - U = TypeVar('U') - V = TypeVar('V') - class C(List[T][U][V]): pass - class D(C, List[T][U][V]): pass - self.assertEqual(C.__parameters__, (V,)) - self.assertEqual(D.__parameters__, (V,)) - self.assertEqual(C[int].__parameters__, ()) - self.assertEqual(D[int].__parameters__, ()) - self.assertEqual(C[int].__args__, (int,)) - self.assertEqual(D[int].__args__, (int,)) - self.assertEqual(C.__bases__, (List,)) - self.assertEqual(D.__bases__, (C, List)) - self.assertEqual(C.__orig_bases__, (List[T][U][V],)) - self.assertEqual(D.__orig_bases__, (C, List[T][U][V])) - - def test_subscript_meta(self): - T = TypeVar('T') - self.assertEqual(Type[GenericMeta], Type[GenericMeta]) - self.assertEqual(Union[T, int][GenericMeta], Union[GenericMeta, int]) - self.assertEqual(Callable[..., GenericMeta].__args__, (Ellipsis, GenericMeta)) - - def test_generic_hashes(self): - import mod_generics_cache - class A(Generic[T]): - __module__ = 'test_typing' - - class B(Generic[T]): - class A(Generic[T]): - pass - - self.assertEqual(A, A) - self.assertEqual(mod_generics_cache.A[str], mod_generics_cache.A[str]) - self.assertEqual(B.A, B.A) - self.assertEqual(mod_generics_cache.B.A[B.A[str]], - mod_generics_cache.B.A[B.A[str]]) - - self.assertNotEqual(A, B.A) - self.assertNotEqual(A, mod_generics_cache.A) - self.assertNotEqual(A, mod_generics_cache.B.A) - self.assertNotEqual(B.A, mod_generics_cache.A) - self.assertNotEqual(B.A, mod_generics_cache.B.A) - - self.assertNotEqual(A[str], B.A[str]) - self.assertNotEqual(A[List[Any]], B.A[List[Any]]) - self.assertNotEqual(A[str], mod_generics_cache.A[str]) - self.assertNotEqual(A[str], mod_generics_cache.B.A[str]) - self.assertNotEqual(B.A[int], mod_generics_cache.A[int]) - self.assertNotEqual(B.A[List[Any]], mod_generics_cache.B.A[List[Any]]) - - self.assertNotEqual(Tuple[A[str]], Tuple[B.A[str]]) - 
self.assertNotEqual(Tuple[A[List[Any]]], Tuple[B.A[List[Any]]]) - self.assertNotEqual(Union[str, A[str]], Union[str, mod_generics_cache.A[str]]) - self.assertNotEqual(Union[A[str], A[str]], - Union[A[str], mod_generics_cache.A[str]]) - self.assertNotEqual(typing.FrozenSet[A[str]], - typing.FrozenSet[mod_generics_cache.B.A[str]]) - - self.assertTrue(repr(Tuple[A[str]]).endswith('test_typing.A[str]]')) - self.assertTrue(repr(Tuple[mod_generics_cache.A[str]]) - .endswith('mod_generics_cache.A[str]]')) - - def test_extended_generic_rules_eq(self): - T = TypeVar('T') - U = TypeVar('U') - self.assertEqual(Tuple[T, T][int], Tuple[int, int]) - self.assertEqual(typing.Iterable[Tuple[T, T]][T], typing.Iterable[Tuple[T, T]]) - with self.assertRaises(TypeError): - Tuple[T, int][()] - with self.assertRaises(TypeError): - Tuple[T, U][T, ...] - - self.assertEqual(Union[T, int][int], int) - self.assertEqual(Union[T, U][int, Union[int, str]], Union[int, str]) - class Base(object): pass - class Derived(Base): pass - self.assertEqual(Union[T, Base][Derived], Base) - with self.assertRaises(TypeError): - Union[T, int][1] - - self.assertEqual(Callable[[T], T][KT], Callable[[KT], KT]) - self.assertEqual(Callable[..., List[T]][int], Callable[..., List[int]]) - with self.assertRaises(TypeError): - Callable[[T], U][..., int] - with self.assertRaises(TypeError): - Callable[[T], U][[], int] - - def test_extended_generic_rules_repr(self): - T = TypeVar('T') - self.assertEqual(repr(Union[Tuple, Callable]).replace('typing.', ''), - 'Union[Tuple, Callable]') - self.assertEqual(repr(Union[Tuple, Tuple[int]]).replace('typing.', ''), - 'Tuple') - self.assertEqual(repr(Callable[..., Optional[T]][int]).replace('typing.', ''), - 'Callable[..., Union[int, NoneType]]') - self.assertEqual(repr(Callable[[], List[T]][int]).replace('typing.', ''), - 'Callable[[], List[int]]') - - def test_generic_forvard_ref(self): - LLT = List[List['CC']] - class CC: pass - self.assertEqual(typing._eval_type(LLT, globals(), locals()), List[List[CC]]) - T = TypeVar('T') - AT = Tuple[T, ...] 
- self.assertIs(typing._eval_type(AT, globals(), locals()), AT) - CT = Callable[..., List[T]] - self.assertIs(typing._eval_type(CT, globals(), locals()), CT) - - def test_extended_generic_rules_subclassing(self): - class T1(Tuple[T, KT]): pass - class T2(Tuple[T, ...]): pass - class C1(Callable[[T], T]): pass - class C2(Callable[..., int]): - def __call__(self): - return None - - self.assertEqual(T1.__parameters__, (T, KT)) - self.assertEqual(T1[int, str].__args__, (int, str)) - self.assertEqual(T1[int, T].__origin__, T1) - - self.assertEqual(T2.__parameters__, (T,)) - with self.assertRaises(TypeError): - T1[int] - with self.assertRaises(TypeError): - T2[int, str] - - self.assertEqual(repr(C1[int]).split('.')[-1], 'C1[int]') - self.assertEqual(C2.__parameters__, ()) - self.assertIsInstance(C2(), collections_abc.Callable) - self.assertIsSubclass(C2, collections_abc.Callable) - self.assertIsSubclass(C1, collections_abc.Callable) - self.assertIsInstance(T1(), tuple) - self.assertIsSubclass(T2, tuple) - self.assertIsSubclass(Tuple[int, ...], typing.Sequence) - self.assertIsSubclass(Tuple[int, ...], typing.Iterable) - - def test_fail_with_bare_union(self): - with self.assertRaises(TypeError): - List[Union] - with self.assertRaises(TypeError): - Tuple[Optional] - with self.assertRaises(TypeError): - ClassVar[ClassVar] - with self.assertRaises(TypeError): - List[ClassVar[int]] - - def test_fail_with_bare_generic(self): - T = TypeVar('T') - with self.assertRaises(TypeError): - List[Generic] - with self.assertRaises(TypeError): - Tuple[Generic[T]] - with self.assertRaises(TypeError): - List[typing._Protocol] - with self.assertRaises(TypeError): - isinstance(1, Generic) - - def test_type_erasure_special(self): - T = TypeVar('T') - # this is the only test that checks type caching - self.clear_caches() - class MyTup(Tuple[T, T]): pass - self.assertIs(MyTup[int]().__class__, MyTup) - self.assertIs(MyTup[int]().__orig_class__, MyTup[int]) - class MyCall(Callable[..., T]): - def __call__(self): return None - self.assertIs(MyCall[T]().__class__, MyCall) - self.assertIs(MyCall[T]().__orig_class__, MyCall[T]) - class MyDict(typing.Dict[T, T]): pass - self.assertIs(MyDict[int]().__class__, MyDict) - self.assertIs(MyDict[int]().__orig_class__, MyDict[int]) - class MyDef(typing.DefaultDict[str, T]): pass - self.assertIs(MyDef[int]().__class__, MyDef) - self.assertIs(MyDef[int]().__orig_class__, MyDef[int]) - - def test_all_repr_eq_any(self): - objs = (getattr(typing, el) for el in typing.__all__) - for obj in objs: - self.assertNotEqual(repr(obj), '') - self.assertEqual(obj, obj) - if getattr(obj, '__parameters__', None) and len(obj.__parameters__) == 1: - self.assertEqual(obj[Any].__args__, (Any,)) - if isinstance(obj, type): - for base in obj.__mro__: - self.assertNotEqual(repr(base), '') - self.assertEqual(base, base) - - def test_pickle(self): - global C # pickle wants to reference the class by name - T = TypeVar('T') - - class B(Generic[T]): - pass - - class C(B[int]): - pass - - c = C() - c.foo = 42 - c.bar = 'abc' - for proto in range(pickle.HIGHEST_PROTOCOL + 1): - z = pickle.dumps(c, proto) - x = pickle.loads(z) - self.assertEqual(x.foo, 42) - self.assertEqual(x.bar, 'abc') - self.assertEqual(x.__dict__, {'foo': 42, 'bar': 'abc'}) - simples = [Any, Union, Tuple, Callable, ClassVar, List, typing.Iterable] - for s in simples: - for proto in range(pickle.HIGHEST_PROTOCOL + 1): - z = pickle.dumps(s, proto) - x = pickle.loads(z) - self.assertEqual(s, x) - - def test_copy_and_deepcopy(self): - T = 
TypeVar('T') - class Node(Generic[T]): pass - things = [ - Any, - Callable[..., T], - Callable[[int], int], - ClassVar[List[T]], - ClassVar[int], - List['T'], - Node[Any], - Node[T], - Node[int], - Tuple['T', 'T'], - Tuple[Any, Any], - Tuple[T, int], - Union['T', int], - Union[T, int], - typing.Dict[T, Any], - typing.Dict[int, str], - typing.Iterable[Any], - typing.Iterable[T], - typing.Iterable[int], - typing.Mapping['T', int] - ] - for t in things: - self.assertEqual(t, deepcopy(t)) - self.assertEqual(t, copy(t)) - - def test_weakref_all(self): - T = TypeVar('T') - things = [Any, Union[T, int], Callable[..., T], Tuple[Any, Any], - Optional[List[int]], typing.Mapping[int, str], - typing.re.Match[bytes], typing.Iterable['whatever']] - for t in things: - self.assertEqual(weakref.ref(t)(), t) - - def test_parameterized_slots(self): - T = TypeVar('T') - class C(Generic[T]): - __slots__ = ('potato',) - - c = C() - c_int = C[int]() - self.assertEqual(C.__slots__, C[str].__slots__) - - c.potato = 0 - c_int.potato = 0 - with self.assertRaises(AttributeError): - c.tomato = 0 - with self.assertRaises(AttributeError): - c_int.tomato = 0 - - self.assertEqual(typing._eval_type(C['C'], globals(), locals()), C[C]) - self.assertEqual(typing._eval_type(C['C'], globals(), locals()).__slots__, - C.__slots__) - self.assertEqual(copy(C[int]), deepcopy(C[int])) - - def test_parameterized_slots_dict(self): - T = TypeVar('T') - class D(Generic[T]): - __slots__ = {'banana': 42} - - d = D() - d_int = D[int]() - self.assertEqual(D.__slots__, D[str].__slots__) - - d.banana = 'yes' - d_int.banana = 'yes' - with self.assertRaises(AttributeError): - d.foobar = 'no' - with self.assertRaises(AttributeError): - d_int.foobar = 'no' - - def test_errors(self): - with self.assertRaises(TypeError): - B = SimpleMapping[XK, Any] - - class C(Generic[B]): - pass - - def test_repr_2(self): - PY32 = sys.version_info[:2] < (3, 3) - - class C(Generic[T]): - pass - - self.assertEqual(C.__module__, __name__) - if not PY32: - self.assertEqual(C.__qualname__, - 'GenericTests.test_repr_2..C') - self.assertEqual(repr(C).split('.')[-1], 'C') - X = C[int] - self.assertEqual(X.__module__, __name__) - if not PY32: - self.assertTrue(X.__qualname__.endswith('..C')) - self.assertEqual(repr(X).split('.')[-1], 'C[int]') - - class Y(C[int]): - pass - - self.assertEqual(Y.__module__, __name__) - if not PY32: - self.assertEqual(Y.__qualname__, - 'GenericTests.test_repr_2..Y') - self.assertEqual(repr(Y).split('.')[-1], 'Y') - - def test_eq_1(self): - self.assertEqual(Generic, Generic) - self.assertEqual(Generic[T], Generic[T]) - self.assertNotEqual(Generic[KT], Generic[VT]) - - def test_eq_2(self): - - class A(Generic[T]): - pass - - class B(Generic[T]): - pass - - self.assertEqual(A, A) - self.assertNotEqual(A, B) - self.assertEqual(A[T], A[T]) - self.assertNotEqual(A[T], B[T]) - - def test_multiple_inheritance(self): - - class A(Generic[T, VT]): - pass - - class B(Generic[KT, T]): - pass - - class C(A[T, VT], Generic[VT, T, KT], B[KT, T]): - pass - - self.assertEqual(C.__parameters__, (VT, T, KT)) - - def test_nested(self): - - G = Generic - - class Visitor(G[T]): - - a = None - - def set(self, a): - self.a = a - - def get(self): - return self.a - - def visit(self): - return self.a - - V = Visitor[typing.List[int]] - - class IntListVisitor(V): - - def append(self, x): - self.a.append(x) - - a = IntListVisitor() - a.set([]) - a.append(1) - a.append(42) - self.assertEqual(a.get(), [1, 42]) - - def test_type_erasure(self): - T = TypeVar('T') - - class 
Node(Generic[T]): - def __init__(self, label, - left=None, - right=None): - self.label = label # type: T - self.left = left # type: Optional[Node[T]] - self.right = right # type: Optional[Node[T]] - - def foo(x): - a = Node(x) - b = Node[T](x) - c = Node[Any](x) - self.assertIs(type(a), Node) - self.assertIs(type(b), Node) - self.assertIs(type(c), Node) - self.assertEqual(a.label, x) - self.assertEqual(b.label, x) - self.assertEqual(c.label, x) - - foo(42) - - def test_implicit_any(self): - T = TypeVar('T') - - class C(Generic[T]): - pass - - class D(C): - pass - - self.assertEqual(D.__parameters__, ()) - - with self.assertRaises(Exception): - D[int] - with self.assertRaises(Exception): - D[Any] - with self.assertRaises(Exception): - D[T] - - -class ClassVarTests(BaseTestCase): - - def test_basics(self): - with self.assertRaises(TypeError): - ClassVar[1] - with self.assertRaises(TypeError): - ClassVar[int, str] - with self.assertRaises(TypeError): - ClassVar[int][str] - - def test_repr(self): - self.assertEqual(repr(ClassVar), 'typing.ClassVar') - cv = ClassVar[int] - self.assertEqual(repr(cv), 'typing.ClassVar[int]') - cv = ClassVar[Employee] - self.assertEqual(repr(cv), 'typing.ClassVar[%s.Employee]' % __name__) - - def test_cannot_subclass(self): - with self.assertRaises(TypeError): - class C(type(ClassVar)): - pass - with self.assertRaises(TypeError): - class C(type(ClassVar[int])): - pass - - def test_cannot_init(self): - with self.assertRaises(TypeError): - ClassVar() - with self.assertRaises(TypeError): - type(ClassVar)() - with self.assertRaises(TypeError): - type(ClassVar[Optional[int]])() - - def test_no_isinstance(self): - with self.assertRaises(TypeError): - isinstance(1, ClassVar[int]) - with self.assertRaises(TypeError): - issubclass(int, ClassVar) - - -class CastTests(BaseTestCase): - - def test_basics(self): - self.assertEqual(cast(int, 42), 42) - self.assertEqual(cast(float, 42), 42) - self.assertIs(type(cast(float, 42)), int) - self.assertEqual(cast(Any, 42), 42) - self.assertEqual(cast(list, 42), 42) - self.assertEqual(cast(Union[str, float], 42), 42) - self.assertEqual(cast(AnyStr, 42), 42) - self.assertEqual(cast(None, 42), 42) - - def test_errors(self): - # Bogus calls are not expected to fail. 
- cast(42, 42) - cast('hello', 42) - - -class ForwardRefTests(BaseTestCase): - - def test_forwardref_instance_type_error(self): - fr = typing._ForwardRef('int') - with self.assertRaises(TypeError): - isinstance(42, fr) - - def test_syntax_error(self): - - with self.assertRaises(SyntaxError): - Generic['/T'] - - def test_forwardref_subclass_type_error(self): - fr = typing._ForwardRef('int') - with self.assertRaises(TypeError): - issubclass(int, fr) - - def test_forward_equality(self): - fr = typing._ForwardRef('int') - self.assertEqual(fr, typing._ForwardRef('int')) - self.assertNotEqual(List['int'], List[int]) - - def test_forward_repr(self): - self.assertEqual(repr(List['int']), "typing.List[_ForwardRef(%r)]" % 'int') - - -class OverloadTests(BaseTestCase): - - def test_overload_fails(self): - from typing import overload - - with self.assertRaises(RuntimeError): - - @overload - def blah(): - pass - - blah() - - def test_overload_succeeds(self): - from typing import overload - - @overload - def blah(): - pass - - def blah(): - pass - - blah() - - -class CollectionsAbcTests(BaseTestCase): - - def test_hashable(self): - self.assertIsInstance(42, typing.Hashable) - self.assertNotIsInstance([], typing.Hashable) - - def test_iterable(self): - self.assertIsInstance([], typing.Iterable) - # Due to ABC caching, the second time takes a separate code - # path and could fail. So call this a few times. - self.assertIsInstance([], typing.Iterable) - self.assertIsInstance([], typing.Iterable) - self.assertNotIsInstance(42, typing.Iterable) - # Just in case, also test issubclass() a few times. - self.assertIsSubclass(list, typing.Iterable) - self.assertIsSubclass(list, typing.Iterable) - - def test_iterator(self): - it = iter([]) - self.assertIsInstance(it, typing.Iterator) - self.assertNotIsInstance(42, typing.Iterator) - - def test_sized(self): - self.assertIsInstance([], typing.Sized) - self.assertNotIsInstance(42, typing.Sized) - - def test_container(self): - self.assertIsInstance([], typing.Container) - self.assertNotIsInstance(42, typing.Container) - - def test_abstractset(self): - self.assertIsInstance(set(), typing.AbstractSet) - self.assertNotIsInstance(42, typing.AbstractSet) - - def test_mutableset(self): - self.assertIsInstance(set(), typing.MutableSet) - self.assertNotIsInstance(frozenset(), typing.MutableSet) - - def test_mapping(self): - self.assertIsInstance({}, typing.Mapping) - self.assertNotIsInstance(42, typing.Mapping) - - def test_mutablemapping(self): - self.assertIsInstance({}, typing.MutableMapping) - self.assertNotIsInstance(42, typing.MutableMapping) - - def test_sequence(self): - self.assertIsInstance([], typing.Sequence) - self.assertNotIsInstance(42, typing.Sequence) - - def test_mutablesequence(self): - self.assertIsInstance([], typing.MutableSequence) - self.assertNotIsInstance((), typing.MutableSequence) - - def test_bytestring(self): - self.assertIsInstance(b'', typing.ByteString) - self.assertIsInstance(bytearray(b''), typing.ByteString) - - def test_list(self): - self.assertIsSubclass(list, typing.List) - - def test_deque(self): - self.assertIsSubclass(collections.deque, typing.Deque) - class MyDeque(typing.Deque[int]): pass - self.assertIsInstance(MyDeque(), collections.deque) - - def test_counter(self): - self.assertIsSubclass(collections.Counter, typing.Counter) - - def test_set(self): - self.assertIsSubclass(set, typing.Set) - self.assertNotIsSubclass(frozenset, typing.Set) - - def test_frozenset(self): - self.assertIsSubclass(frozenset, typing.FrozenSet) - 
self.assertNotIsSubclass(set, typing.FrozenSet) - - def test_dict(self): - self.assertIsSubclass(dict, typing.Dict) - - def test_no_list_instantiation(self): - with self.assertRaises(TypeError): - typing.List() - with self.assertRaises(TypeError): - typing.List[T]() - with self.assertRaises(TypeError): - typing.List[int]() - - def test_list_subclass(self): - - class MyList(typing.List[int]): - pass - - a = MyList() - self.assertIsInstance(a, MyList) - self.assertIsInstance(a, typing.Sequence) - - self.assertIsSubclass(MyList, list) - self.assertNotIsSubclass(list, MyList) - - def test_no_dict_instantiation(self): - with self.assertRaises(TypeError): - typing.Dict() - with self.assertRaises(TypeError): - typing.Dict[KT, VT]() - with self.assertRaises(TypeError): - typing.Dict[str, int]() - - def test_dict_subclass(self): - - class MyDict(typing.Dict[str, int]): - pass - - d = MyDict() - self.assertIsInstance(d, MyDict) - self.assertIsInstance(d, typing.MutableMapping) - - self.assertIsSubclass(MyDict, dict) - self.assertNotIsSubclass(dict, MyDict) - - def test_defaultdict_instantiation(self): - self.assertIs(type(typing.DefaultDict()), collections.defaultdict) - self.assertIs(type(typing.DefaultDict[KT, VT]()), collections.defaultdict) - self.assertIs(type(typing.DefaultDict[str, int]()), collections.defaultdict) - - def test_defaultdict_subclass(self): - - class MyDefDict(typing.DefaultDict[str, int]): - pass - - dd = MyDefDict() - self.assertIsInstance(dd, MyDefDict) - - self.assertIsSubclass(MyDefDict, collections.defaultdict) - self.assertNotIsSubclass(collections.defaultdict, MyDefDict) - - def test_deque_instantiation(self): - self.assertIs(type(typing.Deque()), collections.deque) - self.assertIs(type(typing.Deque[T]()), collections.deque) - self.assertIs(type(typing.Deque[int]()), collections.deque) - class D(typing.Deque[T]): pass - self.assertIs(type(D[int]()), D) - - def test_counter_instantiation(self): - self.assertIs(type(typing.Counter()), collections.Counter) - self.assertIs(type(typing.Counter[T]()), collections.Counter) - self.assertIs(type(typing.Counter[int]()), collections.Counter) - class C(typing.Counter[T]): pass - self.assertIs(type(C[int]()), C) - - def test_counter_subclass_instantiation(self): - - class MyCounter(typing.Counter[int]): - pass - - d = MyCounter() - self.assertIsInstance(d, MyCounter) - self.assertIsInstance(d, typing.Counter) - self.assertIsInstance(d, collections.Counter) - - def test_no_set_instantiation(self): - with self.assertRaises(TypeError): - typing.Set() - with self.assertRaises(TypeError): - typing.Set[T]() - with self.assertRaises(TypeError): - typing.Set[int]() - - def test_set_subclass_instantiation(self): - - class MySet(typing.Set[int]): - pass - - d = MySet() - self.assertIsInstance(d, MySet) - - def test_no_frozenset_instantiation(self): - with self.assertRaises(TypeError): - typing.FrozenSet() - with self.assertRaises(TypeError): - typing.FrozenSet[T]() - with self.assertRaises(TypeError): - typing.FrozenSet[int]() - - def test_frozenset_subclass_instantiation(self): - - class MyFrozenSet(typing.FrozenSet[int]): - pass - - d = MyFrozenSet() - self.assertIsInstance(d, MyFrozenSet) - - def test_no_tuple_instantiation(self): - with self.assertRaises(TypeError): - Tuple() - with self.assertRaises(TypeError): - Tuple[T]() - with self.assertRaises(TypeError): - Tuple[int]() - - def test_generator(self): - def foo(): - yield 42 - g = foo() - self.assertIsSubclass(type(g), typing.Generator) - - def test_no_generator_instantiation(self): - 
with self.assertRaises(TypeError): - typing.Generator() - with self.assertRaises(TypeError): - typing.Generator[T, T, T]() - with self.assertRaises(TypeError): - typing.Generator[int, int, int]() - - def test_subclassing(self): - - class MMA(typing.MutableMapping): - pass - - with self.assertRaises(TypeError): # It's abstract - MMA() - - class MMC(MMA): - def __getitem__(self, k): - return None - def __setitem__(self, k, v): - pass - def __delitem__(self, k): - pass - def __iter__(self): - return iter(()) - def __len__(self): - return 0 - - self.assertEqual(len(MMC()), 0) - assert callable(MMC.update) - self.assertIsInstance(MMC(), typing.Mapping) - - class MMB(typing.MutableMapping[KT, VT]): - def __getitem__(self, k): - return None - def __setitem__(self, k, v): - pass - def __delitem__(self, k): - pass - def __iter__(self): - return iter(()) - def __len__(self): - return 0 - - self.assertEqual(len(MMB()), 0) - self.assertEqual(len(MMB[str, str]()), 0) - self.assertEqual(len(MMB[KT, VT]()), 0) - - self.assertNotIsSubclass(dict, MMA) - self.assertNotIsSubclass(dict, MMB) - - self.assertIsSubclass(MMA, typing.Mapping) - self.assertIsSubclass(MMB, typing.Mapping) - self.assertIsSubclass(MMC, typing.Mapping) - - self.assertIsInstance(MMB[KT, VT](), typing.Mapping) - self.assertIsInstance(MMB[KT, VT](), collections.Mapping) - - self.assertIsSubclass(MMA, collections.Mapping) - self.assertIsSubclass(MMB, collections.Mapping) - self.assertIsSubclass(MMC, collections.Mapping) - - self.assertIsSubclass(MMB[str, str], typing.Mapping) - self.assertIsSubclass(MMC, MMA) - - class I(typing.Iterable): pass - self.assertNotIsSubclass(list, I) - - class G(typing.Generator[int, int, int]): pass - def g(): yield 0 - self.assertIsSubclass(G, typing.Generator) - self.assertIsSubclass(G, typing.Iterable) - if hasattr(collections, 'Generator'): - self.assertIsSubclass(G, collections.Generator) - self.assertIsSubclass(G, collections.Iterable) - self.assertNotIsSubclass(type(g), G) - - def test_subclassing_subclasshook(self): - - class Base(typing.Iterable): - @classmethod - def __subclasshook__(cls, other): - if other.__name__ == 'Foo': - return True - else: - return False - - class C(Base): pass - class Foo: pass - class Bar: pass - self.assertIsSubclass(Foo, Base) - self.assertIsSubclass(Foo, C) - self.assertNotIsSubclass(Bar, C) - - def test_subclassing_register(self): - - class A(typing.Container): pass - class B(A): pass - - class C: pass - A.register(C) - self.assertIsSubclass(C, A) - self.assertNotIsSubclass(C, B) - - class D: pass - B.register(D) - self.assertIsSubclass(D, A) - self.assertIsSubclass(D, B) - - class M(): pass - collections.MutableMapping.register(M) - self.assertIsSubclass(M, typing.Mapping) - - def test_collections_as_base(self): - - class M(collections.Mapping): pass - self.assertIsSubclass(M, typing.Mapping) - self.assertIsSubclass(M, typing.Iterable) - - class S(collections.MutableSequence): pass - self.assertIsSubclass(S, typing.MutableSequence) - self.assertIsSubclass(S, typing.Iterable) - - class I(collections.Iterable): pass - self.assertIsSubclass(I, typing.Iterable) - - class A(collections.Mapping): pass - class B: pass - A.register(B) - self.assertIsSubclass(B, typing.Mapping) - - -class TypeTests(BaseTestCase): - - def test_type_basic(self): - - class User(object): pass - class BasicUser(User): pass - class ProUser(User): pass - - def new_user(user_class): - # type: (Type[User]) -> User - return user_class() - - new_user(BasicUser) - - def test_type_typevar(self): - - class 
User(object): pass - class BasicUser(User): pass - class ProUser(User): pass - - global U - U = TypeVar('U', bound=User) - - def new_user(user_class): - # type: (Type[U]) -> U - return user_class() - - new_user(BasicUser) - - def test_type_optional(self): - A = Optional[Type[BaseException]] # noqa - - def foo(a): - # type: (A) -> Optional[BaseException] - if a is None: - return None - else: - return a() - - assert isinstance(foo(KeyboardInterrupt), KeyboardInterrupt) - assert foo(None) is None - - -class NewTypeTests(BaseTestCase): - - def test_basic(self): - UserId = NewType('UserId', int) - UserName = NewType('UserName', str) - self.assertIsInstance(UserId(5), int) - self.assertIsInstance(UserName('Joe'), type('Joe')) - self.assertEqual(UserId(5) + 1, 6) - - def test_errors(self): - UserId = NewType('UserId', int) - UserName = NewType('UserName', str) - with self.assertRaises(TypeError): - issubclass(UserId, int) - with self.assertRaises(TypeError): - class D(UserName): - pass - - -class NamedTupleTests(BaseTestCase): - - def test_basics(self): - Emp = NamedTuple('Emp', [('name', str), ('id', int)]) - self.assertIsSubclass(Emp, tuple) - joe = Emp('Joe', 42) - jim = Emp(name='Jim', id=1) - self.assertIsInstance(joe, Emp) - self.assertIsInstance(joe, tuple) - self.assertEqual(joe.name, 'Joe') - self.assertEqual(joe.id, 42) - self.assertEqual(jim.name, 'Jim') - self.assertEqual(jim.id, 1) - self.assertEqual(Emp.__name__, 'Emp') - self.assertEqual(Emp._fields, ('name', 'id')) - self.assertEqual(Emp._field_types, dict(name=str, id=int)) - - def test_pickle(self): - global Emp # pickle wants to reference the class by name - Emp = NamedTuple('Emp', [('name', str), ('id', int)]) - jane = Emp('jane', 37) - for proto in range(pickle.HIGHEST_PROTOCOL + 1): - z = pickle.dumps(jane, proto) - jane2 = pickle.loads(z) - self.assertEqual(jane2, jane) - - -class IOTests(BaseTestCase): - - def test_io_submodule(self): - from typing.io import IO, TextIO, BinaryIO, __all__, __name__ - self.assertIs(IO, typing.IO) - self.assertIs(TextIO, typing.TextIO) - self.assertIs(BinaryIO, typing.BinaryIO) - self.assertEqual(set(__all__), set(['IO', 'TextIO', 'BinaryIO'])) - self.assertEqual(__name__, 'typing.io') - - -class RETests(BaseTestCase): - # Much of this is really testing _TypeAlias. - - def test_basics(self): - pat = re.compile('[a-z]+', re.I) - self.assertIsSubclass(pat.__class__, Pattern) - self.assertIsSubclass(type(pat), Pattern) - self.assertIsInstance(pat, Pattern) - - mat = pat.search('12345abcde.....') - self.assertIsSubclass(mat.__class__, Match) - self.assertIsSubclass(type(mat), Match) - self.assertIsInstance(mat, Match) - - # these should just work - Pattern[Union[str, bytes]] - Match[Union[bytes, str]] - - def test_alias_equality(self): - self.assertEqual(Pattern[str], Pattern[str]) - self.assertNotEqual(Pattern[str], Pattern[bytes]) - self.assertNotEqual(Pattern[str], Match[str]) - self.assertNotEqual(Pattern[str], str) - - def test_errors(self): - with self.assertRaises(TypeError): - # Doesn't fit AnyStr. - Pattern[int] - with self.assertRaises(TypeError): - # Can't change type vars? - Match[T] - m = Match[Union[str, bytes]] - with self.assertRaises(TypeError): - # Too complicated? - m[str] - with self.assertRaises(TypeError): - # We don't support isinstance(). - isinstance(42, Pattern[str]) - with self.assertRaises(TypeError): - # We don't support issubclass(). 
-            issubclass(Pattern[bytes], Pattern[str])
-
-    def test_repr(self):
-        self.assertEqual(repr(Pattern), 'Pattern[~AnyStr]')
-        self.assertEqual(repr(Pattern[unicode]), 'Pattern[unicode]')
-        self.assertEqual(repr(Pattern[str]), 'Pattern[str]')
-        self.assertEqual(repr(Match), 'Match[~AnyStr]')
-        self.assertEqual(repr(Match[unicode]), 'Match[unicode]')
-        self.assertEqual(repr(Match[str]), 'Match[str]')
-
-    def test_re_submodule(self):
-        from typing.re import Match, Pattern, __all__, __name__
-        self.assertIs(Match, typing.Match)
-        self.assertIs(Pattern, typing.Pattern)
-        self.assertEqual(set(__all__), set(['Match', 'Pattern']))
-        self.assertEqual(__name__, 'typing.re')
-
-    def test_cannot_subclass(self):
-        with self.assertRaises(TypeError) as ex:
-
-            class A(typing.Match):
-                pass
-
-        self.assertEqual(str(ex.exception),
-                         "Cannot subclass typing._TypeAlias")
-
-
-class AllTests(BaseTestCase):
-    """Tests for __all__."""
-
-    def test_all(self):
-        from typing import __all__ as a
-        # Just spot-check the first and last of every category.
-        self.assertIn('AbstractSet', a)
-        self.assertIn('ValuesView', a)
-        self.assertIn('cast', a)
-        self.assertIn('overload', a)
-        # Check that io and re are not exported.
-        self.assertNotIn('io', a)
-        self.assertNotIn('re', a)
-        # Spot-check that stdlib modules aren't exported.
-        self.assertNotIn('os', a)
-        self.assertNotIn('sys', a)
-        # Check that Text is defined.
-        self.assertIn('Text', a)
-
-    def test_respect_no_type_check(self):
-        @typing.no_type_check
-        class NoTpCheck(object):
-            class Inn(object):
-                def __init__(self, x):
-                    # type: (this is not actually a type) -> None
-                    pass
-        self.assertTrue(NoTpCheck.__no_type_check__)
-        self.assertTrue(NoTpCheck.Inn.__init__.__no_type_check__)
-
-    def test_get_type_hints_dummy(self):
-
-        def foo(x):
-            # type: (int) -> int
-            return x + 1
-
-        self.assertIsNone(typing.get_type_hints(foo))
-
-
-if __name__ == '__main__':
-    main()
diff --git a/lib-typing/2.7/typing.py b/lib-typing/2.7/typing.py
deleted file mode 100644
index 0d67e4c3e150..000000000000
--- a/lib-typing/2.7/typing.py
+++ /dev/null
@@ -1,2140 +0,0 @@
-from __future__ import absolute_import, unicode_literals
-
-import abc
-from abc import abstractmethod, abstractproperty
-import collections
-import functools
-import re as stdlib_re  # Avoid confusion with the re we export.
-import sys
-import types
-try:
-    import collections.abc as collections_abc
-except ImportError:
-    import collections as collections_abc  # Fallback for PY3.2.
-
-
-# Please keep __all__ alphabetized within each category.
-__all__ = [
-    # Super-special typing primitives.
-    'Any',
-    'Callable',
-    'ClassVar',
-    'Generic',
-    'Optional',
-    'Tuple',
-    'Type',
-    'TypeVar',
-    'Union',
-
-    # ABCs (from collections.abc).
-    'AbstractSet',  # collections.abc.Set.
-    'GenericMeta',  # subclass of abc.ABCMeta and a metaclass
-                    # for 'Generic' and ABCs below.
-    'ByteString',
-    'Container',
-    'Hashable',
-    'ItemsView',
-    'Iterable',
-    'Iterator',
-    'KeysView',
-    'Mapping',
-    'MappingView',
-    'MutableMapping',
-    'MutableSequence',
-    'MutableSet',
-    'Sequence',
-    'Sized',
-    'ValuesView',
-
-    # Structural checks, a.k.a. protocols.
-    'Reversible',
-    'SupportsAbs',
-    'SupportsFloat',
-    'SupportsInt',
-
-    # Concrete collection types.
-    'Counter',
-    'Deque',
-    'Dict',
-    'DefaultDict',
-    'List',
-    'Set',
-    'FrozenSet',
-    'NamedTuple',  # Not really a type.
-    'Generator',
-
-    # One-off things.
-    'AnyStr',
-    'cast',
-    'get_type_hints',
-    'NewType',
-    'no_type_check',
-    'no_type_check_decorator',
-    'overload',
-    'Text',
-    'TYPE_CHECKING',
-]
-
-# The pseudo-submodules 're' and 'io' are part of the public
-# namespace, but excluded from __all__ because they might stomp on
-# legitimate imports of those modules.
-
-
-def _qualname(x):
-    if sys.version_info[:2] >= (3, 3):
-        return x.__qualname__
-    else:
-        # Fall back to just name.
-        return x.__name__
-
-
-def _trim_name(nm):
-    whitelist = ('_TypeAlias', '_ForwardRef', '_TypingBase', '_FinalTypingBase')
-    if nm.startswith('_') and nm not in whitelist:
-        nm = nm[1:]
-    return nm
-
-
-class TypingMeta(type):
-    """Metaclass for most types defined in typing module
-    (not a part of public API).
-
-    This also defines a dummy constructor (all the work for most typing
-    constructs is done in __new__) and a nicer repr().
-    """
-
-    _is_protocol = False
-
-    def __new__(cls, name, bases, namespace):
-        return super(TypingMeta, cls).__new__(cls, str(name), bases, namespace)
-
-    @classmethod
-    def assert_no_subclassing(cls, bases):
-        for base in bases:
-            if isinstance(base, cls):
-                raise TypeError("Cannot subclass %s" %
-                                (', '.join(map(_type_repr, bases)) or '()'))
-
-    def __init__(self, *args, **kwds):
-        pass
-
-    def _eval_type(self, globalns, localns):
-        """Override this in subclasses to interpret forward references.
-
-        For example, List['C'] is internally stored as
-        List[_ForwardRef('C')], which should evaluate to List[C],
-        where C is an object found in globalns or localns (searching
-        localns first, of course).
-        """
-        return self
-
-    def _get_type_vars(self, tvars):
-        pass
-
-    def __repr__(self):
-        qname = _trim_name(_qualname(self))
-        return '%s.%s' % (self.__module__, qname)
-
-
-class _TypingBase(object):
-    """Internal indicator of special typing constructs."""
-    __metaclass__ = TypingMeta
-    __slots__ = ('__weakref__',)
-
-    def __init__(self, *args, **kwds):
-        pass
-
-    def __new__(cls, *args, **kwds):
-        """Constructor.
-
-        This only exists to give a better error message in case
-        someone tries to subclass a special typing object (not a good idea).
-        """
-        if (len(args) == 3 and
-                isinstance(args[0], str) and
-                isinstance(args[1], tuple)):
-            # Close enough.
-            raise TypeError("Cannot subclass %r" % cls)
-        return super(_TypingBase, cls).__new__(cls)
-
-    # Things that are not classes also need these.
-    def _eval_type(self, globalns, localns):
-        return self
-
-    def _get_type_vars(self, tvars):
-        pass
-
-    def __repr__(self):
-        cls = type(self)
-        qname = _trim_name(_qualname(cls))
-        return '%s.%s' % (cls.__module__, qname)
-
-    def __call__(self, *args, **kwds):
-        raise TypeError("Cannot instantiate %r" % type(self))
-
-
-class _FinalTypingBase(_TypingBase):
-    """Internal mix-in class to prevent instantiation.
-
-    Prevents instantiation unless _root=True is given in class call.
-    It is used to create pseudo-singleton instances Any, Union, Optional, etc.
- """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - self = super(_FinalTypingBase, cls).__new__(cls, *args, **kwds) - if '_root' in kwds and kwds['_root'] is True: - return self - raise TypeError("Cannot instantiate %r" % cls) - - def __reduce__(self): - return _trim_name(type(self).__name__) - - -class _ForwardRef(_TypingBase): - """Internal wrapper to hold a forward reference.""" - - __slots__ = ('__forward_arg__', '__forward_code__', - '__forward_evaluated__', '__forward_value__') - - def __init__(self, arg): - super(_ForwardRef, self).__init__(arg) - if not isinstance(arg, basestring): - raise TypeError('Forward reference must be a string -- got %r' % (arg,)) - try: - code = compile(arg, '', 'eval') - except SyntaxError: - raise SyntaxError('Forward reference must be an expression -- got %r' % - (arg,)) - self.__forward_arg__ = arg - self.__forward_code__ = code - self.__forward_evaluated__ = False - self.__forward_value__ = None - - def _eval_type(self, globalns, localns): - if not self.__forward_evaluated__ or localns is not globalns: - if globalns is None and localns is None: - globalns = localns = {} - elif globalns is None: - globalns = localns - elif localns is None: - localns = globalns - self.__forward_value__ = _type_check( - eval(self.__forward_code__, globalns, localns), - "Forward references must evaluate to types.") - self.__forward_evaluated__ = True - return self.__forward_value__ - - def __eq__(self, other): - if not isinstance(other, _ForwardRef): - return NotImplemented - return (self.__forward_arg__ == other.__forward_arg__ and - self.__forward_value__ == other.__forward_value__) - - def __hash__(self): - return hash((self.__forward_arg__, self.__forward_value__)) - - def __instancecheck__(self, obj): - raise TypeError("Forward references cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Forward references cannot be used with issubclass().") - - def __repr__(self): - return '_ForwardRef(%r)' % (self.__forward_arg__,) - - -class _TypeAlias(_TypingBase): - """Internal helper class for defining generic variants of concrete types. - - Note that this is not a type; let's call it a pseudo-type. It cannot - be used in instance and subclass checks in parameterized form, i.e. - ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning - ``False``. - """ - - __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') - - def __init__(self, name, type_var, impl_type, type_checker): - """Initializer. - - Args: - name: The name, e.g. 'Pattern'. - type_var: The type parameter, e.g. AnyStr, or the - specific type, e.g. str. - impl_type: The implementation type. - type_checker: Function that takes an impl_type instance. - and returns a value that should be a type_var instance. - """ - assert isinstance(name, basestring), repr(name) - assert isinstance(impl_type, type), repr(impl_type) - assert not isinstance(impl_type, TypingMeta), repr(impl_type) - assert isinstance(type_var, (type, _TypingBase)), repr(type_var) - self.name = name - self.type_var = type_var - self.impl_type = impl_type - self.type_checker = type_checker - - def __repr__(self): - return "%s[%s]" % (self.name, _type_repr(self.type_var)) - - def __getitem__(self, parameter): - if not isinstance(self.type_var, TypeVar): - raise TypeError("%s cannot be further parameterized." 
% self) - if self.type_var.__constraints__ and isinstance(parameter, type): - if not issubclass(parameter, self.type_var.__constraints__): - raise TypeError("%s is not a valid substitution for %s." % - (parameter, self.type_var)) - if isinstance(parameter, TypeVar) and parameter is not self.type_var: - raise TypeError("%s cannot be re-parameterized." % self) - return self.__class__(self.name, parameter, - self.impl_type, self.type_checker) - - def __eq__(self, other): - if not isinstance(other, _TypeAlias): - return NotImplemented - return self.name == other.name and self.type_var == other.type_var - - def __hash__(self): - return hash((self.name, self.type_var)) - - def __instancecheck__(self, obj): - if not isinstance(self.type_var, TypeVar): - raise TypeError("Parameterized type aliases cannot be used " - "with isinstance().") - return isinstance(obj, self.impl_type) - - def __subclasscheck__(self, cls): - if not isinstance(self.type_var, TypeVar): - raise TypeError("Parameterized type aliases cannot be used " - "with issubclass().") - return issubclass(cls, self.impl_type) - - -def _get_type_vars(types, tvars): - for t in types: - if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): - t._get_type_vars(tvars) - - -def _type_vars(types): - tvars = [] - _get_type_vars(types, tvars) - return tuple(tvars) - - -def _eval_type(t, globalns, localns): - if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): - return t._eval_type(globalns, localns) - return t - - -def _type_check(arg, msg): - """Check that the argument is a type, and return it (internal helper). - - As a special case, accept None and return type(None) instead. - Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. - - The msg argument is a human-readable error message, e.g. - - "Union[arg, ...]: arg should be a type." - - We append the repr() of the actual value (truncated to 100 chars). - """ - if arg is None: - return type(None) - if isinstance(arg, basestring): - arg = _ForwardRef(arg) - if ( - isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or - not isinstance(arg, (type, _TypingBase)) and not callable(arg) - ): - raise TypeError(msg + " Got %.100r." % (arg,)) - # Bare Union etc. are not valid as type arguments - if ( - type(arg).__name__ in ('_Union', '_Optional') and - not getattr(arg, '__origin__', None) or - isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol) - ): - raise TypeError("Plain %s is not valid as type argument" % arg) - return arg - - -def _type_repr(obj): - """Return the repr() of an object, special-casing types (internal helper). - - If obj is a type, we return a shorter version than the default - type.__repr__, based on the module and qualified name, which is - typically enough to uniquely identify a type. For everything - else, we fall back on repr(obj). - """ - if isinstance(obj, type) and not isinstance(obj, TypingMeta): - if obj.__module__ == '__builtin__': - return _qualname(obj) - return '%s.%s' % (obj.__module__, _qualname(obj)) - if obj is Ellipsis: - return('...') - if isinstance(obj, types.FunctionType): - return obj.__name__ - return repr(obj) - - -class ClassVarMeta(TypingMeta): - """Metaclass for _ClassVar""" - - def __new__(cls, name, bases, namespace): - cls.assert_no_subclassing(bases) - self = super(ClassVarMeta, cls).__new__(cls, name, bases, namespace) - return self - - -class _ClassVar(_FinalTypingBase): - """Special type construct to mark class variables. 
- - An annotation wrapped in ClassVar indicates that a given - attribute is intended to be used as a class variable and - should not be set on instances of that class. Usage:: - - class Starship: - stats = {} # type: ClassVar[Dict[str, int]] # class variable - damage = 10 # type: int # instance variable - - ClassVar accepts only types and cannot be further subscribed. - - Note that ClassVar is not a class itself, and should not - be used with isinstance() or issubclass(). - """ - - __metaclass__ = ClassVarMeta - __slots__ = ('__type__',) - - def __init__(self, tp=None, _root=False): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(_type_check(item, - '{} accepts only types.'.format(cls.__name__[1:])), - _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - return type(self)(_eval_type(self.__type__, globalns, localns), - _root=True) - - def __repr__(self): - r = super(_ClassVar, self).__repr__() - if self.__type__ is not None: - r += '[{}]'.format(_type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _ClassVar): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - -ClassVar = _ClassVar(_root=True) - - -class AnyMeta(TypingMeta): - """Metaclass for Any.""" - - def __new__(cls, name, bases, namespace): - cls.assert_no_subclassing(bases) - self = super(AnyMeta, cls).__new__(cls, name, bases, namespace) - return self - - -class _Any(_FinalTypingBase): - """Special type indicating an unconstrained type. - - - Any is compatible with every type. - - Any assumed to have all methods. - - All values assumed to be instances of Any. - - Note that all the above statements are true from the point of view of - static type checkers. At runtime, Any should not be used with instance - or class checks. - """ - __metaclass__ = AnyMeta - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("Any cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Any cannot be used with issubclass().") - - -Any = _Any(_root=True) - - -class TypeVarMeta(TypingMeta): - def __new__(cls, name, bases, namespace): - cls.assert_no_subclassing(bases) - return super(TypeVarMeta, cls).__new__(cls, name, bases, namespace) - - -class TypeVar(_TypingBase): - """Type variable. - - Usage:: - - T = TypeVar('T') # Can be anything - A = TypeVar('A', str, bytes) # Must be str or bytes - - Type variables exist primarily for the benefit of static type - checkers. They serve as the parameters for generic types as well - as for generic function definitions. See class Generic for more - information on generic types. Generic functions work as follows: - - def repeat(x: T, n: int) -> List[T]: - '''Return a list containing n references to x.''' - return [x]*n - - def longest(x: A, y: A) -> A: - '''Return the longest of two strings.''' - return x if len(x) >= len(y) else y - - The latter example's signature is essentially the overloading - of (str, str) -> str and (bytes, bytes) -> bytes. Also note - that if the arguments are instances of some subclass of str, - the return type is still plain str. - - At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError. 
- - Type variables defined with covariant=True or contravariant=True - can be used do declare covariant or contravariant generic types. - See PEP 484 for more details. By default generic types are invariant - in all type variables. - - Type variables can be introspected. e.g.: - - T.__name__ == 'T' - T.__constraints__ == () - T.__covariant__ == False - T.__contravariant__ = False - A.__constraints__ == (str, bytes) - """ - - __metaclass__ = TypeVarMeta - __slots__ = ('__name__', '__bound__', '__constraints__', - '__covariant__', '__contravariant__') - - def __init__(self, name, *constraints, **kwargs): - super(TypeVar, self).__init__(name, *constraints, **kwargs) - bound = kwargs.get('bound', None) - covariant = kwargs.get('covariant', False) - contravariant = kwargs.get('contravariant', False) - self.__name__ = name - if covariant and contravariant: - raise ValueError("Bivariant types are not supported.") - self.__covariant__ = bool(covariant) - self.__contravariant__ = bool(contravariant) - if constraints and bound is not None: - raise TypeError("Constraints cannot be combined with bound=...") - if constraints and len(constraints) == 1: - raise TypeError("A single constraint is not allowed") - msg = "TypeVar(name, constraint, ...): constraints must be types." - self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) - if bound: - self.__bound__ = _type_check(bound, "Bound must be a type.") - else: - self.__bound__ = None - - def _get_type_vars(self, tvars): - if self not in tvars: - tvars.append(self) - - def __repr__(self): - if self.__covariant__: - prefix = '+' - elif self.__contravariant__: - prefix = '-' - else: - prefix = '~' - return prefix + self.__name__ - - def __instancecheck__(self, instance): - raise TypeError("Type variables cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Type variables cannot be used with issubclass().") - - -# Some unconstrained type variables. These are used by the container types. -# (These are not for export.) -T = TypeVar('T') # Any type. -KT = TypeVar('KT') # Key type. -VT = TypeVar('VT') # Value type. -T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. -V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. -VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. -T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. - -# A useful type variable with constraints. This represents string types. -# (This one *is* for export!) -AnyStr = TypeVar('AnyStr', bytes, unicode) - - -def _replace_arg(arg, tvars, args): - """An internal helper function: replace arg if it is a type variable - found in tvars with corresponding substitution from args or - with corresponding substitution sub-tree if arg is a generic type. 
- """ - - if tvars is None: - tvars = [] - if hasattr(arg, '_subs_tree') and isinstance(arg, (GenericMeta, _TypingBase)): - return arg._subs_tree(tvars, args) - if isinstance(arg, TypeVar): - for i, tvar in enumerate(tvars): - if arg == tvar: - return args[i] - return arg - - -# Special typing constructs Union, Optional, Generic, Callable and Tuple -# use three special attributes for internal bookkeeping of generic types: -# * __parameters__ is a tuple of unique free type parameters of a generic -# type, for example, Dict[T, T].__parameters__ == (T,); -# * __origin__ keeps a reference to a type that was subscripted, -# e.g., Union[T, int].__origin__ == Union; -# * __args__ is a tuple of all arguments used in subscripting, -# e.g., Dict[T, int].__args__ == (T, int). - - -def _subs_tree(cls, tvars=None, args=None): - """An internal helper function: calculate substitution tree - for generic cls after replacing its type parameters with - substitutions in tvars -> args (if any). - Repeat the same following __origin__'s. - - Return a list of arguments with all possible substitutions - performed. Arguments that are generic classes themselves are represented - as tuples (so that no new classes are created by this function). - For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)] - """ - - if cls.__origin__ is None: - return cls - # Make of chain of origins (i.e. cls -> cls.__origin__) - current = cls.__origin__ - orig_chain = [] - while current.__origin__ is not None: - orig_chain.append(current) - current = current.__origin__ - # Replace type variables in __args__ if asked ... - tree_args = [] - for arg in cls.__args__: - tree_args.append(_replace_arg(arg, tvars, args)) - # ... then continue replacing down the origin chain. - for ocls in orig_chain: - new_tree_args = [] - for arg in ocls.__args__: - new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args)) - tree_args = new_tree_args - return tree_args - - -def _remove_dups_flatten(parameters): - """An internal helper for Union creation and substitution: flatten Union's - among parameters, then remove duplicates and strict subclasses. - """ - - # Flatten out Union[Union[...], ...]. - params = [] - for p in parameters: - if isinstance(p, _Union) and p.__origin__ is Union: - params.extend(p.__args__) - elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union: - params.extend(p[1:]) - else: - params.append(p) - # Weed out strict duplicates, preserving the first of each occurrence. - all_params = set(params) - if len(all_params) < len(params): - new_params = [] - for t in params: - if t in all_params: - new_params.append(t) - all_params.remove(t) - params = new_params - assert not all_params, all_params - # Weed out subclasses. - # E.g. Union[int, Employee, Manager] == Union[int, Employee]. - # If object is present it will be sole survivor among proper classes. - # Never discard type variables. - # (In particular, Union[str, AnyStr] != AnyStr.) - all_params = set(params) - for t1 in params: - if not isinstance(t1, type): - continue - if any(isinstance(t2, type) and issubclass(t1, t2) - for t2 in all_params - {t1} - if not (isinstance(t2, GenericMeta) and - t2.__origin__ is not None)): - all_params.remove(t1) - return tuple(t for t in params if t in all_params) - - -def _check_generic(cls, parameters): - # Check correct count for parameters of a generic cls (internal helper). 
- if not cls.__parameters__: - raise TypeError("%s is not a generic class" % repr(cls)) - alen = len(parameters) - elen = len(cls.__parameters__) - if alen != elen: - raise TypeError("Too %s parameters for %s; actual %s, expected %s" % - ("many" if alen > elen else "few", repr(cls), alen, elen)) - - -_cleanups = [] - - -def _tp_cache(func): - maxsize = 128 - cache = {} - _cleanups.append(cache.clear) - - @functools.wraps(func) - def inner(*args): - key = args - try: - return cache[key] - except TypeError: - # Assume it's an unhashable argument. - return func(*args) - except KeyError: - value = func(*args) - if len(cache) >= maxsize: - # If the cache grows too much, just start over. - cache.clear() - cache[key] = value - return value - - return inner - - -class UnionMeta(TypingMeta): - """Metaclass for Union.""" - - def __new__(cls, name, bases, namespace): - cls.assert_no_subclassing(bases) - return super(UnionMeta, cls).__new__(cls, name, bases, namespace) - - -class _Union(_FinalTypingBase): - """Union type; Union[X, Y] means either X or Y. - - To define a union, use e.g. Union[int, str]. Details: - - - The arguments must be types and there must be at least one. - - - None as an argument is a special case and is replaced by - type(None). - - - Unions of unions are flattened, e.g.:: - - Union[Union[int, str], float] == Union[int, str, float] - - - Unions of a single argument vanish, e.g.:: - - Union[int] == int # The constructor actually returns int - - - Redundant arguments are skipped, e.g.:: - - Union[int, str, int] == Union[int, str] - - - When comparing unions, the argument order is ignored, e.g.:: - - Union[int, str] == Union[str, int] - - - When two arguments have a subclass relationship, the least - derived argument is kept, e.g.:: - - class Employee: pass - class Manager(Employee): pass - Union[int, Employee, Manager] == Union[int, Employee] - Union[Manager, int, Employee] == Union[int, Employee] - Union[Employee, Manager] == Employee - - - Similar for object:: - - Union[int, object] == object - - - You cannot subclass or instantiate a union. - - - You can use Optional[X] as a shorthand for Union[X, None]. - """ - - __metaclass__ = UnionMeta - __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__') - - def __new__(cls, parameters=None, origin=None, *args, **kwds): - self = super(_Union, cls).__new__(cls, parameters, origin, *args, **kwds) - if origin is None: - self.__parameters__ = None - self.__args__ = None - self.__origin__ = None - self.__tree_hash__ = hash(frozenset(('Union',))) - return self - if not isinstance(parameters, tuple): - raise TypeError("Expected parameters=") - if origin is Union: - parameters = _remove_dups_flatten(parameters) - # It's not a union if there's only one type left. - if len(parameters) == 1: - return parameters[0] - self.__parameters__ = _type_vars(parameters) - self.__args__ = parameters - self.__origin__ = origin - # Pre-calculate the __hash__ on instantiation. - # This improves speed for complex substitutions. - subs_tree = self._subs_tree() - if isinstance(subs_tree, tuple): - self.__tree_hash__ = hash(frozenset(subs_tree)) - else: - self.__tree_hash__ = hash(subs_tree) - return self - - def _eval_type(self, globalns, localns): - if self.__args__ is None: - return self - ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__) - ev_origin = _eval_type(self.__origin__, globalns, localns) - if ev_args == self.__args__ and ev_origin == self.__origin__: - # Everything is already evaluated. 
- return self - return self.__class__(ev_args, ev_origin, _root=True) - - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - _get_type_vars(self.__parameters__, tvars) - - def __repr__(self): - if self.__origin__ is None: - return super(_Union, self).__repr__() - tree = self._subs_tree() - if not isinstance(tree, tuple): - return repr(tree) - return tree[0]._tree_repr(tree) - - def _tree_repr(self, tree): - arg_list = [] - for arg in tree[1:]: - if not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - return super(_Union, self).__repr__() + '[%s]' % ', '.join(arg_list) - - @_tp_cache - def __getitem__(self, parameters): - if parameters == (): - raise TypeError("Cannot take a Union of no types.") - if not isinstance(parameters, tuple): - parameters = (parameters,) - if self.__origin__ is None: - msg = "Union[arg, ...]: each arg must be a type." - else: - msg = "Parameters to generic types must be types." - parameters = tuple(_type_check(p, msg) for p in parameters) - if self is not Union: - _check_generic(self, parameters) - return self.__class__(parameters, origin=self, _root=True) - - def _subs_tree(self, tvars=None, args=None): - if self is Union: - return Union # Nothing to substitute - tree_args = _subs_tree(self, tvars, args) - tree_args = _remove_dups_flatten(tree_args) - if len(tree_args) == 1: - return tree_args[0] # Union of a single type is that type - return (Union,) + tree_args - - def __eq__(self, other): - if isinstance(other, _Union): - return self.__tree_hash__ == other.__tree_hash__ - elif self is not Union: - return self._subs_tree() == other - else: - return self is other - - def __hash__(self): - return self.__tree_hash__ - - def __instancecheck__(self, obj): - raise TypeError("Unions cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Unions cannot be used with issubclass().") - - -Union = _Union(_root=True) - - -class OptionalMeta(TypingMeta): - """Metaclass for Optional.""" - - def __new__(cls, name, bases, namespace): - cls.assert_no_subclassing(bases) - return super(OptionalMeta, cls).__new__(cls, name, bases, namespace) - - -class _Optional(_FinalTypingBase): - """Optional type. - - Optional[X] is equivalent to Union[X, None]. - """ - - __metaclass__ = OptionalMeta - __slots__ = () - - @_tp_cache - def __getitem__(self, arg): - arg = _type_check(arg, "Optional[t] requires a single type.") - return Union[arg, type(None)] - - -Optional = _Optional(_root=True) - - -def _gorg(a): - """Return the farthest origin of a generic class (internal helper).""" - assert isinstance(a, GenericMeta) - while a.__origin__ is not None: - a = a.__origin__ - return a - - -def _geqv(a, b): - """Return whether two generic classes are equivalent (internal helper). - - The intention is to consider generic class X and any of its - parameterized forms (X[T], X[int], etc.) as equivalent. - - However, X is not equivalent to a subclass of X. - - The relation is reflexive, symmetric and transitive. - """ - assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta) - # Reduce each to its origin. - return _gorg(a) is _gorg(b) - - -def _next_in_mro(cls): - """Helper for Generic.__new__. - - Returns the class after the last occurrence of Generic or - Generic[...] in cls.__mro__. - """ - next_in_mro = object - # Look for the last occurrence of Generic or Generic[...]. 
- for i, c in enumerate(cls.__mro__[:-1]): - if isinstance(c, GenericMeta) and _gorg(c) is Generic: - next_in_mro = cls.__mro__[i + 1] - return next_in_mro - - -def _make_subclasshook(cls): - """Construct a __subclasshook__ callable that incorporates - the associated __extra__ class in subclass checks performed - against cls. - """ - if isinstance(cls.__extra__, abc.ABCMeta): - # The logic mirrors that of ABCMeta.__subclasscheck__. - # Registered classes need not be checked here because - # cls and its extra share the same _abc_registry. - def __extrahook__(cls, subclass): - res = cls.__extra__.__subclasshook__(subclass) - if res is not NotImplemented: - return res - if cls.__extra__ in getattr(subclass, '__mro__', ()): - return True - for scls in cls.__extra__.__subclasses__(): - if isinstance(scls, GenericMeta): - continue - if issubclass(subclass, scls): - return True - return NotImplemented - else: - # For non-ABC extras we'll just call issubclass(). - def __extrahook__(cls, subclass): - if cls.__extra__ and issubclass(subclass, cls.__extra__): - return True - return NotImplemented - return classmethod(__extrahook__) - - -class GenericMeta(TypingMeta, abc.ABCMeta): - """Metaclass for generic types. - - This is a metaclass for typing.Generic and generic ABCs defined in - typing module. User defined subclasses of GenericMeta can override - __new__ and invoke super().__new__. Note that GenericMeta.__new__ - has strict rules on what is allowed in its bases argument: - * plain Generic is disallowed in bases; - * Generic[...] should appear in bases at most once; - * if Generic[...] is present, then it should list all type variables - that appear in other bases. - In addition, type of all generic bases is erased, e.g., C[int] is - stripped to plain C. - """ - - def __new__(cls, name, bases, namespace, - tvars=None, args=None, origin=None, extra=None, orig_bases=None): - """Create a new generic class. GenericMeta.__new__ accepts - keyword arguments that are used for internal bookkeeping, therefore - an override should pass unused keyword arguments to super(). - """ - if tvars is not None: - # Called from __getitem__() below. - assert origin is not None - assert all(isinstance(t, TypeVar) for t in tvars), tvars - else: - # Called from class statement. - assert tvars is None, tvars - assert args is None, args - assert origin is None, origin - - # Get the full set of tvars from the bases. - tvars = _type_vars(bases) - # Look for Generic[T1, ..., Tn]. - # If found, tvars must be a subset of it. - # If not found, tvars is it. - # Also check for and reject plain Generic, - # and reject multiple Generic[...]. - gvars = None - for base in bases: - if base is Generic: - raise TypeError("Cannot inherit from plain Generic") - if (isinstance(base, GenericMeta) and - base.__origin__ is Generic): - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...] 
multiple types.") - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: - raise TypeError( - "Some type variables (%s) " - "are not listed in Generic[%s]" % - (", ".join(str(t) for t in tvars if t not in gvarset), - ", ".join(str(g) for g in gvars))) - tvars = gvars - - initial_bases = bases - if extra is None: - extra = namespace.get('__extra__') - if extra is not None and type(extra) is abc.ABCMeta and extra not in bases: - bases = (extra,) + bases - bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases) - - # remove bare Generic from bases if there are other generic bases - if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): - bases = tuple(b for b in bases if b is not Generic) - namespace.update({'__origin__': origin, '__extra__': extra}) - self = super(GenericMeta, cls).__new__(cls, name, bases, namespace) - - self.__parameters__ = tvars - # Be prepared that GenericMeta will be subclassed by TupleMeta - # and CallableMeta, those two allow ..., (), or [] in __args___. - self.__args__ = tuple(Ellipsis if a is _TypingEllipsis else - () if a is _TypingEmpty else - a for a in args) if args else None - # Speed hack (https://github.com/python/typing/issues/196). - self.__next_in_mro__ = _next_in_mro(self) - # Preserve base classes on subclassing (__bases__ are type erased now). - if orig_bases is None: - self.__orig_bases__ = initial_bases - - # This allows unparameterized generic collections to be used - # with issubclass() and isinstance() in the same way as their - # collections.abc counterparts (e.g., isinstance([], Iterable)). - if ( - '__subclasshook__' not in namespace and extra or - # allow overriding - getattr(self.__subclasshook__, '__name__', '') == '__extrahook__' - ): - self.__subclasshook__ = _make_subclasshook(self) - - if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2. - self.__qualname__ = origin.__qualname__ - self.__tree_hash__ = (hash(self._subs_tree()) if origin else - super(GenericMeta, self).__hash__()) - return self - - def __init__(self, *args, **kwargs): - super(GenericMeta, self).__init__(*args, **kwargs) - if isinstance(self.__extra__, abc.ABCMeta): - self._abc_registry = self.__extra__._abc_registry - self._abc_cache = self.__extra__._abc_cache - elif self.__origin__ is not None: - self._abc_registry = self.__origin__._abc_registry - self._abc_cache = self.__origin__._abc_cache - - # _abc_negative_cache and _abc_negative_cache_version - # realised as descriptors, since GenClass[t1, t2, ...] always - # share subclass info with GenClass. - # This is an important memory optimization. 
- @property - def _abc_negative_cache(self): - if isinstance(self.__extra__, abc.ABCMeta): - return self.__extra__._abc_negative_cache - return _gorg(self)._abc_generic_negative_cache - - @_abc_negative_cache.setter - def _abc_negative_cache(self, value): - if self.__origin__ is None: - if isinstance(self.__extra__, abc.ABCMeta): - self.__extra__._abc_negative_cache = value - else: - self._abc_generic_negative_cache = value - - @property - def _abc_negative_cache_version(self): - if isinstance(self.__extra__, abc.ABCMeta): - return self.__extra__._abc_negative_cache_version - return _gorg(self)._abc_generic_negative_cache_version - - @_abc_negative_cache_version.setter - def _abc_negative_cache_version(self, value): - if self.__origin__ is None: - if isinstance(self.__extra__, abc.ABCMeta): - self.__extra__._abc_negative_cache_version = value - else: - self._abc_generic_negative_cache_version = value - - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - _get_type_vars(self.__parameters__, tvars) - - def _eval_type(self, globalns, localns): - ev_origin = (self.__origin__._eval_type(globalns, localns) - if self.__origin__ else None) - ev_args = tuple(_eval_type(a, globalns, localns) for a - in self.__args__) if self.__args__ else None - if ev_origin == self.__origin__ and ev_args == self.__args__: - return self - return self.__class__(self.__name__, - self.__bases__, - dict(self.__dict__), - tvars=_type_vars(ev_args) if ev_args else None, - args=ev_args, - origin=ev_origin, - extra=self.__extra__, - orig_bases=self.__orig_bases__) - - def __repr__(self): - if self.__origin__ is None: - return super(GenericMeta, self).__repr__() - return self._tree_repr(self._subs_tree()) - - def _tree_repr(self, tree): - arg_list = [] - for arg in tree[1:]: - if arg == (): - arg_list.append('()') - elif not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - return super(GenericMeta, self).__repr__() + '[%s]' % ', '.join(arg_list) - - def _subs_tree(self, tvars=None, args=None): - if self.__origin__ is None: - return self - tree_args = _subs_tree(self, tvars, args) - return (_gorg(self),) + tuple(tree_args) - - def __eq__(self, other): - if not isinstance(other, GenericMeta): - return NotImplemented - if self.__origin__ is None or other.__origin__ is None: - return self is other - return self.__tree_hash__ == other.__tree_hash__ - - def __hash__(self): - return self.__tree_hash__ - - @_tp_cache - def __getitem__(self, params): - if not isinstance(params, tuple): - params = (params,) - if not params and not _gorg(self) is Tuple: - raise TypeError( - "Parameter list to %s[...] cannot be empty" % _qualname(self)) - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if self is Generic: - # Generic can only be subscripted with unique type variables. - if not all(isinstance(p, TypeVar) for p in params): - raise TypeError( - "Parameters to Generic[...] must all be type variables") - if len(set(params)) != len(params): - raise TypeError( - "Parameters to Generic[...] must all be unique") - tvars = params - args = params - elif self in (Tuple, Callable): - tvars = _type_vars(params) - args = params - elif self is _Protocol: - # _Protocol is internal, don't check anything. - tvars = params - args = params - elif self.__origin__ in (Generic, _Protocol): - # Can't subscript Generic[...] or _Protocol[...]. 
- raise TypeError("Cannot subscript already-subscripted %s" % - repr(self)) - else: - # Subscripting a regular Generic subclass. - _check_generic(self, params) - tvars = _type_vars(params) - args = params - - prepend = (self,) if self.__origin__ is None else () - return self.__class__(self.__name__, - prepend + self.__bases__, - dict(self.__dict__), - tvars=tvars, - args=args, - origin=self, - extra=self.__extra__, - orig_bases=self.__orig_bases__) - - def __subclasscheck__(self, cls): - if self.__origin__ is not None: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: - raise TypeError("Parameterized generics cannot be used with class " - "or instance checks") - return False - if self is Generic: - raise TypeError("Class %r cannot be used with class " - "or instance checks" % self) - return super(GenericMeta, self).__subclasscheck__(cls) - - def __instancecheck__(self, instance): - # Since we extend ABC.__subclasscheck__ and - # ABC.__instancecheck__ inlines the cache checking done by the - # latter, we must extend __instancecheck__ too. For simplicity - # we just skip the cache check -- instance checks for generic - # classes are supposed to be rare anyways. - if not isinstance(instance, type): - return issubclass(instance.__class__, self) - return False - - def __copy__(self): - return self.__class__(self.__name__, self.__bases__, dict(self.__dict__), - self.__parameters__, self.__args__, self.__origin__, - self.__extra__, self.__orig_bases__) - - def __setattr__(self, attr, value): - # We consider all the subscripted genrics as proxies for original class - if ( - attr.startswith('__') and attr.endswith('__') or - attr.startswith('_abc_') - ): - super(GenericMeta, self).__setattr__(attr, value) - else: - super(GenericMeta, _gorg(self)).__setattr__(attr, value) - - -# Prevent checks for Generic to crash when defining Generic. -Generic = None - - -def _generic_new(base_cls, cls, *args, **kwds): - # Assure type is erased on instantiation, - # but attempt to store it in __orig_class__ - if cls.__origin__ is None: - return base_cls.__new__(cls) - else: - origin = _gorg(cls) - obj = base_cls.__new__(origin) - try: - obj.__orig_class__ = cls - except AttributeError: - pass - obj.__init__(*args, **kwds) - return obj - - -class Generic(object): - """Abstract base class for generic types. - - A generic type is typically declared by inheriting from - this class parameterized with one or more type variables. - For example, a generic mapping type might be defined as:: - - class Mapping(Generic[KT, VT]): - def __getitem__(self, key: KT) -> VT: - ... - # Etc. - - This class can then be used as follows:: - - def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT: - try: - return mapping[key] - except KeyError: - return default - """ - - __metaclass__ = GenericMeta - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Generic): - raise TypeError("Type Generic cannot be instantiated; " - "it can be used only as a base class") - return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) - - -class _TypingEmpty(object): - """Internal placeholder for () or []. Used by TupleMeta and CallableMeta - to allow empty list/tuple in specific places, without allowing them - to sneak in where prohibited. - """ - - -class _TypingEllipsis(object): - """Internal placeholder for ... 
(ellipsis).""" - - -class TupleMeta(GenericMeta): - """Metaclass for Tuple (internal).""" - - @_tp_cache - def __getitem__(self, parameters): - if self.__origin__ is not None or not _geqv(self, Tuple): - # Normal generic rules apply if this is not the first subscription - # or a subscription of a subclass. - return super(TupleMeta, self).__getitem__(parameters) - if parameters == (): - return super(TupleMeta, self).__getitem__((_TypingEmpty,)) - if not isinstance(parameters, tuple): - parameters = (parameters,) - if len(parameters) == 2 and parameters[1] is Ellipsis: - msg = "Tuple[t, ...]: t must be a type." - p = _type_check(parameters[0], msg) - return super(TupleMeta, self).__getitem__((p, _TypingEllipsis)) - msg = "Tuple[t0, t1, ...]: each t must be a type." - parameters = tuple(_type_check(p, msg) for p in parameters) - return super(TupleMeta, self).__getitem__(parameters) - - def __instancecheck__(self, obj): - if self.__args__ is None: - return isinstance(obj, tuple) - raise TypeError("Parameterized Tuple cannot be used " - "with isinstance().") - - def __subclasscheck__(self, cls): - if self.__args__ is None: - return issubclass(cls, tuple) - raise TypeError("Parameterized Tuple cannot be used " - "with issubclass().") - - -class Tuple(tuple): - """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. - - Example: Tuple[T1, T2] is a tuple of two elements corresponding - to type variables T1 and T2. Tuple[int, float, str] is a tuple - of an int, a float and a string. - - To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. - """ - - __metaclass__ = TupleMeta - __extra__ = tuple - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Tuple): - raise TypeError("Type Tuple cannot be instantiated; " - "use tuple() instead") - return _generic_new(tuple, cls, *args, **kwds) - - -class CallableMeta(GenericMeta): - """ Metaclass for Callable.""" - - def __repr__(self): - if self.__origin__ is None: - return super(CallableMeta, self).__repr__() - return self._tree_repr(self._subs_tree()) - - def _tree_repr(self, tree): - if _gorg(self) is not Callable: - return super(CallableMeta, self)._tree_repr(tree) - # For actual Callable (not its subclass) we override - # super(CallableMeta, self)._tree_repr() for nice formatting. - arg_list = [] - for arg in tree[1:]: - if not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - if arg_list[0] == '...': - return repr(tree[0]) + '[..., %s]' % arg_list[1] - return (repr(tree[0]) + - '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1])) - - def __getitem__(self, parameters): - """A thin wrapper around __getitem_inner__ to provide the latter - with hashable arguments to improve speed. - """ - - if self.__origin__ is not None or not _geqv(self, Callable): - return super(CallableMeta, self).__getitem__(parameters) - if not isinstance(parameters, tuple) or len(parameters) != 2: - raise TypeError("Callable must be used as " - "Callable[[arg, ...], result].") - args, result = parameters - if args is Ellipsis: - parameters = (Ellipsis, result) - else: - if not isinstance(args, list): - raise TypeError("Callable[args, result]: args must be a list." - " Got %.100r." % (args,)) - parameters = (tuple(args), result) - return self.__getitem_inner__(parameters) - - @_tp_cache - def __getitem_inner__(self, parameters): - args, result = parameters - msg = "Callable[args, result]: result must be a type." 
- result = _type_check(result, msg) - if args is Ellipsis: - return super(CallableMeta, self).__getitem__((_TypingEllipsis, result)) - msg = "Callable[[arg, ...], result]: each arg must be a type." - args = tuple(_type_check(arg, msg) for arg in args) - parameters = args + (result,) - return super(CallableMeta, self).__getitem__(parameters) - - -class Callable(object): - """Callable type; Callable[[int], str] is a function of (int) -> str. - - The subscription syntax must always be used with exactly two - values: the argument list and the return type. The argument list - must be a list of types or ellipsis; the return type must be a single type. - - There is no syntax to indicate optional or keyword arguments, - such function types are rarely used as callback types. - """ - - __metaclass__ = CallableMeta - __extra__ = collections_abc.Callable - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Callable): - raise TypeError("Type Callable cannot be instantiated; " - "use a non-abstract subclass instead") - return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) - - -def cast(typ, val): - """Cast a value to a type. - - This returns the value unchanged. To the type checker this - signals that the return value has the designated type, but at - runtime we intentionally don't check anything (we want this - to be as fast as possible). - """ - return val - - -def _get_defaults(func): - """Internal helper to extract the default arguments, by name.""" - code = func.__code__ - pos_count = code.co_argcount - arg_names = code.co_varnames - arg_names = arg_names[:pos_count] - defaults = func.__defaults__ or () - kwdefaults = func.__kwdefaults__ - res = dict(kwdefaults) if kwdefaults else {} - pos_offset = pos_count - len(defaults) - for name, value in zip(arg_names[pos_offset:], defaults): - assert name not in res - res[name] = value - return res - - -def get_type_hints(obj, globalns=None, localns=None): - """In Python 2 this is not supported and always returns None.""" - return None - - -def no_type_check(arg): - """Decorator to indicate that annotations are not type hints. - - The argument must be a class or function; if it is a class, it - applies recursively to all methods and classes defined in that class - (but not to methods defined in its superclasses or subclasses). - - This mutates the function(s) or class(es) in place. - """ - if isinstance(arg, type): - arg_attrs = arg.__dict__.copy() - for attr, val in arg.__dict__.items(): - if val in arg.__bases__: - arg_attrs.pop(attr) - for obj in arg_attrs.values(): - if isinstance(obj, types.FunctionType): - obj.__no_type_check__ = True - if isinstance(obj, type): - no_type_check(obj) - try: - arg.__no_type_check__ = True - except TypeError: # built-in classes - pass - return arg - - -def no_type_check_decorator(decorator): - """Decorator to give another decorator the @no_type_check effect. - - This wraps the decorator with something that wraps the decorated - function in @no_type_check. - """ - - @functools.wraps(decorator) - def wrapped_decorator(*args, **kwds): - func = decorator(*args, **kwds) - func = no_type_check(func) - return func - - return wrapped_decorator - - -def _overload_dummy(*args, **kwds): - """Helper for @overload to raise when called.""" - raise NotImplementedError( - "You should not call an overloaded function. 
" - "A series of @overload-decorated functions " - "outside a stub module should always be followed " - "by an implementation that is not @overload-ed.") - - -def overload(func): - """Decorator for overloaded functions/methods. - - In a stub file, place two or more stub definitions for the same - function in a row, each decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - - In a non-stub file (i.e. a regular .py file), do the same but - follow it with an implementation. The implementation should *not* - be decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - def utf8(value): - # implementation goes here - """ - return _overload_dummy - - -class _ProtocolMeta(GenericMeta): - """Internal metaclass for _Protocol. - - This exists so _Protocol classes can be generic without deriving - from Generic. - """ - - def __instancecheck__(self, obj): - if _Protocol not in self.__bases__: - return super(_ProtocolMeta, self).__instancecheck__(obj) - raise TypeError("Protocols cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if not self._is_protocol: - # No structural checks since this isn't a protocol. - return NotImplemented - - if self is _Protocol: - # Every class is a subclass of the empty protocol. - return True - - # Find all attributes defined in the protocol. - attrs = self._get_protocol_attrs() - - for attr in attrs: - if not any(attr in d.__dict__ for d in cls.__mro__): - return False - return True - - def _get_protocol_attrs(self): - # Get all Protocol base classes. - protocol_bases = [] - for c in self.__mro__: - if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': - protocol_bases.append(c) - - # Get attributes included in protocol. - attrs = set() - for base in protocol_bases: - for attr in base.__dict__.keys(): - # Include attributes not defined in any non-protocol bases. - for c in self.__mro__: - if (c is not base and attr in c.__dict__ and - not getattr(c, '_is_protocol', False)): - break - else: - if (not attr.startswith('_abc_') and - attr != '__abstractmethods__' and - attr != '_is_protocol' and - attr != '__dict__' and - attr != '__args__' and - attr != '__slots__' and - attr != '_get_protocol_attrs' and - attr != '__next_in_mro__' and - attr != '__parameters__' and - attr != '__origin__' and - attr != '__orig_bases__' and - attr != '__extra__' and - attr != '__tree_hash__' and - attr != '__module__'): - attrs.add(attr) - - return attrs - - -class _Protocol(object): - """Internal base class for protocol classes. - - This implements a simple-minded structural issubclass check - (similar but more general than the one-offs in collections.abc - such as Hashable). - """ - - __metaclass__ = _ProtocolMeta - __slots__ = () - - _is_protocol = True - - -# Various ABCs mimicking those in collections.abc. -# A few are simply re-exported for completeness. - -Hashable = collections_abc.Hashable # Not generic. 
- - -class Iterable(Generic[T_co]): - __slots__ = () - __extra__ = collections_abc.Iterable - - -class Iterator(Iterable[T_co]): - __slots__ = () - __extra__ = collections_abc.Iterator - - -class SupportsInt(_Protocol): - __slots__ = () - - @abstractmethod - def __int__(self): - pass - - -class SupportsFloat(_Protocol): - __slots__ = () - - @abstractmethod - def __float__(self): - pass - - -class SupportsComplex(_Protocol): - __slots__ = () - - @abstractmethod - def __complex__(self): - pass - - -class SupportsAbs(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __abs__(self): - pass - - -if hasattr(collections_abc, 'Reversible'): - class Reversible(Iterable[T_co]): - __slots__ = () - __extra__ = collections_abc.Reversible -else: - class Reversible(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __reversed__(self): - pass - - -Sized = collections_abc.Sized # Not generic. - - -class Container(Generic[T_co]): - __slots__ = () - __extra__ = collections_abc.Container - - -# Callable was defined earlier. - - -class AbstractSet(Sized, Iterable[T_co], Container[T_co]): - __slots__ = () - __extra__ = collections_abc.Set - - -class MutableSet(AbstractSet[T]): - __slots__ = () - __extra__ = collections_abc.MutableSet - - -# NOTE: It is only covariant in the value type. -class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co]): - __slots__ = () - __extra__ = collections_abc.Mapping - - -class MutableMapping(Mapping[KT, VT]): - __slots__ = () - __extra__ = collections_abc.MutableMapping - - -if hasattr(collections_abc, 'Reversible'): - class Sequence(Sized, Reversible[T_co], Container[T_co]): - __slots__ = () - __extra__ = collections_abc.Sequence -else: - class Sequence(Sized, Iterable[T_co], Container[T_co]): - __slots__ = () - __extra__ = collections_abc.Sequence - - -class MutableSequence(Sequence[T]): - __slots__ = () - __extra__ = collections_abc.MutableSequence - - -class ByteString(Sequence[int]): - pass - - -ByteString.register(str) -ByteString.register(bytearray) - - -class List(list, MutableSequence[T]): - __slots__ = () - __extra__ = list - - def __new__(cls, *args, **kwds): - if _geqv(cls, List): - raise TypeError("Type List cannot be instantiated; " - "use list() instead") - return _generic_new(list, cls, *args, **kwds) - - -class Deque(collections.deque, MutableSequence[T]): - __slots__ = () - __extra__ = collections.deque - - def __new__(cls, *args, **kwds): - if _geqv(cls, Deque): - return collections.deque(*args, **kwds) - return _generic_new(collections.deque, cls, *args, **kwds) - - -class Set(set, MutableSet[T]): - __slots__ = () - __extra__ = set - - def __new__(cls, *args, **kwds): - if _geqv(cls, Set): - raise TypeError("Type Set cannot be instantiated; " - "use set() instead") - return _generic_new(set, cls, *args, **kwds) - - -class FrozenSet(frozenset, AbstractSet[T_co]): - __slots__ = () - __extra__ = frozenset - - def __new__(cls, *args, **kwds): - if _geqv(cls, FrozenSet): - raise TypeError("Type FrozenSet cannot be instantiated; " - "use frozenset() instead") - return _generic_new(frozenset, cls, *args, **kwds) - - -class MappingView(Sized, Iterable[T_co]): - __slots__ = () - __extra__ = collections_abc.MappingView - - -class KeysView(MappingView[KT], AbstractSet[KT]): - __slots__ = () - __extra__ = collections_abc.KeysView - - -class ItemsView(MappingView[Tuple[KT, VT_co]], - AbstractSet[Tuple[KT, VT_co]], - Generic[KT, VT_co]): - __slots__ = () - __extra__ = collections_abc.ItemsView - - -class ValuesView(MappingView[VT_co]): - 
__slots__ = () - __extra__ = collections_abc.ValuesView - - -class Dict(dict, MutableMapping[KT, VT]): - __slots__ = () - __extra__ = dict - - def __new__(cls, *args, **kwds): - if _geqv(cls, Dict): - raise TypeError("Type Dict cannot be instantiated; " - "use dict() instead") - return _generic_new(dict, cls, *args, **kwds) - - -class DefaultDict(collections.defaultdict, MutableMapping[KT, VT]): - __slots__ = () - __extra__ = collections.defaultdict - - def __new__(cls, *args, **kwds): - if _geqv(cls, DefaultDict): - return collections.defaultdict(*args, **kwds) - return _generic_new(collections.defaultdict, cls, *args, **kwds) - - -class Counter(collections.Counter, Dict[T, int]): - __slots__ = () - __extra__ = collections.Counter - - def __new__(cls, *args, **kwds): - if _geqv(cls, Counter): - return collections.Counter(*args, **kwds) - return _generic_new(collections.Counter, cls, *args, **kwds) - - -# Determine what base class to use for Generator. -if hasattr(collections_abc, 'Generator'): - # Sufficiently recent versions of 3.5 have a Generator ABC. - _G_base = collections_abc.Generator -else: - # Fall back on the exact type. - _G_base = types.GeneratorType - - -class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co]): - __slots__ = () - __extra__ = _G_base - - def __new__(cls, *args, **kwds): - if _geqv(cls, Generator): - raise TypeError("Type Generator cannot be instantiated; " - "create a subclass instead") - return _generic_new(_G_base, cls, *args, **kwds) - - -# Internal type variable used for Type[]. -CT_co = TypeVar('CT_co', covariant=True, bound=type) - - -# This is not a real generic class. Don't use outside annotations. -class Type(Generic[CT_co]): - """A special construct usable to annotate class objects. - - For example, suppose we have the following classes:: - - class User: ... # Abstract base for User classes - class BasicUser(User): ... - class ProUser(User): ... - class TeamUser(User): ... - - And a function that takes a class argument that's a subclass of - User and returns an instance of the corresponding class:: - - U = TypeVar('U', bound=User) - def new_user(user_class: Type[U]) -> U: - user = user_class() - # (Here we could write the user object to a database) - return user - - joe = new_user(BasicUser) - - At this point the type checker knows that joe has type BasicUser. - """ - __slots__ = () - __extra__ = type - - -def NamedTuple(typename, fields): - """Typed version of namedtuple. - - Usage:: - - Employee = typing.NamedTuple('Employee', [('name', str), ('id', int)]) - - This is equivalent to:: - - Employee = collections.namedtuple('Employee', ['name', 'id']) - - The resulting class has one extra attribute: _field_types, - giving a dict mapping field names to types. (The field names - are in the _fields attribute, which is part of the namedtuple - API.) - """ - fields = [(n, t) for n, t in fields] - cls = collections.namedtuple(typename, [n for n, t in fields]) - cls._field_types = dict(fields) - # Set the module to the caller's module (otherwise it'd be 'typing'). - try: - cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - return cls - - -def NewType(name, tp): - """NewType creates simple unique types with almost zero - runtime overhead. NewType(name, tp) is considered a subtype of tp - by static type checkers. At runtime, NewType(name, tp) returns - a dummy function that simply returns its argument. 
Usage:: - - UserId = NewType('UserId', int) - - def name_by_id(user_id): - # type: (UserId) -> str - ... - - UserId('user') # Fails type check - - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK - - num = UserId(5) + 1 # type: int - """ - - def new_type(x): - return x - - # Some versions of Python 2 complain because of making all strings unicode - new_type.__name__ = str(name) - new_type.__supertype__ = tp - return new_type - - -# Python-version-specific alias (Python 2: unicode; Python 3: str) -Text = unicode - - -# Constant that's True when type checking, but False here. -TYPE_CHECKING = False - - -class IO(Generic[AnyStr]): - """Generic base class for TextIO and BinaryIO. - - This is an abstract, generic version of the return of open(). - - NOTE: This does not distinguish between the different possible - classes (text vs. binary, read vs. write vs. read/write, - append-only, unbuffered). The TextIO and BinaryIO subclasses - below capture the distinctions between text vs. binary, which is - pervasive in the interface; however we currently do not offer a - way to track the other distinctions in the type system. - """ - - __slots__ = () - - @abstractproperty - def mode(self): - pass - - @abstractproperty - def name(self): - pass - - @abstractmethod - def close(self): - pass - - @abstractmethod - def closed(self): - pass - - @abstractmethod - def fileno(self): - pass - - @abstractmethod - def flush(self): - pass - - @abstractmethod - def isatty(self): - pass - - @abstractmethod - def read(self, n=-1): - pass - - @abstractmethod - def readable(self): - pass - - @abstractmethod - def readline(self, limit=-1): - pass - - @abstractmethod - def readlines(self, hint=-1): - pass - - @abstractmethod - def seek(self, offset, whence=0): - pass - - @abstractmethod - def seekable(self): - pass - - @abstractmethod - def tell(self): - pass - - @abstractmethod - def truncate(self, size=None): - pass - - @abstractmethod - def writable(self): - pass - - @abstractmethod - def write(self, s): - pass - - @abstractmethod - def writelines(self, lines): - pass - - @abstractmethod - def __enter__(self): - pass - - @abstractmethod - def __exit__(self, type, value, traceback): - pass - - -class BinaryIO(IO[bytes]): - """Typed version of the return of open() in binary mode.""" - - __slots__ = () - - @abstractmethod - def write(self, s): - pass - - @abstractmethod - def __enter__(self): - pass - - -class TextIO(IO[unicode]): - """Typed version of the return of open() in text mode.""" - - __slots__ = () - - @abstractproperty - def buffer(self): - pass - - @abstractproperty - def encoding(self): - pass - - @abstractproperty - def errors(self): - pass - - @abstractproperty - def line_buffering(self): - pass - - @abstractproperty - def newlines(self): - pass - - @abstractmethod - def __enter__(self): - pass - - -class io(object): - """Wrapper namespace for IO generic classes.""" - - __all__ = ['IO', 'TextIO', 'BinaryIO'] - IO = IO - TextIO = TextIO - BinaryIO = BinaryIO - - -io.__name__ = __name__ + b'.io' -sys.modules[io.__name__] = io - - -Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), - lambda p: p.pattern) -Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), - lambda m: m.re.pattern) - - -class re(object): - """Wrapper namespace for re type aliases.""" - - __all__ = ['Pattern', 'Match'] - Pattern = Pattern - Match = Match - - -re.__name__ = __name__ + b'.re' -sys.modules[re.__name__] = re diff --git a/lib-typing/3.2/mod_generics_cache.py 
b/lib-typing/3.2/mod_generics_cache.py deleted file mode 100644 index d9a60b4b28c3..000000000000 --- a/lib-typing/3.2/mod_generics_cache.py +++ /dev/null @@ -1,14 +0,0 @@ -"""Module for testing the behavior of generics across different modules.""" - -from typing import TypeVar, Generic - -T = TypeVar('T') - - -class A(Generic[T]): - pass - - -class B(Generic[T]): - class A(Generic[T]): - pass diff --git a/lib-typing/3.2/test_typing.py b/lib-typing/3.2/test_typing.py deleted file mode 100644 index 586a7870013a..000000000000 --- a/lib-typing/3.2/test_typing.py +++ /dev/null @@ -1,2422 +0,0 @@ -import contextlib -import collections -import pickle -import re -import sys -from unittest import TestCase, main, skipUnless, SkipTest -from copy import copy, deepcopy - -from typing import Any -from typing import TypeVar, AnyStr -from typing import T, KT, VT # Not in __all__. -from typing import Union, Optional -from typing import Tuple, List, MutableMapping -from typing import Callable -from typing import Generic, ClassVar, GenericMeta -from typing import cast -from typing import get_type_hints -from typing import no_type_check, no_type_check_decorator -from typing import Type -from typing import NewType -from typing import NamedTuple -from typing import IO, TextIO, BinaryIO -from typing import Pattern, Match -import abc -import typing -import weakref -try: - import collections.abc as collections_abc -except ImportError: - import collections as collections_abc # Fallback for PY3.2. - - -class BaseTestCase(TestCase): - - def assertIsSubclass(self, cls, class_or_tuple, msg=None): - if not issubclass(cls, class_or_tuple): - message = '%r is not a subclass of %r' % (cls, class_or_tuple) - if msg is not None: - message += ' : %s' % msg - raise self.failureException(message) - - def assertNotIsSubclass(self, cls, class_or_tuple, msg=None): - if issubclass(cls, class_or_tuple): - message = '%r is a subclass of %r' % (cls, class_or_tuple) - if msg is not None: - message += ' : %s' % msg - raise self.failureException(message) - - def clear_caches(self): - for f in typing._cleanups: - f() - - -class Employee: - pass - - -class Manager(Employee): - pass - - -class Founder(Employee): - pass - - -class ManagingFounder(Manager, Founder): - pass - - -class AnyTests(BaseTestCase): - - def test_any_instance_type_error(self): - with self.assertRaises(TypeError): - isinstance(42, Any) - - def test_any_subclass_type_error(self): - with self.assertRaises(TypeError): - issubclass(Employee, Any) - with self.assertRaises(TypeError): - issubclass(Any, Employee) - - def test_repr(self): - self.assertEqual(repr(Any), 'typing.Any') - - def test_errors(self): - with self.assertRaises(TypeError): - issubclass(42, Any) - with self.assertRaises(TypeError): - Any[int] # Any is not a generic type. - - def test_cannot_subclass(self): - with self.assertRaises(TypeError): - class A(Any): - pass - with self.assertRaises(TypeError): - class A(type(Any)): - pass - - def test_cannot_instantiate(self): - with self.assertRaises(TypeError): - Any() - with self.assertRaises(TypeError): - type(Any)() - - def test_any_works_with_alias(self): - # These expressions must simply not fail. - typing.Match[Any] - typing.Pattern[Any] - typing.IO[Any] - - -class TypeVarTests(BaseTestCase): - - def test_basic_plain(self): - T = TypeVar('T') - # T equals itself. 
- self.assertEqual(T, T) - # T is an instance of TypeVar - self.assertIsInstance(T, TypeVar) - - def test_typevar_instance_type_error(self): - T = TypeVar('T') - with self.assertRaises(TypeError): - isinstance(42, T) - - def test_typevar_subclass_type_error(self): - T = TypeVar('T') - with self.assertRaises(TypeError): - issubclass(int, T) - with self.assertRaises(TypeError): - issubclass(T, int) - - def test_constrained_error(self): - with self.assertRaises(TypeError): - X = TypeVar('X', int) - X - - def test_union_unique(self): - X = TypeVar('X') - Y = TypeVar('Y') - self.assertNotEqual(X, Y) - self.assertEqual(Union[X], X) - self.assertNotEqual(Union[X], Union[X, Y]) - self.assertEqual(Union[X, X], X) - self.assertNotEqual(Union[X, int], Union[X]) - self.assertNotEqual(Union[X, int], Union[int]) - self.assertEqual(Union[X, int].__args__, (X, int)) - self.assertEqual(Union[X, int].__parameters__, (X,)) - self.assertIs(Union[X, int].__origin__, Union) - - def test_union_constrained(self): - A = TypeVar('A', str, bytes) - self.assertNotEqual(Union[A, str], Union[A]) - - def test_repr(self): - self.assertEqual(repr(T), '~T') - self.assertEqual(repr(KT), '~KT') - self.assertEqual(repr(VT), '~VT') - self.assertEqual(repr(AnyStr), '~AnyStr') - T_co = TypeVar('T_co', covariant=True) - self.assertEqual(repr(T_co), '+T_co') - T_contra = TypeVar('T_contra', contravariant=True) - self.assertEqual(repr(T_contra), '-T_contra') - - def test_no_redefinition(self): - self.assertNotEqual(TypeVar('T'), TypeVar('T')) - self.assertNotEqual(TypeVar('T', int, str), TypeVar('T', int, str)) - - def test_cannot_subclass_vars(self): - with self.assertRaises(TypeError): - class V(TypeVar('T')): - pass - - def test_cannot_subclass_var_itself(self): - with self.assertRaises(TypeError): - class V(TypeVar): - pass - - def test_cannot_instantiate_vars(self): - with self.assertRaises(TypeError): - TypeVar('A')() - - def test_bound_errors(self): - with self.assertRaises(TypeError): - TypeVar('X', bound=42) - with self.assertRaises(TypeError): - TypeVar('X', str, float, bound=Employee) - - def test_no_bivariant(self): - with self.assertRaises(ValueError): - TypeVar('T', covariant=True, contravariant=True) - - -class UnionTests(BaseTestCase): - - def test_basics(self): - u = Union[int, float] - self.assertNotEqual(u, Union) - - def test_subclass_error(self): - with self.assertRaises(TypeError): - issubclass(int, Union) - with self.assertRaises(TypeError): - issubclass(Union, int) - with self.assertRaises(TypeError): - issubclass(int, Union[int, str]) - with self.assertRaises(TypeError): - issubclass(Union[int, str], int) - - def test_union_any(self): - u = Union[Any] - self.assertEqual(u, Any) - u1 = Union[int, Any] - u2 = Union[Any, int] - u3 = Union[Any, object] - self.assertEqual(u1, u2) - self.assertNotEqual(u1, Any) - self.assertNotEqual(u2, Any) - self.assertNotEqual(u3, Any) - - def test_union_object(self): - u = Union[object] - self.assertEqual(u, object) - u = Union[int, object] - self.assertEqual(u, object) - u = Union[object, int] - self.assertEqual(u, object) - - def test_unordered(self): - u1 = Union[int, float] - u2 = Union[float, int] - self.assertEqual(u1, u2) - - def test_single_class_disappears(self): - t = Union[Employee] - self.assertIs(t, Employee) - - def test_base_class_disappears(self): - u = Union[Employee, Manager, int] - self.assertEqual(u, Union[int, Employee]) - u = Union[Manager, int, Employee] - self.assertEqual(u, Union[int, Employee]) - u = Union[Employee, Manager] - self.assertIs(u, 
Employee) - - def test_union_union(self): - u = Union[int, float] - v = Union[u, Employee] - self.assertEqual(v, Union[int, float, Employee]) - - def test_repr(self): - self.assertEqual(repr(Union), 'typing.Union') - u = Union[Employee, int] - self.assertEqual(repr(u), 'typing.Union[%s.Employee, int]' % __name__) - u = Union[int, Employee] - self.assertEqual(repr(u), 'typing.Union[int, %s.Employee]' % __name__) - T = TypeVar('T') - u = Union[T, int][int] - self.assertEqual(repr(u), repr(int)) - u = Union[List[int], int] - self.assertEqual(repr(u), 'typing.Union[typing.List[int], int]') - - def test_cannot_subclass(self): - with self.assertRaises(TypeError): - class C(Union): - pass - with self.assertRaises(TypeError): - class C(type(Union)): - pass - with self.assertRaises(TypeError): - class C(Union[int, str]): - pass - - def test_cannot_instantiate(self): - with self.assertRaises(TypeError): - Union() - with self.assertRaises(TypeError): - type(Union)() - u = Union[int, float] - with self.assertRaises(TypeError): - u() - with self.assertRaises(TypeError): - type(u)() - - def test_union_generalization(self): - self.assertFalse(Union[str, typing.Iterable[int]] == str) - self.assertFalse(Union[str, typing.Iterable[int]] == typing.Iterable[int]) - self.assertTrue(Union[str, typing.Iterable] == typing.Iterable) - - def test_union_compare_other(self): - self.assertNotEqual(Union, object) - self.assertNotEqual(Union, Any) - self.assertNotEqual(ClassVar, Union) - self.assertNotEqual(Optional, Union) - self.assertNotEqual([None], Optional) - self.assertNotEqual(Optional, typing.Mapping) - self.assertNotEqual(Optional[typing.MutableMapping], Union) - - def test_optional(self): - o = Optional[int] - u = Union[int, None] - self.assertEqual(o, u) - - def test_empty(self): - with self.assertRaises(TypeError): - Union[()] - - def test_union_instance_type_error(self): - with self.assertRaises(TypeError): - isinstance(42, Union[int, str]) - - def test_no_eval_union(self): - u = Union[int, str] - def f(x: u): ... - self.assertIs(get_type_hints(f)['x'], u) - - def test_function_repr_union(self): - def fun() -> int: ... - self.assertEqual(repr(Union[fun, int]), 'typing.Union[fun, int]') - - def test_union_str_pattern(self): - # Shouldn't crash; see http://bugs.python.org/issue25390 - A = Union[str, Pattern] - A - - def test_etree(self): - # See https://github.com/python/typing/issues/229 - # (Only relevant for Python 2.) - try: - from xml.etree.cElementTree import Element - except ImportError: - raise SkipTest("cElementTree not found") - Union[Element, str] # Shouldn't crash - - def Elem(*args): - return Element(*args) - - Union[Elem, str] # Nor should this - - -class TupleTests(BaseTestCase): - - def test_basics(self): - with self.assertRaises(TypeError): - issubclass(Tuple, Tuple[int, str]) - with self.assertRaises(TypeError): - issubclass(tuple, Tuple[int, str]) - - class TP(tuple): ... 
- self.assertTrue(issubclass(tuple, Tuple)) - self.assertTrue(issubclass(TP, Tuple)) - - def test_equality(self): - self.assertEqual(Tuple[int], Tuple[int]) - self.assertEqual(Tuple[int, ...], Tuple[int, ...]) - self.assertNotEqual(Tuple[int], Tuple[int, int]) - self.assertNotEqual(Tuple[int], Tuple[int, ...]) - - def test_tuple_subclass(self): - class MyTuple(tuple): - pass - self.assertTrue(issubclass(MyTuple, Tuple)) - - def test_tuple_instance_type_error(self): - with self.assertRaises(TypeError): - isinstance((0, 0), Tuple[int, int]) - self.assertIsInstance((0, 0), Tuple) - - def test_repr(self): - self.assertEqual(repr(Tuple), 'typing.Tuple') - self.assertEqual(repr(Tuple[()]), 'typing.Tuple[()]') - self.assertEqual(repr(Tuple[int, float]), 'typing.Tuple[int, float]') - self.assertEqual(repr(Tuple[int, ...]), 'typing.Tuple[int, ...]') - - def test_errors(self): - with self.assertRaises(TypeError): - issubclass(42, Tuple) - with self.assertRaises(TypeError): - issubclass(42, Tuple[int]) - - -class CallableTests(BaseTestCase): - - def test_self_subclass(self): - with self.assertRaises(TypeError): - self.assertTrue(issubclass(type(lambda x: x), Callable[[int], int])) - self.assertTrue(issubclass(type(lambda x: x), Callable)) - - def test_eq_hash(self): - self.assertEqual(Callable[[int], int], Callable[[int], int]) - self.assertEqual(len({Callable[[int], int], Callable[[int], int]}), 1) - self.assertNotEqual(Callable[[int], int], Callable[[int], str]) - self.assertNotEqual(Callable[[int], int], Callable[[str], int]) - self.assertNotEqual(Callable[[int], int], Callable[[int, int], int]) - self.assertNotEqual(Callable[[int], int], Callable[[], int]) - self.assertNotEqual(Callable[[int], int], Callable) - - def test_cannot_instantiate(self): - with self.assertRaises(TypeError): - Callable() - with self.assertRaises(TypeError): - type(Callable)() - c = Callable[[int], str] - with self.assertRaises(TypeError): - c() - with self.assertRaises(TypeError): - type(c)() - - def test_callable_wrong_forms(self): - with self.assertRaises(TypeError): - Callable[[...], int] - with self.assertRaises(TypeError): - Callable[(), int] - with self.assertRaises(TypeError): - Callable[[()], int] - with self.assertRaises(TypeError): - Callable[[int, 1], 2] - with self.assertRaises(TypeError): - Callable[int] - - def test_callable_instance_works(self): - def f(): - pass - self.assertIsInstance(f, Callable) - self.assertNotIsInstance(None, Callable) - - def test_callable_instance_type_error(self): - def f(): - pass - with self.assertRaises(TypeError): - self.assertIsInstance(f, Callable[[], None]) - with self.assertRaises(TypeError): - self.assertIsInstance(f, Callable[[], Any]) - with self.assertRaises(TypeError): - self.assertNotIsInstance(None, Callable[[], None]) - with self.assertRaises(TypeError): - self.assertNotIsInstance(None, Callable[[], Any]) - - def test_repr(self): - ct0 = Callable[[], bool] - self.assertEqual(repr(ct0), 'typing.Callable[[], bool]') - ct2 = Callable[[str, float], int] - self.assertEqual(repr(ct2), 'typing.Callable[[str, float], int]') - ctv = Callable[..., str] - self.assertEqual(repr(ctv), 'typing.Callable[..., str]') - - def test_callable_with_ellipsis(self): - - def foo(a: Callable[..., T]): - pass - - self.assertEqual(get_type_hints(foo, globals(), locals()), - {'a': Callable[..., T]}) - - def test_ellipsis_in_generic(self): - # Shouldn't crash; see https://github.com/python/typing/issues/259 - typing.List[Callable[..., str]] - - -XK = TypeVar('XK', str, bytes) -XV = 
TypeVar('XV') - - -class SimpleMapping(Generic[XK, XV]): - - def __getitem__(self, key: XK) -> XV: - ... - - def __setitem__(self, key: XK, value: XV): - ... - - def get(self, key: XK, default: XV = None) -> XV: - ... - - -class MySimpleMapping(SimpleMapping[XK, XV]): - - def __init__(self): - self.store = {} - - def __getitem__(self, key: str): - return self.store[key] - - def __setitem__(self, key: str, value): - self.store[key] = value - - def get(self, key: str, default=None): - try: - return self.store[key] - except KeyError: - return default - - -class ProtocolTests(BaseTestCase): - - def test_supports_int(self): - self.assertIsSubclass(int, typing.SupportsInt) - self.assertNotIsSubclass(str, typing.SupportsInt) - - def test_supports_float(self): - self.assertIsSubclass(float, typing.SupportsFloat) - self.assertNotIsSubclass(str, typing.SupportsFloat) - - def test_supports_complex(self): - - # Note: complex itself doesn't have __complex__. - class C: - def __complex__(self): - return 0j - - self.assertIsSubclass(C, typing.SupportsComplex) - self.assertNotIsSubclass(str, typing.SupportsComplex) - - def test_supports_bytes(self): - - # Note: bytes itself doesn't have __bytes__. - class B: - def __bytes__(self): - return b'' - - self.assertIsSubclass(B, typing.SupportsBytes) - self.assertNotIsSubclass(str, typing.SupportsBytes) - - def test_supports_abs(self): - self.assertIsSubclass(float, typing.SupportsAbs) - self.assertIsSubclass(int, typing.SupportsAbs) - self.assertNotIsSubclass(str, typing.SupportsAbs) - - def test_supports_round(self): - issubclass(float, typing.SupportsRound) - self.assertIsSubclass(float, typing.SupportsRound) - self.assertIsSubclass(int, typing.SupportsRound) - self.assertNotIsSubclass(str, typing.SupportsRound) - - def test_reversible(self): - self.assertIsSubclass(list, typing.Reversible) - self.assertNotIsSubclass(int, typing.Reversible) - - def test_protocol_instance_type_error(self): - with self.assertRaises(TypeError): - isinstance(0, typing.SupportsAbs) - class C1(typing.SupportsInt): - def __int__(self) -> int: - return 42 - class C2(C1): - pass - c = C2() - self.assertIsInstance(c, C1) - - -class GenericTests(BaseTestCase): - - def test_basics(self): - X = SimpleMapping[str, Any] - self.assertEqual(X.__parameters__, ()) - with self.assertRaises(TypeError): - X[str] - with self.assertRaises(TypeError): - X[str, str] - Y = SimpleMapping[XK, str] - self.assertEqual(Y.__parameters__, (XK,)) - Y[str] - with self.assertRaises(TypeError): - Y[str, str] - self.assertIsSubclass(SimpleMapping[str, int], SimpleMapping) - - def test_generic_errors(self): - T = TypeVar('T') - S = TypeVar('S') - with self.assertRaises(TypeError): - Generic[T]() - with self.assertRaises(TypeError): - Generic[T][T] - with self.assertRaises(TypeError): - Generic[T][S] - with self.assertRaises(TypeError): - isinstance([], List[int]) - with self.assertRaises(TypeError): - issubclass(list, List[int]) - with self.assertRaises(TypeError): - class NewGeneric(Generic): ... - with self.assertRaises(TypeError): - class MyGeneric(Generic[T], Generic[S]): ... - with self.assertRaises(TypeError): - class MyGeneric(List[T], Generic[S]): ... - - def test_init(self): - T = TypeVar('T') - S = TypeVar('S') - with self.assertRaises(TypeError): - Generic[T, T] - with self.assertRaises(TypeError): - Generic[T, S, T] - - def test_repr(self): - self.assertEqual(repr(SimpleMapping), - __name__ + '.' + 'SimpleMapping') - self.assertEqual(repr(MySimpleMapping), - __name__ + '.' 
+ 'MySimpleMapping') - - def test_chain_repr(self): - T = TypeVar('T') - S = TypeVar('S') - - class C(Generic[T]): - pass - - X = C[Tuple[S, T]] - self.assertEqual(X, C[Tuple[S, T]]) - self.assertNotEqual(X, C[Tuple[T, S]]) - - Y = X[T, int] - self.assertEqual(Y, X[T, int]) - self.assertNotEqual(Y, X[S, int]) - self.assertNotEqual(Y, X[T, str]) - - Z = Y[str] - self.assertEqual(Z, Y[str]) - self.assertNotEqual(Z, Y[int]) - self.assertNotEqual(Z, Y[T]) - - self.assertTrue(str(Z).endswith( - '.C[typing.Tuple[str, int]]')) - - def test_new_repr(self): - T = TypeVar('T') - U = TypeVar('U', covariant=True) - S = TypeVar('S') - - self.assertEqual(repr(List), 'typing.List') - self.assertEqual(repr(List[T]), 'typing.List[~T]') - self.assertEqual(repr(List[U]), 'typing.List[+U]') - self.assertEqual(repr(List[S][T][int]), 'typing.List[int]') - self.assertEqual(repr(List[int]), 'typing.List[int]') - - def test_new_repr_complex(self): - T = TypeVar('T') - TS = TypeVar('TS') - - self.assertEqual(repr(typing.Mapping[T, TS][TS, T]), 'typing.Mapping[~TS, ~T]') - self.assertEqual(repr(List[Tuple[T, TS]][int, T]), - 'typing.List[typing.Tuple[int, ~T]]') - self.assertEqual( - repr(List[Tuple[T, T]][List[int]]), - 'typing.List[typing.Tuple[typing.List[int], typing.List[int]]]' - ) - - def test_new_repr_bare(self): - T = TypeVar('T') - self.assertEqual(repr(Generic[T]), 'typing.Generic[~T]') - self.assertEqual(repr(typing._Protocol[T]), 'typing.Protocol[~T]') - class C(typing.Dict[Any, Any]): ... - # this line should just work - repr(C.__mro__) - - def test_dict(self): - T = TypeVar('T') - - class B(Generic[T]): - pass - - b = B() - b.foo = 42 - self.assertEqual(b.__dict__, {'foo': 42}) - - class C(B[int]): - pass - - c = C() - c.bar = 'abc' - self.assertEqual(c.__dict__, {'bar': 'abc'}) - - def test_subscripted_generics_as_proxies(self): - T = TypeVar('T') - class C(Generic[T]): - x = 'def' - self.assertEqual(C[int].x, 'def') - self.assertEqual(C[C[int]].x, 'def') - C[C[int]].x = 'changed' - self.assertEqual(C.x, 'changed') - self.assertEqual(C[str].x, 'changed') - C[List[str]].z = 'new' - self.assertEqual(C.z, 'new') - self.assertEqual(C[Tuple[int]].z, 'new') - - self.assertEqual(C().x, 'changed') - self.assertEqual(C[Tuple[str]]().z, 'new') - - class D(C[T]): - pass - self.assertEqual(D[int].x, 'changed') - self.assertEqual(D.z, 'new') - D.z = 'from derived z' - D[int].x = 'from derived x' - self.assertEqual(C.x, 'changed') - self.assertEqual(C[int].z, 'new') - self.assertEqual(D.x, 'from derived x') - self.assertEqual(D[str].z, 'from derived z') - - def test_abc_registry_kept(self): - T = TypeVar('T') - class C(Generic[T]): ... 
- C.register(int) - self.assertIsInstance(1, C) - C[int] - self.assertIsInstance(1, C) - - def test_false_subclasses(self): - class MyMapping(MutableMapping[str, str]): pass - self.assertNotIsInstance({}, MyMapping) - self.assertNotIsSubclass(dict, MyMapping) - - def test_abc_bases(self): - class MM(MutableMapping[str, str]): - def __getitem__(self, k): - return None - def __setitem__(self, k, v): - pass - def __delitem__(self, k): - pass - def __iter__(self): - return iter(()) - def __len__(self): - return 0 - # this should just work - MM().update() - self.assertIsInstance(MM(), collections_abc.MutableMapping) - self.assertIsInstance(MM(), MutableMapping) - self.assertNotIsInstance(MM(), List) - self.assertNotIsInstance({}, MM) - - def test_multiple_bases(self): - class MM1(MutableMapping[str, str], collections_abc.MutableMapping): - pass - with self.assertRaises(TypeError): - # consistent MRO not possible - class MM2(collections_abc.MutableMapping, MutableMapping[str, str]): - pass - - def test_orig_bases(self): - T = TypeVar('T') - class C(typing.Dict[str, T]): ... - self.assertEqual(C.__orig_bases__, (typing.Dict[str, T],)) - - def test_naive_runtime_checks(self): - def naive_dict_check(obj, tp): - # Check if a dictionary conforms to Dict type - if len(tp.__parameters__) > 0: - raise NotImplementedError - if tp.__args__: - KT, VT = tp.__args__ - return all( - isinstance(k, KT) and isinstance(v, VT) - for k, v in obj.items() - ) - self.assertTrue(naive_dict_check({'x': 1}, typing.Dict[str, int])) - self.assertFalse(naive_dict_check({1: 'x'}, typing.Dict[str, int])) - with self.assertRaises(NotImplementedError): - naive_dict_check({1: 'x'}, typing.Dict[str, T]) - - def naive_generic_check(obj, tp): - # Check if an instance conforms to the generic class - if not hasattr(obj, '__orig_class__'): - raise NotImplementedError - return obj.__orig_class__ == tp - class Node(Generic[T]): ... - self.assertTrue(naive_generic_check(Node[int](), Node[int])) - self.assertFalse(naive_generic_check(Node[str](), Node[int])) - self.assertFalse(naive_generic_check(Node[str](), List)) - with self.assertRaises(NotImplementedError): - naive_generic_check([1, 2, 3], Node[int]) - - def naive_list_base_check(obj, tp): - # Check if list conforms to a List subclass - return all(isinstance(x, tp.__orig_bases__[0].__args__[0]) - for x in obj) - class C(List[int]): ... - self.assertTrue(naive_list_base_check([1, 2, 3], C)) - self.assertFalse(naive_list_base_check(['a', 'b'], C)) - - def test_multi_subscr_base(self): - T = TypeVar('T') - U = TypeVar('U') - V = TypeVar('V') - class C(List[T][U][V]): ... - class D(C, List[T][U][V]): ... 
- self.assertEqual(C.__parameters__, (V,)) - self.assertEqual(D.__parameters__, (V,)) - self.assertEqual(C[int].__parameters__, ()) - self.assertEqual(D[int].__parameters__, ()) - self.assertEqual(C[int].__args__, (int,)) - self.assertEqual(D[int].__args__, (int,)) - self.assertEqual(C.__bases__, (List,)) - self.assertEqual(D.__bases__, (C, List)) - self.assertEqual(C.__orig_bases__, (List[T][U][V],)) - self.assertEqual(D.__orig_bases__, (C, List[T][U][V])) - - def test_subscript_meta(self): - T = TypeVar('T') - self.assertEqual(Type[GenericMeta], Type[GenericMeta]) - self.assertEqual(Union[T, int][GenericMeta], Union[GenericMeta, int]) - self.assertEqual(Callable[..., GenericMeta].__args__, (Ellipsis, GenericMeta)) - - def test_generic_hashes(self): - try: - from test import mod_generics_cache - except ImportError: # for Python 3.4 and previous versions - import mod_generics_cache - class A(Generic[T]): - ... - - class B(Generic[T]): - class A(Generic[T]): - ... - - self.assertEqual(A, A) - self.assertEqual(mod_generics_cache.A[str], mod_generics_cache.A[str]) - self.assertEqual(B.A, B.A) - self.assertEqual(mod_generics_cache.B.A[B.A[str]], - mod_generics_cache.B.A[B.A[str]]) - - self.assertNotEqual(A, B.A) - self.assertNotEqual(A, mod_generics_cache.A) - self.assertNotEqual(A, mod_generics_cache.B.A) - self.assertNotEqual(B.A, mod_generics_cache.A) - self.assertNotEqual(B.A, mod_generics_cache.B.A) - - self.assertNotEqual(A[str], B.A[str]) - self.assertNotEqual(A[List[Any]], B.A[List[Any]]) - self.assertNotEqual(A[str], mod_generics_cache.A[str]) - self.assertNotEqual(A[str], mod_generics_cache.B.A[str]) - self.assertNotEqual(B.A[int], mod_generics_cache.A[int]) - self.assertNotEqual(B.A[List[Any]], mod_generics_cache.B.A[List[Any]]) - - self.assertNotEqual(Tuple[A[str]], Tuple[B.A[str]]) - self.assertNotEqual(Tuple[A[List[Any]]], Tuple[B.A[List[Any]]]) - self.assertNotEqual(Union[str, A[str]], Union[str, mod_generics_cache.A[str]]) - self.assertNotEqual(Union[A[str], A[str]], - Union[A[str], mod_generics_cache.A[str]]) - self.assertNotEqual(typing.FrozenSet[A[str]], - typing.FrozenSet[mod_generics_cache.B.A[str]]) - - if sys.version_info[:2] > (3, 2): - self.assertTrue(repr(Tuple[A[str]]).endswith('.A[str]]')) - self.assertTrue(repr(Tuple[B.A[str]]).endswith('.B.A[str]]')) - self.assertTrue(repr(Tuple[mod_generics_cache.A[str]]) - .endswith('mod_generics_cache.A[str]]')) - self.assertTrue(repr(Tuple[mod_generics_cache.B.A[str]]) - .endswith('mod_generics_cache.B.A[str]]')) - - def test_extended_generic_rules_eq(self): - T = TypeVar('T') - U = TypeVar('U') - self.assertEqual(Tuple[T, T][int], Tuple[int, int]) - self.assertEqual(typing.Iterable[Tuple[T, T]][T], typing.Iterable[Tuple[T, T]]) - with self.assertRaises(TypeError): - Tuple[T, int][()] - with self.assertRaises(TypeError): - Tuple[T, U][T, ...] - - self.assertEqual(Union[T, int][int], int) - self.assertEqual(Union[T, U][int, Union[int, str]], Union[int, str]) - class Base: ... - class Derived(Base): ... 
- self.assertEqual(Union[T, Base][Derived], Base) - with self.assertRaises(TypeError): - Union[T, int][1] - - self.assertEqual(Callable[[T], T][KT], Callable[[KT], KT]) - self.assertEqual(Callable[..., List[T]][int], Callable[..., List[int]]) - with self.assertRaises(TypeError): - Callable[[T], U][..., int] - with self.assertRaises(TypeError): - Callable[[T], U][[], int] - - def test_extended_generic_rules_repr(self): - T = TypeVar('T') - self.assertEqual(repr(Union[Tuple, Callable]).replace('typing.', ''), - 'Union[Tuple, Callable]') - self.assertEqual(repr(Union[Tuple, Tuple[int]]).replace('typing.', ''), - 'Tuple') - self.assertEqual(repr(Callable[..., Optional[T]][int]).replace('typing.', ''), - 'Callable[..., Union[int, NoneType]]') - self.assertEqual(repr(Callable[[], List[T]][int]).replace('typing.', ''), - 'Callable[[], List[int]]') - - def test_generic_forward_ref(self): - def foobar(x: List[List['CC']]): ... - class CC: ... - self.assertEqual( - get_type_hints(foobar, globals(), locals()), - {'x': List[List[CC]]} - ) - T = TypeVar('T') - AT = Tuple[T, ...] - def barfoo(x: AT): ... - self.assertIs(get_type_hints(barfoo, globals(), locals())['x'], AT) - CT = Callable[..., List[T]] - def barfoo2(x: CT): ... - self.assertIs(get_type_hints(barfoo2, globals(), locals())['x'], CT) - - def test_extended_generic_rules_subclassing(self): - class T1(Tuple[T, KT]): ... - class T2(Tuple[T, ...]): ... - class C1(Callable[[T], T]): ... - class C2(Callable[..., int]): - def __call__(self): - return None - - self.assertEqual(T1.__parameters__, (T, KT)) - self.assertEqual(T1[int, str].__args__, (int, str)) - self.assertEqual(T1[int, T].__origin__, T1) - - self.assertEqual(T2.__parameters__, (T,)) - with self.assertRaises(TypeError): - T1[int] - with self.assertRaises(TypeError): - T2[int, str] - - self.assertEqual(repr(C1[int]).split('.')[-1], 'C1[int]') - self.assertEqual(C2.__parameters__, ()) - self.assertIsInstance(C2(), collections_abc.Callable) - self.assertIsSubclass(C2, collections_abc.Callable) - self.assertIsSubclass(C1, collections_abc.Callable) - self.assertIsInstance(T1(), tuple) - self.assertIsSubclass(T2, tuple) - self.assertIsSubclass(Tuple[int, ...], typing.Sequence) - self.assertIsSubclass(Tuple[int, ...], typing.Iterable) - - def test_fail_with_bare_union(self): - with self.assertRaises(TypeError): - List[Union] - with self.assertRaises(TypeError): - Tuple[Optional] - with self.assertRaises(TypeError): - ClassVar[ClassVar] - with self.assertRaises(TypeError): - List[ClassVar[int]] - - def test_fail_with_bare_generic(self): - T = TypeVar('T') - with self.assertRaises(TypeError): - List[Generic] - with self.assertRaises(TypeError): - Tuple[Generic[T]] - with self.assertRaises(TypeError): - List[typing._Protocol] - with self.assertRaises(TypeError): - isinstance(1, Generic) - - def test_type_erasure_special(self): - T = TypeVar('T') - # this is the only test that checks type caching - self.clear_caches() - class MyTup(Tuple[T, T]): ... - self.assertIs(MyTup[int]().__class__, MyTup) - self.assertIs(MyTup[int]().__orig_class__, MyTup[int]) - class MyCall(Callable[..., T]): - def __call__(self): return None - self.assertIs(MyCall[T]().__class__, MyCall) - self.assertIs(MyCall[T]().__orig_class__, MyCall[T]) - class MyDict(typing.Dict[T, T]): ... - self.assertIs(MyDict[int]().__class__, MyDict) - self.assertIs(MyDict[int]().__orig_class__, MyDict[int]) - class MyDef(typing.DefaultDict[str, T]): ... 
- self.assertIs(MyDef[int]().__class__, MyDef) - self.assertIs(MyDef[int]().__orig_class__, MyDef[int]) - # ChainMap was added in 3.3 - if sys.version_info >= (3, 3): - class MyChain(typing.ChainMap[str, T]): ... - self.assertIs(MyChain[int]().__class__, MyChain) - self.assertIs(MyChain[int]().__orig_class__, MyChain[int]) - - def test_all_repr_eq_any(self): - objs = (getattr(typing, el) for el in typing.__all__) - for obj in objs: - self.assertNotEqual(repr(obj), '') - self.assertEqual(obj, obj) - if getattr(obj, '__parameters__', None) and len(obj.__parameters__) == 1: - self.assertEqual(obj[Any].__args__, (Any,)) - if isinstance(obj, type): - for base in obj.__mro__: - self.assertNotEqual(repr(base), '') - self.assertEqual(base, base) - - def test_substitution_helper(self): - T = TypeVar('T') - KT = TypeVar('KT') - VT = TypeVar('VT') - class Map(Generic[KT, VT]): - def meth(self, k: KT, v: VT): ... - StrMap = Map[str, T] - obj = StrMap[int]() - - new_args = typing._subs_tree(obj.__orig_class__) - new_annots = {k: typing._replace_arg(v, type(obj).__parameters__, new_args) - for k, v in obj.meth.__annotations__.items()} - - self.assertEqual(new_annots, {'k': str, 'v': int}) - - def test_pickle(self): - global C # pickle wants to reference the class by name - T = TypeVar('T') - - class B(Generic[T]): - pass - - class C(B[int]): - pass - - c = C() - c.foo = 42 - c.bar = 'abc' - for proto in range(pickle.HIGHEST_PROTOCOL + 1): - z = pickle.dumps(c, proto) - x = pickle.loads(z) - self.assertEqual(x.foo, 42) - self.assertEqual(x.bar, 'abc') - self.assertEqual(x.__dict__, {'foo': 42, 'bar': 'abc'}) - simples = [Any, Union, Tuple, Callable, ClassVar, List, typing.Iterable] - for s in simples: - for proto in range(pickle.HIGHEST_PROTOCOL + 1): - z = pickle.dumps(s, proto) - x = pickle.loads(z) - self.assertEqual(s, x) - - def test_copy_and_deepcopy(self): - T = TypeVar('T') - class Node(Generic[T]): ... - things = [Union[T, int], Tuple[T, int], Callable[..., T], Callable[[int], int], - Tuple[Any, Any], Node[T], Node[int], Node[Any], typing.Iterable[T], - typing.Iterable[Any], typing.Iterable[int], typing.Dict[int, str], - typing.Dict[T, Any], ClassVar[int], ClassVar[List[T]], Tuple['T', 'T'], - Union['T', int], List['T'], typing.Mapping['T', int]] - for t in things + [Any]: - self.assertEqual(t, copy(t)) - self.assertEqual(t, deepcopy(t)) - - def test_weakref_all(self): - T = TypeVar('T') - things = [Any, Union[T, int], Callable[..., T], Tuple[Any, Any], - Optional[List[int]], typing.Mapping[int, str], - typing.re.Match[bytes], typing.Iterable['whatever']] - for t in things: - self.assertEqual(weakref.ref(t)(), t) - - def test_parameterized_slots(self): - T = TypeVar('T') - class C(Generic[T]): - __slots__ = ('potato',) - - c = C() - c_int = C[int]() - self.assertEqual(C.__slots__, C[str].__slots__) - - c.potato = 0 - c_int.potato = 0 - with self.assertRaises(AttributeError): - c.tomato = 0 - with self.assertRaises(AttributeError): - c_int.tomato = 0 - - def foo(x: C['C']): ... 
- self.assertEqual(get_type_hints(foo, globals(), locals())['x'], C[C]) - self.assertEqual(get_type_hints(foo, globals(), locals())['x'].__slots__, - C.__slots__) - self.assertEqual(copy(C[int]), deepcopy(C[int])) - - def test_parameterized_slots_dict(self): - T = TypeVar('T') - class D(Generic[T]): - __slots__ = {'banana': 42} - - d = D() - d_int = D[int]() - self.assertEqual(D.__slots__, D[str].__slots__) - - d.banana = 'yes' - d_int.banana = 'yes' - with self.assertRaises(AttributeError): - d.foobar = 'no' - with self.assertRaises(AttributeError): - d_int.foobar = 'no' - - def test_errors(self): - with self.assertRaises(TypeError): - B = SimpleMapping[XK, Any] - - class C(Generic[B]): - pass - - def test_repr_2(self): - PY32 = sys.version_info[:2] < (3, 3) - - class C(Generic[T]): - pass - - self.assertEqual(C.__module__, __name__) - if not PY32: - self.assertEqual(C.__qualname__, - 'GenericTests.test_repr_2..C') - self.assertEqual(repr(C).split('.')[-1], 'C') - X = C[int] - self.assertEqual(X.__module__, __name__) - if not PY32: - self.assertTrue(X.__qualname__.endswith('..C')) - self.assertEqual(repr(X).split('.')[-1], 'C[int]') - - class Y(C[int]): - pass - - self.assertEqual(Y.__module__, __name__) - if not PY32: - self.assertEqual(Y.__qualname__, - 'GenericTests.test_repr_2..Y') - self.assertEqual(repr(Y).split('.')[-1], 'Y') - - def test_eq_1(self): - self.assertEqual(Generic, Generic) - self.assertEqual(Generic[T], Generic[T]) - self.assertNotEqual(Generic[KT], Generic[VT]) - - def test_eq_2(self): - - class A(Generic[T]): - pass - - class B(Generic[T]): - pass - - self.assertEqual(A, A) - self.assertNotEqual(A, B) - self.assertEqual(A[T], A[T]) - self.assertNotEqual(A[T], B[T]) - - def test_multiple_inheritance(self): - - class A(Generic[T, VT]): - pass - - class B(Generic[KT, T]): - pass - - class C(A[T, VT], Generic[VT, T, KT], B[KT, T]): - pass - - self.assertEqual(C.__parameters__, (VT, T, KT)) - - def test_nested(self): - - G = Generic - - class Visitor(G[T]): - - a = None - - def set(self, a: T): - self.a = a - - def get(self): - return self.a - - def visit(self) -> T: - return self.a - - V = Visitor[typing.List[int]] - - class IntListVisitor(V): - - def append(self, x: int): - self.a.append(x) - - a = IntListVisitor() - a.set([]) - a.append(1) - a.append(42) - self.assertEqual(a.get(), [1, 42]) - - def test_type_erasure(self): - T = TypeVar('T') - - class Node(Generic[T]): - def __init__(self, label: T, - left: 'Node[T]' = None, - right: 'Node[T]' = None): - self.label = label # type: T - self.left = left # type: Optional[Node[T]] - self.right = right # type: Optional[Node[T]] - - def foo(x: T): - a = Node(x) - b = Node[T](x) - c = Node[Any](x) - self.assertIs(type(a), Node) - self.assertIs(type(b), Node) - self.assertIs(type(c), Node) - self.assertEqual(a.label, x) - self.assertEqual(b.label, x) - self.assertEqual(c.label, x) - - foo(42) - - def test_implicit_any(self): - T = TypeVar('T') - - class C(Generic[T]): - pass - - class D(C): - pass - - self.assertEqual(D.__parameters__, ()) - - with self.assertRaises(Exception): - D[int] - with self.assertRaises(Exception): - D[Any] - with self.assertRaises(Exception): - D[T] - - -class ClassVarTests(BaseTestCase): - - def test_basics(self): - with self.assertRaises(TypeError): - ClassVar[1] - with self.assertRaises(TypeError): - ClassVar[int, str] - with self.assertRaises(TypeError): - ClassVar[int][str] - - def test_repr(self): - self.assertEqual(repr(ClassVar), 'typing.ClassVar') - cv = ClassVar[int] - 
self.assertEqual(repr(cv), 'typing.ClassVar[int]') - cv = ClassVar[Employee] - self.assertEqual(repr(cv), 'typing.ClassVar[%s.Employee]' % __name__) - - def test_cannot_subclass(self): - with self.assertRaises(TypeError): - class C(type(ClassVar)): - pass - with self.assertRaises(TypeError): - class C(type(ClassVar[int])): - pass - - def test_cannot_init(self): - with self.assertRaises(TypeError): - ClassVar() - with self.assertRaises(TypeError): - type(ClassVar)() - with self.assertRaises(TypeError): - type(ClassVar[Optional[int]])() - - def test_no_isinstance(self): - with self.assertRaises(TypeError): - isinstance(1, ClassVar[int]) - with self.assertRaises(TypeError): - issubclass(int, ClassVar) - - -class CastTests(BaseTestCase): - - def test_basics(self): - self.assertEqual(cast(int, 42), 42) - self.assertEqual(cast(float, 42), 42) - self.assertIs(type(cast(float, 42)), int) - self.assertEqual(cast(Any, 42), 42) - self.assertEqual(cast(list, 42), 42) - self.assertEqual(cast(Union[str, float], 42), 42) - self.assertEqual(cast(AnyStr, 42), 42) - self.assertEqual(cast(None, 42), 42) - - def test_errors(self): - # Bogus calls are not expected to fail. - cast(42, 42) - cast('hello', 42) - - -class ForwardRefTests(BaseTestCase): - - def test_basics(self): - - class Node(Generic[T]): - - def __init__(self, label: T): - self.label = label - self.left = self.right = None - - def add_both(self, - left: 'Optional[Node[T]]', - right: 'Node[T]' = None, - stuff: int = None, - blah=None): - self.left = left - self.right = right - - def add_left(self, node: Optional['Node[T]']): - self.add_both(node, None) - - def add_right(self, node: 'Node[T]' = None): - self.add_both(None, node) - - t = Node[int] - both_hints = get_type_hints(t.add_both, globals(), locals()) - self.assertEqual(both_hints['left'], Optional[Node[T]]) - self.assertEqual(both_hints['right'], Optional[Node[T]]) - self.assertEqual(both_hints['left'], both_hints['right']) - self.assertEqual(both_hints['stuff'], Optional[int]) - self.assertNotIn('blah', both_hints) - - left_hints = get_type_hints(t.add_left, globals(), locals()) - self.assertEqual(left_hints['node'], Optional[Node[T]]) - - right_hints = get_type_hints(t.add_right, globals(), locals()) - self.assertEqual(right_hints['node'], Optional[Node[T]]) - - def test_forwardref_instance_type_error(self): - fr = typing._ForwardRef('int') - with self.assertRaises(TypeError): - isinstance(42, fr) - - def test_forwardref_subclass_type_error(self): - fr = typing._ForwardRef('int') - with self.assertRaises(TypeError): - issubclass(int, fr) - - def test_forward_equality(self): - fr = typing._ForwardRef('int') - self.assertEqual(fr, typing._ForwardRef('int')) - self.assertNotEqual(List['int'], List[int]) - - def test_forward_repr(self): - self.assertEqual(repr(List['int']), "typing.List[_ForwardRef('int')]") - - def test_union_forward(self): - - def foo(a: Union['T']): - pass - - self.assertEqual(get_type_hints(foo, globals(), locals()), - {'a': Union[T]}) - - def test_tuple_forward(self): - - def foo(a: Tuple['T']): - pass - - self.assertEqual(get_type_hints(foo, globals(), locals()), - {'a': Tuple[T]}) - - def test_callable_forward(self): - - def foo(a: Callable[['T'], 'T']): - pass - - self.assertEqual(get_type_hints(foo, globals(), locals()), - {'a': Callable[[T], T]}) - - def test_callable_with_ellipsis_forward(self): - - def foo(a: 'Callable[..., T]'): - pass - - self.assertEqual(get_type_hints(foo, globals(), locals()), - {'a': Callable[..., T]}) - - def test_syntax_error(self): - - 
with self.assertRaises(SyntaxError): - Generic['/T'] - - def test_delayed_syntax_error(self): - - def foo(a: 'Node[T'): - pass - - with self.assertRaises(SyntaxError): - get_type_hints(foo) - - def test_type_error(self): - - def foo(a: Tuple['42']): - pass - - with self.assertRaises(TypeError): - get_type_hints(foo) - - def test_name_error(self): - - def foo(a: 'Noode[T]'): - pass - - with self.assertRaises(NameError): - get_type_hints(foo, locals()) - - def test_no_type_check(self): - - @no_type_check - def foo(a: 'whatevers') -> {}: - pass - - th = get_type_hints(foo) - self.assertEqual(th, {}) - - def test_no_type_check_class(self): - - @no_type_check - class C: - def foo(a: 'whatevers') -> {}: - pass - - cth = get_type_hints(C.foo) - self.assertEqual(cth, {}) - ith = get_type_hints(C().foo) - self.assertEqual(ith, {}) - - def test_no_type_check_no_bases(self): - class C: - def meth(self, x: int): ... - @no_type_check - class D(C): - c = C - # verify that @no_type_check never affects bases - self.assertEqual(get_type_hints(C.meth), {'x': int}) - - def test_meta_no_type_check(self): - - @no_type_check_decorator - def magic_decorator(deco): - return deco - - self.assertEqual(magic_decorator.__name__, 'magic_decorator') - - @magic_decorator - def foo(a: 'whatevers') -> {}: - pass - - @magic_decorator - class C: - def foo(a: 'whatevers') -> {}: - pass - - self.assertEqual(foo.__name__, 'foo') - th = get_type_hints(foo) - self.assertEqual(th, {}) - cth = get_type_hints(C.foo) - self.assertEqual(cth, {}) - ith = get_type_hints(C().foo) - self.assertEqual(ith, {}) - - def test_default_globals(self): - code = ("class C:\n" - " def foo(self, a: 'C') -> 'D': pass\n" - "class D:\n" - " def bar(self, b: 'D') -> C: pass\n" - ) - ns = {} - exec(code, ns) - hints = get_type_hints(ns['C'].foo) - self.assertEqual(hints, {'a': ns['C'], 'return': ns['D']}) - - -class OverloadTests(BaseTestCase): - - def test_overload_fails(self): - from typing import overload - - with self.assertRaises(RuntimeError): - - @overload - def blah(): - pass - - blah() - - def test_overload_succeeds(self): - from typing import overload - - @overload - def blah(): - pass - - def blah(): - pass - - blah() - - -ASYNCIO = sys.version_info[:2] >= (3, 5) - -ASYNCIO_TESTS = """ -import asyncio - -T_a = TypeVar('T_a') - -class AwaitableWrapper(typing.Awaitable[T_a]): - - def __init__(self, value): - self.value = value - - def __await__(self) -> typing.Iterator[T_a]: - yield - return self.value - -class AsyncIteratorWrapper(typing.AsyncIterator[T_a]): - - def __init__(self, value: typing.Iterable[T_a]): - self.value = value - - def __aiter__(self) -> typing.AsyncIterator[T_a]: - return self - - @asyncio.coroutine - def __anext__(self) -> T_a: - data = yield from self.value - if data: - return data - else: - raise StopAsyncIteration -""" - -if ASYNCIO: - try: - exec(ASYNCIO_TESTS) - except ImportError: - ASYNCIO = False -else: - # fake names for the sake of static analysis - asyncio = None - AwaitableWrapper = AsyncIteratorWrapper = object - -PY36 = sys.version_info[:2] >= (3, 6) - -PY36_TESTS = """ -from test import ann_module, ann_module2, ann_module3 - -class A: - y: float -class B(A): - x: ClassVar[Optional['B']] = None - y: int - b: int -class CSub(B): - z: ClassVar['CSub'] = B() -class G(Generic[T]): - lst: ClassVar[List[T]] = [] - -class NoneAndForward: - parent: 'NoneAndForward' - meaning: None - -class CoolEmployee(NamedTuple): - name: str - cool: int - -class CoolEmployeeWithDefault(NamedTuple): - name: str - cool: int = 0 - 
-class XMeth(NamedTuple): - x: int - def double(self): - return 2 * self.x - -class XRepr(NamedTuple): - x: int - y: int = 1 - def __str__(self): - return f'{self.x} -> {self.y}' - def __add__(self, other): - return 0 -""" - -if PY36: - exec(PY36_TESTS) -else: - # fake names for the sake of static analysis - ann_module = ann_module2 = ann_module3 = None - A = B = CSub = G = CoolEmployee = CoolEmployeeWithDefault = object - XMeth = XRepr = NoneAndForward = object - -gth = get_type_hints - - -class GetTypeHintTests(BaseTestCase): - def test_get_type_hints_from_various_objects(self): - # For invalid objects should fail with TypeError (not AttributeError etc). - with self.assertRaises(TypeError): - gth(123) - with self.assertRaises(TypeError): - gth('abc') - with self.assertRaises(TypeError): - gth(None) - - @skipUnless(PY36, 'Python 3.6 required') - def test_get_type_hints_modules(self): - ann_module_type_hints = {1: 2, 'f': Tuple[int, int], 'x': int, 'y': str} - self.assertEqual(gth(ann_module), ann_module_type_hints) - self.assertEqual(gth(ann_module2), {}) - self.assertEqual(gth(ann_module3), {}) - - @skipUnless(PY36, 'Python 3.6 required') - def test_get_type_hints_classes(self): - self.assertEqual(gth(ann_module.C, ann_module.__dict__), - {'y': Optional[ann_module.C]}) - self.assertIsInstance(gth(ann_module.j_class), dict) - self.assertEqual(gth(ann_module.M), {'123': 123, 'o': type}) - self.assertEqual(gth(ann_module.D), - {'j': str, 'k': str, 'y': Optional[ann_module.C]}) - self.assertEqual(gth(ann_module.Y), {'z': int}) - self.assertEqual(gth(ann_module.h_class), - {'y': Optional[ann_module.C]}) - self.assertEqual(gth(ann_module.S), {'x': str, 'y': str}) - self.assertEqual(gth(ann_module.foo), {'x': int}) - self.assertEqual(gth(NoneAndForward, globals()), - {'parent': NoneAndForward, 'meaning': type(None)}) - - @skipUnless(PY36, 'Python 3.6 required') - def test_respect_no_type_check(self): - @no_type_check - class NoTpCheck: - class Inn: - def __init__(self, x: 'not a type'): ... - self.assertTrue(NoTpCheck.__no_type_check__) - self.assertTrue(NoTpCheck.Inn.__init__.__no_type_check__) - self.assertEqual(gth(ann_module2.NTC.meth), {}) - class ABase(Generic[T]): - def meth(x: int): ... - @no_type_check - class Der(ABase): ... - self.assertEqual(gth(ABase.meth), {'x': int}) - - def test_get_type_hints_for_builtins(self): - # Should not fail for built-in classes and functions. - self.assertEqual(gth(int), {}) - self.assertEqual(gth(type), {}) - self.assertEqual(gth(dir), {}) - self.assertEqual(gth(len), {}) - self.assertEqual(gth(object.__str__), {}) - self.assertEqual(gth(object().__str__), {}) - self.assertEqual(gth(str.join), {}) - - def test_previous_behavior(self): - def testf(x, y): ... - testf.__annotations__['x'] = 'int' - self.assertEqual(gth(testf), {'x': int}) - def testg(x: None): ... - self.assertEqual(gth(testg), {'x': type(None)}) - - def test_get_type_hints_for_object_with_annotations(self): - class A: ... - class B: ... 
- b = B() - b.__annotations__ = {'x': 'A'} - self.assertEqual(gth(b, locals()), {'x': A}) - - @skipUnless(PY36, 'Python 3.6 required') - def test_get_type_hints_ClassVar(self): - self.assertEqual(gth(ann_module2.CV, ann_module2.__dict__), - {'var': typing.ClassVar[ann_module2.CV]}) - self.assertEqual(gth(B, globals()), - {'y': int, 'x': ClassVar[Optional[B]], 'b': int}) - self.assertEqual(gth(CSub, globals()), - {'z': ClassVar[CSub], 'y': int, 'b': int, - 'x': ClassVar[Optional[B]]}) - self.assertEqual(gth(G), {'lst': ClassVar[List[T]]}) - - -class CollectionsAbcTests(BaseTestCase): - - def test_hashable(self): - self.assertIsInstance(42, typing.Hashable) - self.assertNotIsInstance([], typing.Hashable) - - def test_iterable(self): - self.assertIsInstance([], typing.Iterable) - # Due to ABC caching, the second time takes a separate code - # path and could fail. So call this a few times. - self.assertIsInstance([], typing.Iterable) - self.assertIsInstance([], typing.Iterable) - self.assertNotIsInstance(42, typing.Iterable) - # Just in case, also test issubclass() a few times. - self.assertIsSubclass(list, typing.Iterable) - self.assertIsSubclass(list, typing.Iterable) - - def test_iterator(self): - it = iter([]) - self.assertIsInstance(it, typing.Iterator) - self.assertNotIsInstance(42, typing.Iterator) - - @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required') - def test_awaitable(self): - ns = {} - exec( - "async def foo() -> typing.Awaitable[int]:\n" - " return await AwaitableWrapper(42)\n", - globals(), ns) - foo = ns['foo'] - g = foo() - self.assertIsInstance(g, typing.Awaitable) - self.assertNotIsInstance(foo, typing.Awaitable) - g.send(None) # Run foo() till completion, to avoid warning. - - @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required') - def test_coroutine(self): - ns = {} - exec( - "async def foo():\n" - " return\n", - globals(), ns) - foo = ns['foo'] - g = foo() - self.assertIsInstance(g, typing.Coroutine) - with self.assertRaises(TypeError): - isinstance(g, typing.Coroutine[int]) - self.assertNotIsInstance(foo, typing.Coroutine) - try: - g.send(None) - except StopIteration: - pass - - @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required') - def test_async_iterable(self): - base_it = range(10) # type: Iterator[int] - it = AsyncIteratorWrapper(base_it) - self.assertIsInstance(it, typing.AsyncIterable) - self.assertIsInstance(it, typing.AsyncIterable) - self.assertNotIsInstance(42, typing.AsyncIterable) - - @skipUnless(ASYNCIO, 'Python 3.5 and multithreading required') - def test_async_iterator(self): - base_it = range(10) # type: Iterator[int] - it = AsyncIteratorWrapper(base_it) - self.assertIsInstance(it, typing.AsyncIterator) - self.assertNotIsInstance(42, typing.AsyncIterator) - - def test_sized(self): - self.assertIsInstance([], typing.Sized) - self.assertNotIsInstance(42, typing.Sized) - - def test_container(self): - self.assertIsInstance([], typing.Container) - self.assertNotIsInstance(42, typing.Container) - - def test_collection(self): - if hasattr(typing, 'Collection'): - self.assertIsInstance(tuple(), typing.Collection) - self.assertIsInstance(frozenset(), typing.Collection) - self.assertIsSubclass(dict, typing.Collection) - self.assertNotIsInstance(42, typing.Collection) - - def test_abstractset(self): - self.assertIsInstance(set(), typing.AbstractSet) - self.assertNotIsInstance(42, typing.AbstractSet) - - def test_mutableset(self): - self.assertIsInstance(set(), typing.MutableSet) - self.assertNotIsInstance(frozenset(), 
typing.MutableSet) - - def test_mapping(self): - self.assertIsInstance({}, typing.Mapping) - self.assertNotIsInstance(42, typing.Mapping) - - def test_mutablemapping(self): - self.assertIsInstance({}, typing.MutableMapping) - self.assertNotIsInstance(42, typing.MutableMapping) - - def test_sequence(self): - self.assertIsInstance([], typing.Sequence) - self.assertNotIsInstance(42, typing.Sequence) - - def test_mutablesequence(self): - self.assertIsInstance([], typing.MutableSequence) - self.assertNotIsInstance((), typing.MutableSequence) - - def test_bytestring(self): - self.assertIsInstance(b'', typing.ByteString) - self.assertIsInstance(bytearray(b''), typing.ByteString) - - def test_list(self): - self.assertIsSubclass(list, typing.List) - - def test_deque(self): - self.assertIsSubclass(collections.deque, typing.Deque) - class MyDeque(typing.Deque[int]): ... - self.assertIsInstance(MyDeque(), collections.deque) - - def test_counter(self): - self.assertIsSubclass(collections.Counter, typing.Counter) - - def test_set(self): - self.assertIsSubclass(set, typing.Set) - self.assertNotIsSubclass(frozenset, typing.Set) - - def test_frozenset(self): - self.assertIsSubclass(frozenset, typing.FrozenSet) - self.assertNotIsSubclass(set, typing.FrozenSet) - - def test_dict(self): - self.assertIsSubclass(dict, typing.Dict) - - def test_no_list_instantiation(self): - with self.assertRaises(TypeError): - typing.List() - with self.assertRaises(TypeError): - typing.List[T]() - with self.assertRaises(TypeError): - typing.List[int]() - - def test_list_subclass(self): - - class MyList(typing.List[int]): - pass - - a = MyList() - self.assertIsInstance(a, MyList) - self.assertIsInstance(a, typing.Sequence) - - self.assertIsSubclass(MyList, list) - self.assertNotIsSubclass(list, MyList) - - def test_no_dict_instantiation(self): - with self.assertRaises(TypeError): - typing.Dict() - with self.assertRaises(TypeError): - typing.Dict[KT, VT]() - with self.assertRaises(TypeError): - typing.Dict[str, int]() - - def test_dict_subclass(self): - - class MyDict(typing.Dict[str, int]): - pass - - d = MyDict() - self.assertIsInstance(d, MyDict) - self.assertIsInstance(d, typing.MutableMapping) - - self.assertIsSubclass(MyDict, dict) - self.assertNotIsSubclass(dict, MyDict) - - def test_defaultdict_instantiation(self): - self.assertIs(type(typing.DefaultDict()), collections.defaultdict) - self.assertIs(type(typing.DefaultDict[KT, VT]()), collections.defaultdict) - self.assertIs(type(typing.DefaultDict[str, int]()), collections.defaultdict) - - def test_defaultdict_subclass(self): - - class MyDefDict(typing.DefaultDict[str, int]): - pass - - dd = MyDefDict() - self.assertIsInstance(dd, MyDefDict) - - self.assertIsSubclass(MyDefDict, collections.defaultdict) - self.assertNotIsSubclass(collections.defaultdict, MyDefDict) - - @skipUnless(sys.version_info >= (3, 3), 'ChainMap was added in 3.3') - def test_chainmap_instantiation(self): - self.assertIs(type(typing.ChainMap()), collections.ChainMap) - self.assertIs(type(typing.ChainMap[KT, VT]()), collections.ChainMap) - self.assertIs(type(typing.ChainMap[str, int]()), collections.ChainMap) - class CM(typing.ChainMap[KT, VT]): ... 
- self.assertIs(type(CM[int, str]()), CM) - - @skipUnless(sys.version_info >= (3, 3), 'ChainMap was added in 3.3') - def test_chainmap_subclass(self): - - class MyChainMap(typing.ChainMap[str, int]): - pass - - cm = MyChainMap() - self.assertIsInstance(cm, MyChainMap) - - self.assertIsSubclass(MyChainMap, collections.ChainMap) - self.assertNotIsSubclass(collections.ChainMap, MyChainMap) - - def test_deque_instantiation(self): - self.assertIs(type(typing.Deque()), collections.deque) - self.assertIs(type(typing.Deque[T]()), collections.deque) - self.assertIs(type(typing.Deque[int]()), collections.deque) - class D(typing.Deque[T]): ... - self.assertIs(type(D[int]()), D) - - def test_counter_instantiation(self): - self.assertIs(type(typing.Counter()), collections.Counter) - self.assertIs(type(typing.Counter[T]()), collections.Counter) - self.assertIs(type(typing.Counter[int]()), collections.Counter) - class C(typing.Counter[T]): ... - self.assertIs(type(C[int]()), C) - - def test_counter_subclass_instantiation(self): - - class MyCounter(typing.Counter[int]): - pass - - d = MyCounter() - self.assertIsInstance(d, MyCounter) - self.assertIsInstance(d, typing.Counter) - self.assertIsInstance(d, collections.Counter) - - def test_no_set_instantiation(self): - with self.assertRaises(TypeError): - typing.Set() - with self.assertRaises(TypeError): - typing.Set[T]() - with self.assertRaises(TypeError): - typing.Set[int]() - - def test_set_subclass_instantiation(self): - - class MySet(typing.Set[int]): - pass - - d = MySet() - self.assertIsInstance(d, MySet) - - def test_no_frozenset_instantiation(self): - with self.assertRaises(TypeError): - typing.FrozenSet() - with self.assertRaises(TypeError): - typing.FrozenSet[T]() - with self.assertRaises(TypeError): - typing.FrozenSet[int]() - - def test_frozenset_subclass_instantiation(self): - - class MyFrozenSet(typing.FrozenSet[int]): - pass - - d = MyFrozenSet() - self.assertIsInstance(d, MyFrozenSet) - - def test_no_tuple_instantiation(self): - with self.assertRaises(TypeError): - Tuple() - with self.assertRaises(TypeError): - Tuple[T]() - with self.assertRaises(TypeError): - Tuple[int]() - - def test_generator(self): - def foo(): - yield 42 - g = foo() - self.assertIsSubclass(type(g), typing.Generator) - - def test_no_generator_instantiation(self): - with self.assertRaises(TypeError): - typing.Generator() - with self.assertRaises(TypeError): - typing.Generator[T, T, T]() - with self.assertRaises(TypeError): - typing.Generator[int, int, int]() - - @skipUnless(PY36, 'Python 3.6 required') - def test_async_generator(self): - ns = {} - exec("async def f():\n" - " yield 42\n", globals(), ns) - g = ns['f']() - self.assertIsSubclass(type(g), typing.AsyncGenerator) - - @skipUnless(PY36, 'Python 3.6 required') - def test_no_async_generator_instantiation(self): - with self.assertRaises(TypeError): - typing.AsyncGenerator() - with self.assertRaises(TypeError): - typing.AsyncGenerator[T, T]() - with self.assertRaises(TypeError): - typing.AsyncGenerator[int, int]() - - def test_subclassing(self): - - class MMA(typing.MutableMapping): - pass - - with self.assertRaises(TypeError): # It's abstract - MMA() - - class MMC(MMA): - def __getitem__(self, k): - return None - def __setitem__(self, k, v): - pass - def __delitem__(self, k): - pass - def __iter__(self): - return iter(()) - def __len__(self): - return 0 - - self.assertEqual(len(MMC()), 0) - assert callable(MMC.update) - self.assertIsInstance(MMC(), typing.Mapping) - - class MMB(typing.MutableMapping[KT, VT]): - def 
__getitem__(self, k): - return None - def __setitem__(self, k, v): - pass - def __delitem__(self, k): - pass - def __iter__(self): - return iter(()) - def __len__(self): - return 0 - - self.assertEqual(len(MMB()), 0) - self.assertEqual(len(MMB[str, str]()), 0) - self.assertEqual(len(MMB[KT, VT]()), 0) - - self.assertNotIsSubclass(dict, MMA) - self.assertNotIsSubclass(dict, MMB) - - self.assertIsSubclass(MMA, typing.Mapping) - self.assertIsSubclass(MMB, typing.Mapping) - self.assertIsSubclass(MMC, typing.Mapping) - - self.assertIsInstance(MMB[KT, VT](), typing.Mapping) - self.assertIsInstance(MMB[KT, VT](), collections.Mapping) - - self.assertIsSubclass(MMA, collections.Mapping) - self.assertIsSubclass(MMB, collections.Mapping) - self.assertIsSubclass(MMC, collections.Mapping) - - self.assertIsSubclass(MMB[str, str], typing.Mapping) - self.assertIsSubclass(MMC, MMA) - - class I(typing.Iterable): ... - self.assertNotIsSubclass(list, I) - - class G(typing.Generator[int, int, int]): ... - def g(): yield 0 - self.assertIsSubclass(G, typing.Generator) - self.assertIsSubclass(G, typing.Iterable) - if hasattr(collections, 'Generator'): - self.assertIsSubclass(G, collections.Generator) - self.assertIsSubclass(G, collections.Iterable) - self.assertNotIsSubclass(type(g), G) - - @skipUnless(PY36, 'Python 3.6 required') - def test_subclassing_async_generator(self): - class G(typing.AsyncGenerator[int, int]): - def asend(self, value): - pass - def athrow(self, typ, val=None, tb=None): - pass - - ns = {} - exec('async def g(): yield 0', globals(), ns) - g = ns['g'] - self.assertIsSubclass(G, typing.AsyncGenerator) - self.assertIsSubclass(G, typing.AsyncIterable) - self.assertIsSubclass(G, collections.AsyncGenerator) - self.assertIsSubclass(G, collections.AsyncIterable) - self.assertNotIsSubclass(type(g), G) - - instance = G() - self.assertIsInstance(instance, typing.AsyncGenerator) - self.assertIsInstance(instance, typing.AsyncIterable) - self.assertIsInstance(instance, collections.AsyncGenerator) - self.assertIsInstance(instance, collections.AsyncIterable) - self.assertNotIsInstance(type(g), G) - self.assertNotIsInstance(g, G) - - def test_subclassing_subclasshook(self): - - class Base(typing.Iterable): - @classmethod - def __subclasshook__(cls, other): - if other.__name__ == 'Foo': - return True - else: - return False - - class C(Base): ... - class Foo: ... - class Bar: ... - self.assertIsSubclass(Foo, Base) - self.assertIsSubclass(Foo, C) - self.assertNotIsSubclass(Bar, C) - - def test_subclassing_register(self): - - class A(typing.Container): ... - class B(A): ... - - class C: ... - A.register(C) - self.assertIsSubclass(C, A) - self.assertNotIsSubclass(C, B) - - class D: ... - B.register(D) - self.assertIsSubclass(D, A) - self.assertIsSubclass(D, B) - - class M(): ... - collections.MutableMapping.register(M) - self.assertIsSubclass(M, typing.Mapping) - - def test_collections_as_base(self): - - class M(collections.Mapping): ... - self.assertIsSubclass(M, typing.Mapping) - self.assertIsSubclass(M, typing.Iterable) - - class S(collections.MutableSequence): ... - self.assertIsSubclass(S, typing.MutableSequence) - self.assertIsSubclass(S, typing.Iterable) - - class I(collections.Iterable): ... - self.assertIsSubclass(I, typing.Iterable) - - class A(collections.Mapping, metaclass=abc.ABCMeta): ... - class B: ... 
- A.register(B) - self.assertIsSubclass(B, typing.Mapping) - - -class OtherABCTests(BaseTestCase): - - @skipUnless(hasattr(typing, 'ContextManager'), - 'requires typing.ContextManager') - def test_contextmanager(self): - @contextlib.contextmanager - def manager(): - yield 42 - - cm = manager() - self.assertIsInstance(cm, typing.ContextManager) - self.assertNotIsInstance(42, typing.ContextManager) - - -class TypeTests(BaseTestCase): - - def test_type_basic(self): - - class User: pass - class BasicUser(User): pass - class ProUser(User): pass - - def new_user(user_class: Type[User]) -> User: - return user_class() - - new_user(BasicUser) - - def test_type_typevar(self): - - class User: pass - class BasicUser(User): pass - class ProUser(User): pass - - U = TypeVar('U', bound=User) - - def new_user(user_class: Type[U]) -> U: - return user_class() - - new_user(BasicUser) - - def test_type_optional(self): - A = Optional[Type[BaseException]] - - def foo(a: A) -> Optional[BaseException]: - if a is None: - return None - else: - return a() - - assert isinstance(foo(KeyboardInterrupt), KeyboardInterrupt) - assert foo(None) is None - - -class NewTypeTests(BaseTestCase): - - def test_basic(self): - UserId = NewType('UserId', int) - UserName = NewType('UserName', str) - self.assertIsInstance(UserId(5), int) - self.assertIsInstance(UserName('Joe'), str) - self.assertEqual(UserId(5) + 1, 6) - - def test_errors(self): - UserId = NewType('UserId', int) - UserName = NewType('UserName', str) - with self.assertRaises(TypeError): - issubclass(UserId, int) - with self.assertRaises(TypeError): - class D(UserName): - pass - - -class NamedTupleTests(BaseTestCase): - - def test_basics(self): - Emp = NamedTuple('Emp', [('name', str), ('id', int)]) - self.assertIsSubclass(Emp, tuple) - joe = Emp('Joe', 42) - jim = Emp(name='Jim', id=1) - self.assertIsInstance(joe, Emp) - self.assertIsInstance(joe, tuple) - self.assertEqual(joe.name, 'Joe') - self.assertEqual(joe.id, 42) - self.assertEqual(jim.name, 'Jim') - self.assertEqual(jim.id, 1) - self.assertEqual(Emp.__name__, 'Emp') - self.assertEqual(Emp._fields, ('name', 'id')) - self.assertEqual(Emp.__annotations__, - collections.OrderedDict([('name', str), ('id', int)])) - self.assertIs(Emp._field_types, Emp.__annotations__) - - def test_namedtuple_pyversion(self): - if sys.version_info[:2] < (3, 6): - with self.assertRaises(TypeError): - NamedTuple('Name', one=int, other=str) - with self.assertRaises(TypeError): - class NotYet(NamedTuple): - whatever = 0 - - @skipUnless(PY36, 'Python 3.6 required') - def test_annotation_usage(self): - tim = CoolEmployee('Tim', 9000) - self.assertIsInstance(tim, CoolEmployee) - self.assertIsInstance(tim, tuple) - self.assertEqual(tim.name, 'Tim') - self.assertEqual(tim.cool, 9000) - self.assertEqual(CoolEmployee.__name__, 'CoolEmployee') - self.assertEqual(CoolEmployee._fields, ('name', 'cool')) - self.assertEqual(CoolEmployee.__annotations__, - collections.OrderedDict(name=str, cool=int)) - self.assertIs(CoolEmployee._field_types, CoolEmployee.__annotations__) - - @skipUnless(PY36, 'Python 3.6 required') - def test_annotation_usage_with_default(self): - jelle = CoolEmployeeWithDefault('Jelle') - self.assertIsInstance(jelle, CoolEmployeeWithDefault) - self.assertIsInstance(jelle, tuple) - self.assertEqual(jelle.name, 'Jelle') - self.assertEqual(jelle.cool, 0) - cooler_employee = CoolEmployeeWithDefault('Sjoerd', 1) - self.assertEqual(cooler_employee.cool, 1) - - self.assertEqual(CoolEmployeeWithDefault.__name__, 'CoolEmployeeWithDefault') - 
self.assertEqual(CoolEmployeeWithDefault._fields, ('name', 'cool')) - self.assertEqual(CoolEmployeeWithDefault._field_types, dict(name=str, cool=int)) - self.assertEqual(CoolEmployeeWithDefault._field_defaults, dict(cool=0)) - - with self.assertRaises(TypeError): - exec(""" -class NonDefaultAfterDefault(NamedTuple): - x: int = 3 - y: int -""") - - @skipUnless(PY36, 'Python 3.6 required') - def test_annotation_usage_with_methods(self): - self.assertEqual(XMeth(1).double(), 2) - self.assertEqual(XMeth(42).x, XMeth(42)[0]) - self.assertEqual(str(XRepr(42)), '42 -> 1') - self.assertEqual(XRepr(1, 2) + XRepr(3), 0) - - with self.assertRaises(AttributeError): - exec(""" -class XMethBad(NamedTuple): - x: int - def _fields(self): - return 'no chance for this' -""") - - @skipUnless(PY36, 'Python 3.6 required') - def test_namedtuple_keyword_usage(self): - LocalEmployee = NamedTuple("LocalEmployee", name=str, age=int) - nick = LocalEmployee('Nick', 25) - self.assertIsInstance(nick, tuple) - self.assertEqual(nick.name, 'Nick') - self.assertEqual(LocalEmployee.__name__, 'LocalEmployee') - self.assertEqual(LocalEmployee._fields, ('name', 'age')) - self.assertEqual(LocalEmployee.__annotations__, dict(name=str, age=int)) - self.assertIs(LocalEmployee._field_types, LocalEmployee.__annotations__) - with self.assertRaises(TypeError): - NamedTuple('Name', [('x', int)], y=str) - with self.assertRaises(TypeError): - NamedTuple('Name', x=1, y='a') - - def test_pickle(self): - global Emp # pickle wants to reference the class by name - Emp = NamedTuple('Emp', [('name', str), ('id', int)]) - jane = Emp('jane', 37) - for proto in range(pickle.HIGHEST_PROTOCOL + 1): - z = pickle.dumps(jane, proto) - jane2 = pickle.loads(z) - self.assertEqual(jane2, jane) - - -class IOTests(BaseTestCase): - - def test_io(self): - - def stuff(a: IO) -> AnyStr: - return a.readline() - - a = stuff.__annotations__['a'] - self.assertEqual(a.__parameters__, (AnyStr,)) - - def test_textio(self): - - def stuff(a: TextIO) -> str: - return a.readline() - - a = stuff.__annotations__['a'] - self.assertEqual(a.__parameters__, ()) - - def test_binaryio(self): - - def stuff(a: BinaryIO) -> bytes: - return a.readline() - - a = stuff.__annotations__['a'] - self.assertEqual(a.__parameters__, ()) - - def test_io_submodule(self): - from typing.io import IO, TextIO, BinaryIO, __all__, __name__ - self.assertIs(IO, typing.IO) - self.assertIs(TextIO, typing.TextIO) - self.assertIs(BinaryIO, typing.BinaryIO) - self.assertEqual(set(__all__), set(['IO', 'TextIO', 'BinaryIO'])) - self.assertEqual(__name__, 'typing.io') - - -class RETests(BaseTestCase): - # Much of this is really testing _TypeAlias. - - def test_basics(self): - pat = re.compile('[a-z]+', re.I) - self.assertIsSubclass(pat.__class__, Pattern) - self.assertIsSubclass(type(pat), Pattern) - self.assertIsInstance(pat, Pattern) - - mat = pat.search('12345abcde.....') - self.assertIsSubclass(mat.__class__, Match) - self.assertIsSubclass(type(mat), Match) - self.assertIsInstance(mat, Match) - - # these should just work - Pattern[Union[str, bytes]] - Match[Union[bytes, str]] - - def test_alias_equality(self): - self.assertEqual(Pattern[str], Pattern[str]) - self.assertNotEqual(Pattern[str], Pattern[bytes]) - self.assertNotEqual(Pattern[str], Match[str]) - self.assertNotEqual(Pattern[str], str) - - def test_errors(self): - with self.assertRaises(TypeError): - # Doesn't fit AnyStr. - Pattern[int] - with self.assertRaises(TypeError): - # Can't change type vars? 
- Match[T] - m = Match[Union[str, bytes]] - with self.assertRaises(TypeError): - # Too complicated? - m[str] - with self.assertRaises(TypeError): - # We don't support isinstance(). - isinstance(42, Pattern[str]) - with self.assertRaises(TypeError): - # We don't support issubclass(). - issubclass(Pattern[bytes], Pattern[str]) - - def test_repr(self): - self.assertEqual(repr(Pattern), 'Pattern[~AnyStr]') - self.assertEqual(repr(Pattern[str]), 'Pattern[str]') - self.assertEqual(repr(Pattern[bytes]), 'Pattern[bytes]') - self.assertEqual(repr(Match), 'Match[~AnyStr]') - self.assertEqual(repr(Match[str]), 'Match[str]') - self.assertEqual(repr(Match[bytes]), 'Match[bytes]') - - def test_re_submodule(self): - from typing.re import Match, Pattern, __all__, __name__ - self.assertIs(Match, typing.Match) - self.assertIs(Pattern, typing.Pattern) - self.assertEqual(set(__all__), set(['Match', 'Pattern'])) - self.assertEqual(__name__, 'typing.re') - - def test_cannot_subclass(self): - with self.assertRaises(TypeError) as ex: - - class A(typing.Match): - pass - - self.assertEqual(str(ex.exception), - "Cannot subclass typing._TypeAlias") - - -class AllTests(BaseTestCase): - """Tests for __all__.""" - - def test_all(self): - from typing import __all__ as a - # Just spot-check the first and last of every category. - self.assertIn('AbstractSet', a) - self.assertIn('ValuesView', a) - self.assertIn('cast', a) - self.assertIn('overload', a) - if hasattr(contextlib, 'AbstractContextManager'): - self.assertIn('ContextManager', a) - # Check that io and re are not exported. - self.assertNotIn('io', a) - self.assertNotIn('re', a) - # Spot-check that stdlib modules aren't exported. - self.assertNotIn('os', a) - self.assertNotIn('sys', a) - # Check that Text is defined. - self.assertIn('Text', a) - - -if __name__ == '__main__': - main() diff --git a/lib-typing/3.2/typing.py b/lib-typing/3.2/typing.py deleted file mode 100644 index 9a0f49099a31..000000000000 --- a/lib-typing/3.2/typing.py +++ /dev/null @@ -1,2335 +0,0 @@ -import abc -from abc import abstractmethod, abstractproperty -import collections -import contextlib -import functools -import re as stdlib_re # Avoid confusion with the re we export. -import sys -import types -try: - import collections.abc as collections_abc -except ImportError: - import collections as collections_abc # Fallback for PY3.2. -try: - from types import SlotWrapperType, MethodWrapperType, MethodDescriptorType -except ImportError: - SlotWrapperType = type(object.__init__) - MethodWrapperType = type(object().__str__) - MethodDescriptorType = type(str.join) - - -# Please keep __all__ alphabetized within each category. -__all__ = [ - # Super-special typing primitives. - 'Any', - 'Callable', - 'ClassVar', - 'Generic', - 'Optional', - 'Tuple', - 'Type', - 'TypeVar', - 'Union', - - # ABCs (from collections.abc). - 'AbstractSet', # collections.abc.Set. - 'GenericMeta', # subclass of abc.ABCMeta and a metaclass - # for 'Generic' and ABCs below. - 'ByteString', - 'Container', - 'Hashable', - 'ItemsView', - 'Iterable', - 'Iterator', - 'KeysView', - 'Mapping', - 'MappingView', - 'MutableMapping', - 'MutableSequence', - 'MutableSet', - 'Sequence', - 'Sized', - 'ValuesView', - # The following are added depending on presence - # of their non-generic counterparts in stdlib: - # Awaitable, - # AsyncIterator, - # AsyncIterable, - # Coroutine, - # Collection, - # ContextManager, - # AsyncGenerator, - - # Structural checks, a.k.a. protocols. 
- 'Reversible', - 'SupportsAbs', - 'SupportsFloat', - 'SupportsInt', - 'SupportsRound', - - # Concrete collection types. - 'Counter', - 'Deque', - 'Dict', - 'DefaultDict', - 'List', - 'Set', - 'FrozenSet', - 'NamedTuple', # Not really a type. - 'Generator', - - # One-off things. - 'AnyStr', - 'cast', - 'get_type_hints', - 'NewType', - 'no_type_check', - 'no_type_check_decorator', - 'overload', - 'Text', - 'TYPE_CHECKING', -] - -# The pseudo-submodules 're' and 'io' are part of the public -# namespace, but excluded from __all__ because they might stomp on -# legitimate imports of those modules. - - -def _qualname(x): - if sys.version_info[:2] >= (3, 3): - return x.__qualname__ - else: - # Fall back to just name. - return x.__name__ - - -def _trim_name(nm): - whitelist = ('_TypeAlias', '_ForwardRef', '_TypingBase', '_FinalTypingBase') - if nm.startswith('_') and nm not in whitelist: - nm = nm[1:] - return nm - - -class TypingMeta(type): - """Metaclass for most types defined in typing module - (not a part of public API). - - This overrides __new__() to require an extra keyword parameter - '_root', which serves as a guard against naive subclassing of the - typing classes. Any legitimate class defined using a metaclass - derived from TypingMeta must pass _root=True. - - This also defines a dummy constructor (all the work for most typing - constructs is done in __new__) and a nicer repr(). - """ - - _is_protocol = False - - def __new__(cls, name, bases, namespace, *, _root=False): - if not _root: - raise TypeError("Cannot subclass %s" % - (', '.join(map(_type_repr, bases)) or '()')) - return super().__new__(cls, name, bases, namespace) - - def __init__(self, *args, **kwds): - pass - - def _eval_type(self, globalns, localns): - """Override this in subclasses to interpret forward references. - - For example, List['C'] is internally stored as - List[_ForwardRef('C')], which should evaluate to List[C], - where C is an object found in globalns or localns (searching - localns first, of course). - """ - return self - - def _get_type_vars(self, tvars): - pass - - def __repr__(self): - qname = _trim_name(_qualname(self)) - return '%s.%s' % (self.__module__, qname) - - -class _TypingBase(metaclass=TypingMeta, _root=True): - """Internal indicator of special typing constructs.""" - - __slots__ = ('__weakref__',) - - def __init__(self, *args, **kwds): - pass - - def __new__(cls, *args, **kwds): - """Constructor. - - This only exists to give a better error message in case - someone tries to subclass a special typing object (not a good idea). - """ - if (len(args) == 3 and - isinstance(args[0], str) and - isinstance(args[1], tuple)): - # Close enough. - raise TypeError("Cannot subclass %r" % cls) - return super().__new__(cls) - - # Things that are not classes also need these. - def _eval_type(self, globalns, localns): - return self - - def _get_type_vars(self, tvars): - pass - - def __repr__(self): - cls = type(self) - qname = _trim_name(_qualname(cls)) - return '%s.%s' % (cls.__module__, qname) - - def __call__(self, *args, **kwds): - raise TypeError("Cannot instantiate %r" % type(self)) - - -class _FinalTypingBase(_TypingBase, _root=True): - """Internal mix-in class to prevent instantiation. - - Prevents instantiation unless _root=True is given in class call. - It is used to create pseudo-singleton instances Any, Union, Optional, etc. 
- """ - - __slots__ = () - - def __new__(cls, *args, _root=False, **kwds): - self = super().__new__(cls, *args, **kwds) - if _root is True: - return self - raise TypeError("Cannot instantiate %r" % cls) - - def __reduce__(self): - return _trim_name(type(self).__name__) - - -class _ForwardRef(_TypingBase, _root=True): - """Internal wrapper to hold a forward reference.""" - - __slots__ = ('__forward_arg__', '__forward_code__', - '__forward_evaluated__', '__forward_value__') - - def __init__(self, arg): - super().__init__(arg) - if not isinstance(arg, str): - raise TypeError('Forward reference must be a string -- got %r' % (arg,)) - try: - code = compile(arg, '', 'eval') - except SyntaxError: - raise SyntaxError('Forward reference must be an expression -- got %r' % - (arg,)) - self.__forward_arg__ = arg - self.__forward_code__ = code - self.__forward_evaluated__ = False - self.__forward_value__ = None - - def _eval_type(self, globalns, localns): - if not self.__forward_evaluated__ or localns is not globalns: - if globalns is None and localns is None: - globalns = localns = {} - elif globalns is None: - globalns = localns - elif localns is None: - localns = globalns - self.__forward_value__ = _type_check( - eval(self.__forward_code__, globalns, localns), - "Forward references must evaluate to types.") - self.__forward_evaluated__ = True - return self.__forward_value__ - - def __eq__(self, other): - if not isinstance(other, _ForwardRef): - return NotImplemented - return (self.__forward_arg__ == other.__forward_arg__ and - self.__forward_value__ == other.__forward_value__) - - def __hash__(self): - return hash((self.__forward_arg__, self.__forward_value__)) - - def __instancecheck__(self, obj): - raise TypeError("Forward references cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Forward references cannot be used with issubclass().") - - def __repr__(self): - return '_ForwardRef(%r)' % (self.__forward_arg__,) - - -class _TypeAlias(_TypingBase, _root=True): - """Internal helper class for defining generic variants of concrete types. - - Note that this is not a type; let's call it a pseudo-type. It cannot - be used in instance and subclass checks in parameterized form, i.e. - ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning - ``False``. - """ - - __slots__ = ('name', 'type_var', 'impl_type', 'type_checker') - - def __init__(self, name, type_var, impl_type, type_checker): - """Initializer. - - Args: - name: The name, e.g. 'Pattern'. - type_var: The type parameter, e.g. AnyStr, or the - specific type, e.g. str. - impl_type: The implementation type. - type_checker: Function that takes an impl_type instance. - and returns a value that should be a type_var instance. - """ - assert isinstance(name, str), repr(name) - assert isinstance(impl_type, type), repr(impl_type) - assert not isinstance(impl_type, TypingMeta), repr(impl_type) - assert isinstance(type_var, (type, _TypingBase)), repr(type_var) - self.name = name - self.type_var = type_var - self.impl_type = impl_type - self.type_checker = type_checker - - def __repr__(self): - return "%s[%s]" % (self.name, _type_repr(self.type_var)) - - def __getitem__(self, parameter): - if not isinstance(self.type_var, TypeVar): - raise TypeError("%s cannot be further parameterized." % self) - if self.type_var.__constraints__ and isinstance(parameter, type): - if not issubclass(parameter, self.type_var.__constraints__): - raise TypeError("%s is not a valid substitution for %s." 
% - (parameter, self.type_var)) - if isinstance(parameter, TypeVar) and parameter is not self.type_var: - raise TypeError("%s cannot be re-parameterized." % self) - return self.__class__(self.name, parameter, - self.impl_type, self.type_checker) - - def __eq__(self, other): - if not isinstance(other, _TypeAlias): - return NotImplemented - return self.name == other.name and self.type_var == other.type_var - - def __hash__(self): - return hash((self.name, self.type_var)) - - def __instancecheck__(self, obj): - if not isinstance(self.type_var, TypeVar): - raise TypeError("Parameterized type aliases cannot be used " - "with isinstance().") - return isinstance(obj, self.impl_type) - - def __subclasscheck__(self, cls): - if not isinstance(self.type_var, TypeVar): - raise TypeError("Parameterized type aliases cannot be used " - "with issubclass().") - return issubclass(cls, self.impl_type) - - -def _get_type_vars(types, tvars): - for t in types: - if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): - t._get_type_vars(tvars) - - -def _type_vars(types): - tvars = [] - _get_type_vars(types, tvars) - return tuple(tvars) - - -def _eval_type(t, globalns, localns): - if isinstance(t, TypingMeta) or isinstance(t, _TypingBase): - return t._eval_type(globalns, localns) - return t - - -def _type_check(arg, msg): - """Check that the argument is a type, and return it (internal helper). - - As a special case, accept None and return type(None) instead. - Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable. - - The msg argument is a human-readable error message, e.g. - - "Union[arg, ...]: arg should be a type." - - We append the repr() of the actual value (truncated to 100 chars). - """ - if arg is None: - return type(None) - if isinstance(arg, str): - arg = _ForwardRef(arg) - if ( - isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or - not isinstance(arg, (type, _TypingBase)) and not callable(arg) - ): - raise TypeError(msg + " Got %.100r." % (arg,)) - # Bare Union etc. are not valid as type arguments - if ( - type(arg).__name__ in ('_Union', '_Optional') and - not getattr(arg, '__origin__', None) or - isinstance(arg, TypingMeta) and _gorg(arg) in (Generic, _Protocol) - ): - raise TypeError("Plain %s is not valid as type argument" % arg) - return arg - - -def _type_repr(obj): - """Return the repr() of an object, special-casing types (internal helper). - - If obj is a type, we return a shorter version than the default - type.__repr__, based on the module and qualified name, which is - typically enough to uniquely identify a type. For everything - else, we fall back on repr(obj). - """ - if isinstance(obj, type) and not isinstance(obj, TypingMeta): - if obj.__module__ == 'builtins': - return _qualname(obj) - return '%s.%s' % (obj.__module__, _qualname(obj)) - if obj is ...: - return('...') - if isinstance(obj, types.FunctionType): - return obj.__name__ - return repr(obj) - - -class _Any(_FinalTypingBase, _root=True): - """Special type indicating an unconstrained type. - - - Any is compatible with every type. - - Any assumed to have all methods. - - All values assumed to be instances of Any. - - Note that all the above statements are true from the point of view of - static type checkers. At runtime, Any should not be used with instance - or class checks. 
- """ - - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("Any cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Any cannot be used with issubclass().") - - -Any = _Any(_root=True) - - -class TypeVar(_TypingBase, _root=True): - """Type variable. - - Usage:: - - T = TypeVar('T') # Can be anything - A = TypeVar('A', str, bytes) # Must be str or bytes - - Type variables exist primarily for the benefit of static type - checkers. They serve as the parameters for generic types as well - as for generic function definitions. See class Generic for more - information on generic types. Generic functions work as follows: - - def repeat(x: T, n: int) -> List[T]: - '''Return a list containing n references to x.''' - return [x]*n - - def longest(x: A, y: A) -> A: - '''Return the longest of two strings.''' - return x if len(x) >= len(y) else y - - The latter example's signature is essentially the overloading - of (str, str) -> str and (bytes, bytes) -> bytes. Also note - that if the arguments are instances of some subclass of str, - the return type is still plain str. - - At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError. - - Type variables defined with covariant=True or contravariant=True - can be used do declare covariant or contravariant generic types. - See PEP 484 for more details. By default generic types are invariant - in all type variables. - - Type variables can be introspected. e.g.: - - T.__name__ == 'T' - T.__constraints__ == () - T.__covariant__ == False - T.__contravariant__ = False - A.__constraints__ == (str, bytes) - """ - - __slots__ = ('__name__', '__bound__', '__constraints__', - '__covariant__', '__contravariant__') - - def __init__(self, name, *constraints, bound=None, - covariant=False, contravariant=False): - super().__init__(name, *constraints, bound=bound, - covariant=covariant, contravariant=contravariant) - self.__name__ = name - if covariant and contravariant: - raise ValueError("Bivariant types are not supported.") - self.__covariant__ = bool(covariant) - self.__contravariant__ = bool(contravariant) - if constraints and bound is not None: - raise TypeError("Constraints cannot be combined with bound=...") - if constraints and len(constraints) == 1: - raise TypeError("A single constraint is not allowed") - msg = "TypeVar(name, constraint, ...): constraints must be types." - self.__constraints__ = tuple(_type_check(t, msg) for t in constraints) - if bound: - self.__bound__ = _type_check(bound, "Bound must be a type.") - else: - self.__bound__ = None - - def _get_type_vars(self, tvars): - if self not in tvars: - tvars.append(self) - - def __repr__(self): - if self.__covariant__: - prefix = '+' - elif self.__contravariant__: - prefix = '-' - else: - prefix = '~' - return prefix + self.__name__ - - def __instancecheck__(self, instance): - raise TypeError("Type variables cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Type variables cannot be used with issubclass().") - - -# Some unconstrained type variables. These are used by the container types. -# (These are not for export.) -T = TypeVar('T') # Any type. -KT = TypeVar('KT') # Key type. -VT = TypeVar('VT') # Value type. -T_co = TypeVar('T_co', covariant=True) # Any type covariant containers. -V_co = TypeVar('V_co', covariant=True) # Any type covariant containers. -VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers. 
-T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant. - -# A useful type variable with constraints. This represents string types. -# (This one *is* for export!) -AnyStr = TypeVar('AnyStr', bytes, str) - - -def _replace_arg(arg, tvars, args): - """An internal helper function: replace arg if it is a type variable - found in tvars with corresponding substitution from args or - with corresponding substitution sub-tree if arg is a generic type. - """ - - if tvars is None: - tvars = [] - if hasattr(arg, '_subs_tree') and isinstance(arg, (GenericMeta, _TypingBase)): - return arg._subs_tree(tvars, args) - if isinstance(arg, TypeVar): - for i, tvar in enumerate(tvars): - if arg == tvar: - return args[i] - return arg - - -# Special typing constructs Union, Optional, Generic, Callable and Tuple -# use three special attributes for internal bookkeeping of generic types: -# * __parameters__ is a tuple of unique free type parameters of a generic -# type, for example, Dict[T, T].__parameters__ == (T,); -# * __origin__ keeps a reference to a type that was subscripted, -# e.g., Union[T, int].__origin__ == Union; -# * __args__ is a tuple of all arguments used in subscripting, -# e.g., Dict[T, int].__args__ == (T, int). - - -def _subs_tree(cls, tvars=None, args=None): - """An internal helper function: calculate substitution tree - for generic cls after replacing its type parameters with - substitutions in tvars -> args (if any). - Repeat the same following __origin__'s. - - Return a list of arguments with all possible substitutions - performed. Arguments that are generic classes themselves are represented - as tuples (so that no new classes are created by this function). - For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)] - """ - - if cls.__origin__ is None: - return cls - # Make of chain of origins (i.e. cls -> cls.__origin__) - current = cls.__origin__ - orig_chain = [] - while current.__origin__ is not None: - orig_chain.append(current) - current = current.__origin__ - # Replace type variables in __args__ if asked ... - tree_args = [] - for arg in cls.__args__: - tree_args.append(_replace_arg(arg, tvars, args)) - # ... then continue replacing down the origin chain. - for ocls in orig_chain: - new_tree_args = [] - for arg in ocls.__args__: - new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args)) - tree_args = new_tree_args - return tree_args - - -def _remove_dups_flatten(parameters): - """An internal helper for Union creation and substitution: flatten Union's - among parameters, then remove duplicates and strict subclasses. - """ - - # Flatten out Union[Union[...], ...]. - params = [] - for p in parameters: - if isinstance(p, _Union) and p.__origin__ is Union: - params.extend(p.__args__) - elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union: - params.extend(p[1:]) - else: - params.append(p) - # Weed out strict duplicates, preserving the first of each occurrence. - all_params = set(params) - if len(all_params) < len(params): - new_params = [] - for t in params: - if t in all_params: - new_params.append(t) - all_params.remove(t) - params = new_params - assert not all_params, all_params - # Weed out subclasses. - # E.g. Union[int, Employee, Manager] == Union[int, Employee]. - # If object is present it will be sole survivor among proper classes. - # Never discard type variables. - # (In particular, Union[str, AnyStr] != AnyStr.) 
- all_params = set(params) - for t1 in params: - if not isinstance(t1, type): - continue - if any(isinstance(t2, type) and issubclass(t1, t2) - for t2 in all_params - {t1} - if not (isinstance(t2, GenericMeta) and - t2.__origin__ is not None)): - all_params.remove(t1) - return tuple(t for t in params if t in all_params) - - -def _check_generic(cls, parameters): - # Check correct count for parameters of a generic cls (internal helper). - if not cls.__parameters__: - raise TypeError("%s is not a generic class" % repr(cls)) - alen = len(parameters) - elen = len(cls.__parameters__) - if alen != elen: - raise TypeError("Too %s parameters for %s; actual %s, expected %s" % - ("many" if alen > elen else "few", repr(cls), alen, elen)) - - -_cleanups = [] - - -def _tp_cache(func): - """Internal wrapper caching __getitem__ of generic types with a fallback to - original function for non-hashable arguments. - """ - - cached = functools.lru_cache()(func) - _cleanups.append(cached.cache_clear) - - @functools.wraps(func) - def inner(*args, **kwds): - try: - return cached(*args, **kwds) - except TypeError: - pass # All real errors (not unhashable args) are raised below. - return func(*args, **kwds) - return inner - - -class _Union(_FinalTypingBase, _root=True): - """Union type; Union[X, Y] means either X or Y. - - To define a union, use e.g. Union[int, str]. Details: - - - The arguments must be types and there must be at least one. - - - None as an argument is a special case and is replaced by - type(None). - - - Unions of unions are flattened, e.g.:: - - Union[Union[int, str], float] == Union[int, str, float] - - - Unions of a single argument vanish, e.g.:: - - Union[int] == int # The constructor actually returns int - - - Redundant arguments are skipped, e.g.:: - - Union[int, str, int] == Union[int, str] - - - When comparing unions, the argument order is ignored, e.g.:: - - Union[int, str] == Union[str, int] - - - When two arguments have a subclass relationship, the least - derived argument is kept, e.g.:: - - class Employee: pass - class Manager(Employee): pass - Union[int, Employee, Manager] == Union[int, Employee] - Union[Manager, int, Employee] == Union[int, Employee] - Union[Employee, Manager] == Employee - - - Similar for object:: - - Union[int, object] == object - - - You cannot subclass or instantiate a union. - - - You can use Optional[X] as a shorthand for Union[X, None]. - """ - - __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__') - - def __new__(cls, parameters=None, origin=None, *args, _root=False): - self = super().__new__(cls, parameters, origin, *args, _root=_root) - if origin is None: - self.__parameters__ = None - self.__args__ = None - self.__origin__ = None - self.__tree_hash__ = hash(frozenset(('Union',))) - return self - if not isinstance(parameters, tuple): - raise TypeError("Expected parameters=") - if origin is Union: - parameters = _remove_dups_flatten(parameters) - # It's not a union if there's only one type left. - if len(parameters) == 1: - return parameters[0] - self.__parameters__ = _type_vars(parameters) - self.__args__ = parameters - self.__origin__ = origin - # Pre-calculate the __hash__ on instantiation. - # This improves speed for complex substitutions. 
- subs_tree = self._subs_tree() - if isinstance(subs_tree, tuple): - self.__tree_hash__ = hash(frozenset(subs_tree)) - else: - self.__tree_hash__ = hash(subs_tree) - return self - - def _eval_type(self, globalns, localns): - if self.__args__ is None: - return self - ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__) - ev_origin = _eval_type(self.__origin__, globalns, localns) - if ev_args == self.__args__ and ev_origin == self.__origin__: - # Everything is already evaluated. - return self - return self.__class__(ev_args, ev_origin, _root=True) - - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - _get_type_vars(self.__parameters__, tvars) - - def __repr__(self): - if self.__origin__ is None: - return super().__repr__() - tree = self._subs_tree() - if not isinstance(tree, tuple): - return repr(tree) - return tree[0]._tree_repr(tree) - - def _tree_repr(self, tree): - arg_list = [] - for arg in tree[1:]: - if not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - return super().__repr__() + '[%s]' % ', '.join(arg_list) - - @_tp_cache - def __getitem__(self, parameters): - if parameters == (): - raise TypeError("Cannot take a Union of no types.") - if not isinstance(parameters, tuple): - parameters = (parameters,) - if self.__origin__ is None: - msg = "Union[arg, ...]: each arg must be a type." - else: - msg = "Parameters to generic types must be types." - parameters = tuple(_type_check(p, msg) for p in parameters) - if self is not Union: - _check_generic(self, parameters) - return self.__class__(parameters, origin=self, _root=True) - - def _subs_tree(self, tvars=None, args=None): - if self is Union: - return Union # Nothing to substitute - tree_args = _subs_tree(self, tvars, args) - tree_args = _remove_dups_flatten(tree_args) - if len(tree_args) == 1: - return tree_args[0] # Union of a single type is that type - return (Union,) + tree_args - - def __eq__(self, other): - if isinstance(other, _Union): - return self.__tree_hash__ == other.__tree_hash__ - elif self is not Union: - return self._subs_tree() == other - else: - return self is other - - def __hash__(self): - return self.__tree_hash__ - - def __instancecheck__(self, obj): - raise TypeError("Unions cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Unions cannot be used with issubclass().") - - -Union = _Union(_root=True) - - -class _Optional(_FinalTypingBase, _root=True): - """Optional type. - - Optional[X] is equivalent to Union[X, None]. - """ - - __slots__ = () - - @_tp_cache - def __getitem__(self, arg): - arg = _type_check(arg, "Optional[t] requires a single type.") - return Union[arg, type(None)] - - -Optional = _Optional(_root=True) - - -def _gorg(a): - """Return the farthest origin of a generic class (internal helper).""" - assert isinstance(a, GenericMeta) - while a.__origin__ is not None: - a = a.__origin__ - return a - - -def _geqv(a, b): - """Return whether two generic classes are equivalent (internal helper). - - The intention is to consider generic class X and any of its - parameterized forms (X[T], X[int], etc.) as equivalent. - - However, X is not equivalent to a subclass of X. - - The relation is reflexive, symmetric and transitive. - """ - assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta) - # Reduce each to its origin. - return _gorg(a) is _gorg(b) - - -def _next_in_mro(cls): - """Helper for Generic.__new__. 
- - Returns the class after the last occurrence of Generic or - Generic[...] in cls.__mro__. - """ - next_in_mro = object - # Look for the last occurrence of Generic or Generic[...]. - for i, c in enumerate(cls.__mro__[:-1]): - if isinstance(c, GenericMeta) and _gorg(c) is Generic: - next_in_mro = cls.__mro__[i + 1] - return next_in_mro - - -def _make_subclasshook(cls): - """Construct a __subclasshook__ callable that incorporates - the associated __extra__ class in subclass checks performed - against cls. - """ - if isinstance(cls.__extra__, abc.ABCMeta): - # The logic mirrors that of ABCMeta.__subclasscheck__. - # Registered classes need not be checked here because - # cls and its extra share the same _abc_registry. - def __extrahook__(subclass): - res = cls.__extra__.__subclasshook__(subclass) - if res is not NotImplemented: - return res - if cls.__extra__ in subclass.__mro__: - return True - for scls in cls.__extra__.__subclasses__(): - if isinstance(scls, GenericMeta): - continue - if issubclass(subclass, scls): - return True - return NotImplemented - else: - # For non-ABC extras we'll just call issubclass(). - def __extrahook__(subclass): - if cls.__extra__ and issubclass(subclass, cls.__extra__): - return True - return NotImplemented - return __extrahook__ - - -def _no_slots_copy(dct): - """Internal helper: copy class __dict__ and clean slots class variables. - (They will be re-created if necessary by normal class machinery.) - """ - dict_copy = dict(dct) - if '__slots__' in dict_copy: - for slot in dict_copy['__slots__']: - dict_copy.pop(slot, None) - return dict_copy - - -class GenericMeta(TypingMeta, abc.ABCMeta): - """Metaclass for generic types. - - This is a metaclass for typing.Generic and generic ABCs defined in - typing module. User defined subclasses of GenericMeta can override - __new__ and invoke super().__new__. Note that GenericMeta.__new__ - has strict rules on what is allowed in its bases argument: - * plain Generic is disallowed in bases; - * Generic[...] should appear in bases at most once; - * if Generic[...] is present, then it should list all type variables - that appear in other bases. - In addition, type of all generic bases is erased, e.g., C[int] is - stripped to plain C. - """ - - def __new__(cls, name, bases, namespace, - tvars=None, args=None, origin=None, extra=None, orig_bases=None): - """Create a new generic class. GenericMeta.__new__ accepts - keyword arguments that are used for internal bookkeeping, therefore - an override should pass unused keyword arguments to super(). - """ - if tvars is not None: - # Called from __getitem__() below. - assert origin is not None - assert all(isinstance(t, TypeVar) for t in tvars), tvars - else: - # Called from class statement. - assert tvars is None, tvars - assert args is None, args - assert origin is None, origin - - # Get the full set of tvars from the bases. - tvars = _type_vars(bases) - # Look for Generic[T1, ..., Tn]. - # If found, tvars must be a subset of it. - # If not found, tvars is it. - # Also check for and reject plain Generic, - # and reject multiple Generic[...]. - gvars = None - for base in bases: - if base is Generic: - raise TypeError("Cannot inherit from plain Generic") - if (isinstance(base, GenericMeta) and - base.__origin__ is Generic): - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...] 
multiple types.") - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: - raise TypeError( - "Some type variables (%s) " - "are not listed in Generic[%s]" % - (", ".join(str(t) for t in tvars if t not in gvarset), - ", ".join(str(g) for g in gvars))) - tvars = gvars - - initial_bases = bases - if extra is not None and type(extra) is abc.ABCMeta and extra not in bases: - bases = (extra,) + bases - bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b for b in bases) - - # remove bare Generic from bases if there are other generic bases - if any(isinstance(b, GenericMeta) and b is not Generic for b in bases): - bases = tuple(b for b in bases if b is not Generic) - namespace.update({'__origin__': origin, '__extra__': extra}) - self = super().__new__(cls, name, bases, namespace, _root=True) - - self.__parameters__ = tvars - # Be prepared that GenericMeta will be subclassed by TupleMeta - # and CallableMeta, those two allow ..., (), or [] in __args___. - self.__args__ = tuple(... if a is _TypingEllipsis else - () if a is _TypingEmpty else - a for a in args) if args else None - # Speed hack (https://github.com/python/typing/issues/196). - self.__next_in_mro__ = _next_in_mro(self) - # Preserve base classes on subclassing (__bases__ are type erased now). - if orig_bases is None: - self.__orig_bases__ = initial_bases - - # This allows unparameterized generic collections to be used - # with issubclass() and isinstance() in the same way as their - # collections.abc counterparts (e.g., isinstance([], Iterable)). - if ( - '__subclasshook__' not in namespace and extra or - # allow overriding - getattr(self.__subclasshook__, '__name__', '') == '__extrahook__' - ): - self.__subclasshook__ = _make_subclasshook(self) - if isinstance(extra, abc.ABCMeta): - self._abc_registry = extra._abc_registry - self._abc_cache = extra._abc_cache - elif origin is not None: - self._abc_registry = origin._abc_registry - self._abc_cache = origin._abc_cache - - if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2. - self.__qualname__ = origin.__qualname__ - self.__tree_hash__ = (hash(self._subs_tree()) if origin else - super(GenericMeta, self).__hash__()) - return self - - # _abc_negative_cache and _abc_negative_cache_version - # realised as descriptors, since GenClass[t1, t2, ...] always - # share subclass info with GenClass. - # This is an important memory optimization. 
- @property - def _abc_negative_cache(self): - if isinstance(self.__extra__, abc.ABCMeta): - return self.__extra__._abc_negative_cache - return _gorg(self)._abc_generic_negative_cache - - @_abc_negative_cache.setter - def _abc_negative_cache(self, value): - if self.__origin__ is None: - if isinstance(self.__extra__, abc.ABCMeta): - self.__extra__._abc_negative_cache = value - else: - self._abc_generic_negative_cache = value - - @property - def _abc_negative_cache_version(self): - if isinstance(self.__extra__, abc.ABCMeta): - return self.__extra__._abc_negative_cache_version - return _gorg(self)._abc_generic_negative_cache_version - - @_abc_negative_cache_version.setter - def _abc_negative_cache_version(self, value): - if self.__origin__ is None: - if isinstance(self.__extra__, abc.ABCMeta): - self.__extra__._abc_negative_cache_version = value - else: - self._abc_generic_negative_cache_version = value - - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - _get_type_vars(self.__parameters__, tvars) - - def _eval_type(self, globalns, localns): - ev_origin = (self.__origin__._eval_type(globalns, localns) - if self.__origin__ else None) - ev_args = tuple(_eval_type(a, globalns, localns) for a - in self.__args__) if self.__args__ else None - if ev_origin == self.__origin__ and ev_args == self.__args__: - return self - return self.__class__(self.__name__, - self.__bases__, - _no_slots_copy(self.__dict__), - tvars=_type_vars(ev_args) if ev_args else None, - args=ev_args, - origin=ev_origin, - extra=self.__extra__, - orig_bases=self.__orig_bases__) - - def __repr__(self): - if self.__origin__ is None: - return super().__repr__() - return self._tree_repr(self._subs_tree()) - - def _tree_repr(self, tree): - arg_list = [] - for arg in tree[1:]: - if arg == (): - arg_list.append('()') - elif not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - return super().__repr__() + '[%s]' % ', '.join(arg_list) - - def _subs_tree(self, tvars=None, args=None): - if self.__origin__ is None: - return self - tree_args = _subs_tree(self, tvars, args) - return (_gorg(self),) + tuple(tree_args) - - def __eq__(self, other): - if not isinstance(other, GenericMeta): - return NotImplemented - if self.__origin__ is None or other.__origin__ is None: - return self is other - return self.__tree_hash__ == other.__tree_hash__ - - def __hash__(self): - return self.__tree_hash__ - - @_tp_cache - def __getitem__(self, params): - if not isinstance(params, tuple): - params = (params,) - if not params and not _gorg(self) is Tuple: - raise TypeError( - "Parameter list to %s[...] cannot be empty" % _qualname(self)) - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if self is Generic: - # Generic can only be subscripted with unique type variables. - if not all(isinstance(p, TypeVar) for p in params): - raise TypeError( - "Parameters to Generic[...] must all be type variables") - if len(set(params)) != len(params): - raise TypeError( - "Parameters to Generic[...] must all be unique") - tvars = params - args = params - elif self in (Tuple, Callable): - tvars = _type_vars(params) - args = params - elif self is _Protocol: - # _Protocol is internal, don't check anything. - tvars = params - args = params - elif self.__origin__ in (Generic, _Protocol): - # Can't subscript Generic[...] or _Protocol[...]. 
- raise TypeError("Cannot subscript already-subscripted %s" % - repr(self)) - else: - # Subscripting a regular Generic subclass. - _check_generic(self, params) - tvars = _type_vars(params) - args = params - - prepend = (self,) if self.__origin__ is None else () - return self.__class__(self.__name__, - prepend + self.__bases__, - _no_slots_copy(self.__dict__), - tvars=tvars, - args=args, - origin=self, - extra=self.__extra__, - orig_bases=self.__orig_bases__) - - def __subclasscheck__(self, cls): - if self.__origin__ is not None: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: - raise TypeError("Parameterized generics cannot be used with class " - "or instance checks") - return False - if self is Generic: - raise TypeError("Class %r cannot be used with class " - "or instance checks" % self) - return super().__subclasscheck__(cls) - - def __instancecheck__(self, instance): - # Since we extend ABC.__subclasscheck__ and - # ABC.__instancecheck__ inlines the cache checking done by the - # latter, we must extend __instancecheck__ too. For simplicity - # we just skip the cache check -- instance checks for generic - # classes are supposed to be rare anyways. - return issubclass(instance.__class__, self) - - def __copy__(self): - return self.__class__(self.__name__, self.__bases__, - _no_slots_copy(self.__dict__), - self.__parameters__, self.__args__, self.__origin__, - self.__extra__, self.__orig_bases__) - - def __setattr__(self, attr, value): - # We consider all the subscripted genrics as proxies for original class - if ( - attr.startswith('__') and attr.endswith('__') or - attr.startswith('_abc_') - ): - super(GenericMeta, self).__setattr__(attr, value) - else: - super(GenericMeta, _gorg(self)).__setattr__(attr, value) - - -# Prevent checks for Generic to crash when defining Generic. -Generic = None - - -def _generic_new(base_cls, cls, *args, **kwds): - # Assure type is erased on instantiation, - # but attempt to store it in __orig_class__ - if cls.__origin__ is None: - return base_cls.__new__(cls) - else: - origin = _gorg(cls) - obj = base_cls.__new__(origin) - try: - obj.__orig_class__ = cls - except AttributeError: - pass - obj.__init__(*args, **kwds) - return obj - - -class Generic(metaclass=GenericMeta): - """Abstract base class for generic types. - - A generic type is typically declared by inheriting from - this class parameterized with one or more type variables. - For example, a generic mapping type might be defined as:: - - class Mapping(Generic[KT, VT]): - def __getitem__(self, key: KT) -> VT: - ... - # Etc. - - This class can then be used as follows:: - - def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT: - try: - return mapping[key] - except KeyError: - return default - """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Generic): - raise TypeError("Type Generic cannot be instantiated; " - "it can be used only as a base class") - return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) - - -class _TypingEmpty: - """Internal placeholder for () or []. Used by TupleMeta and CallableMeta - to allow empty list/tuple in specific places, without allowing them - to sneak in where prohibited. - """ - - -class _TypingEllipsis: - """Internal placeholder for ... 
(ellipsis).""" - - -class TupleMeta(GenericMeta): - """Metaclass for Tuple (internal).""" - - @_tp_cache - def __getitem__(self, parameters): - if self.__origin__ is not None or not _geqv(self, Tuple): - # Normal generic rules apply if this is not the first subscription - # or a subscription of a subclass. - return super().__getitem__(parameters) - if parameters == (): - return super().__getitem__((_TypingEmpty,)) - if not isinstance(parameters, tuple): - parameters = (parameters,) - if len(parameters) == 2 and parameters[1] is ...: - msg = "Tuple[t, ...]: t must be a type." - p = _type_check(parameters[0], msg) - return super().__getitem__((p, _TypingEllipsis)) - msg = "Tuple[t0, t1, ...]: each t must be a type." - parameters = tuple(_type_check(p, msg) for p in parameters) - return super().__getitem__(parameters) - - def __instancecheck__(self, obj): - if self.__args__ is None: - return isinstance(obj, tuple) - raise TypeError("Parameterized Tuple cannot be used " - "with isinstance().") - - def __subclasscheck__(self, cls): - if self.__args__ is None: - return issubclass(cls, tuple) - raise TypeError("Parameterized Tuple cannot be used " - "with issubclass().") - - -class Tuple(tuple, extra=tuple, metaclass=TupleMeta): - """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. - - Example: Tuple[T1, T2] is a tuple of two elements corresponding - to type variables T1 and T2. Tuple[int, float, str] is a tuple - of an int, a float and a string. - - To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. - """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Tuple): - raise TypeError("Type Tuple cannot be instantiated; " - "use tuple() instead") - return _generic_new(tuple, cls, *args, **kwds) - - -class CallableMeta(GenericMeta): - """Metaclass for Callable (internal).""" - - def __repr__(self): - if self.__origin__ is None: - return super().__repr__() - return self._tree_repr(self._subs_tree()) - - def _tree_repr(self, tree): - if _gorg(self) is not Callable: - return super()._tree_repr(tree) - # For actual Callable (not its subclass) we override - # super()._tree_repr() for nice formatting. - arg_list = [] - for arg in tree[1:]: - if not isinstance(arg, tuple): - arg_list.append(_type_repr(arg)) - else: - arg_list.append(arg[0]._tree_repr(arg)) - if arg_list[0] == '...': - return repr(tree[0]) + '[..., %s]' % arg_list[1] - return (repr(tree[0]) + - '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1])) - - def __getitem__(self, parameters): - """A thin wrapper around __getitem_inner__ to provide the latter - with hashable arguments to improve speed. - """ - - if self.__origin__ is not None or not _geqv(self, Callable): - return super().__getitem__(parameters) - if not isinstance(parameters, tuple) or len(parameters) != 2: - raise TypeError("Callable must be used as " - "Callable[[arg, ...], result].") - args, result = parameters - if args is Ellipsis: - parameters = (Ellipsis, result) - else: - if not isinstance(args, list): - raise TypeError("Callable[args, result]: args must be a list." - " Got %.100r." % (args,)) - parameters = (tuple(args), result) - return self.__getitem_inner__(parameters) - - @_tp_cache - def __getitem_inner__(self, parameters): - args, result = parameters - msg = "Callable[args, result]: result must be a type." - result = _type_check(result, msg) - if args is Ellipsis: - return super().__getitem__((_TypingEllipsis, result)) - msg = "Callable[[arg, ...], result]: each arg must be a type." 
- args = tuple(_type_check(arg, msg) for arg in args) - parameters = args + (result,) - return super().__getitem__(parameters) - - -class Callable(extra=collections_abc.Callable, metaclass=CallableMeta): - """Callable type; Callable[[int], str] is a function of (int) -> str. - - The subscription syntax must always be used with exactly two - values: the argument list and the return type. The argument list - must be a list of types or ellipsis; the return type must be a single type. - - There is no syntax to indicate optional or keyword arguments, - such function types are rarely used as callback types. - """ - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Callable): - raise TypeError("Type Callable cannot be instantiated; " - "use a non-abstract subclass instead") - return _generic_new(cls.__next_in_mro__, cls, *args, **kwds) - - -class _ClassVar(_FinalTypingBase, _root=True): - """Special type construct to mark class variables. - - An annotation wrapped in ClassVar indicates that a given - attribute is intended to be used as a class variable and - should not be set on instances of that class. Usage:: - - class Starship: - stats: ClassVar[Dict[str, int]] = {} # class variable - damage: int = 10 # instance variable - - ClassVar accepts only types and cannot be further subscribed. - - Note that ClassVar is not a class itself, and should not - be used with isinstance() or issubclass(). - """ - - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(_type_check(item, - '{} accepts only single type.'.format(cls.__name__[1:])), - _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - new_tp = _eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(_type_repr(self.__type__)) - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _ClassVar): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - -ClassVar = _ClassVar(_root=True) - - -def cast(typ, val): - """Cast a value to a type. - - This returns the value unchanged. To the type checker this - signals that the return value has the designated type, but at - runtime we intentionally don't check anything (we want this - to be as fast as possible). - """ - return val - - -def _get_defaults(func): - """Internal helper to extract the default arguments, by name.""" - try: - code = func.__code__ - except AttributeError: - # Some built-in functions don't have __code__, __defaults__, etc. 
- return {} - pos_count = code.co_argcount - arg_names = code.co_varnames - arg_names = arg_names[:pos_count] - defaults = func.__defaults__ or () - kwdefaults = func.__kwdefaults__ - res = dict(kwdefaults) if kwdefaults else {} - pos_offset = pos_count - len(defaults) - for name, value in zip(arg_names[pos_offset:], defaults): - assert name not in res - res[name] = value - return res - - -_allowed_types = (types.FunctionType, types.BuiltinFunctionType, - types.MethodType, types.ModuleType, - SlotWrapperType, MethodWrapperType, MethodDescriptorType) - - -def get_type_hints(obj, globalns=None, localns=None): - """Return type hints for an object. - - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals, and if necessary - adds Optional[t] if a default value equal to None is set. - - The argument may be a module, class, method, or function. The annotations - are returned as a dictionary. For classes, annotations include also - inherited members. - - TypeError is raised if the argument is not of a type that can contain - annotations, and an empty dictionary is returned if no annotations are - present. - - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. - - - If no dict arguments are passed, an attempt is made to use the - globals from obj, and these are also used as the locals. If the - object does not appear to have globals, an exception is raised. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ - - if getattr(obj, '__no_type_check__', None): - return {} - if globalns is None: - globalns = getattr(obj, '__globals__', {}) - if localns is None: - localns = globalns - elif localns is None: - localns = globalns - # Classes require a special treatment. - if isinstance(obj, type): - hints = {} - for base in reversed(obj.__mro__): - ann = base.__dict__.get('__annotations__', {}) - for name, value in ann.items(): - if value is None: - value = type(None) - if isinstance(value, str): - value = _ForwardRef(value) - value = _eval_type(value, globalns, localns) - hints[name] = value - return hints - hints = getattr(obj, '__annotations__', None) - if hints is None: - # Return empty annotations for something that _could_ have them. - if isinstance(obj, _allowed_types): - return {} - else: - raise TypeError('{!r} is not a module, class, method, ' - 'or function.'.format(obj)) - defaults = _get_defaults(obj) - hints = dict(hints) - for name, value in hints.items(): - if value is None: - value = type(None) - if isinstance(value, str): - value = _ForwardRef(value) - value = _eval_type(value, globalns, localns) - if name in defaults and defaults[name] is None: - value = Optional[value] - hints[name] = value - return hints - - -def no_type_check(arg): - """Decorator to indicate that annotations are not type hints. - - The argument must be a class or function; if it is a class, it - applies recursively to all methods and classes defined in that class - (but not to methods defined in its superclasses or subclasses). - - This mutates the function(s) or class(es) in place. 
- """ - if isinstance(arg, type): - arg_attrs = arg.__dict__.copy() - for attr, val in arg.__dict__.items(): - if val in arg.__bases__: - arg_attrs.pop(attr) - for obj in arg_attrs.values(): - if isinstance(obj, types.FunctionType): - obj.__no_type_check__ = True - if isinstance(obj, type): - no_type_check(obj) - try: - arg.__no_type_check__ = True - except TypeError: # built-in classes - pass - return arg - - -def no_type_check_decorator(decorator): - """Decorator to give another decorator the @no_type_check effect. - - This wraps the decorator with something that wraps the decorated - function in @no_type_check. - """ - - @functools.wraps(decorator) - def wrapped_decorator(*args, **kwds): - func = decorator(*args, **kwds) - func = no_type_check(func) - return func - - return wrapped_decorator - - -def _overload_dummy(*args, **kwds): - """Helper for @overload to raise when called.""" - raise NotImplementedError( - "You should not call an overloaded function. " - "A series of @overload-decorated functions " - "outside a stub module should always be followed " - "by an implementation that is not @overload-ed.") - - -def overload(func): - """Decorator for overloaded functions/methods. - - In a stub file, place two or more stub definitions for the same - function in a row, each decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - - In a non-stub file (i.e. a regular .py file), do the same but - follow it with an implementation. The implementation should *not* - be decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - def utf8(value): - # implementation goes here - """ - return _overload_dummy - - -class _ProtocolMeta(GenericMeta): - """Internal metaclass for _Protocol. - - This exists so _Protocol classes can be generic without deriving - from Generic. - """ - - def __instancecheck__(self, obj): - if _Protocol not in self.__bases__: - return super().__instancecheck__(obj) - raise TypeError("Protocols cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - if not self._is_protocol: - # No structural checks since this isn't a protocol. - return NotImplemented - - if self is _Protocol: - # Every class is a subclass of the empty protocol. - return True - - # Find all attributes defined in the protocol. - attrs = self._get_protocol_attrs() - - for attr in attrs: - if not any(attr in d.__dict__ for d in cls.__mro__): - return False - return True - - def _get_protocol_attrs(self): - # Get all Protocol base classes. - protocol_bases = [] - for c in self.__mro__: - if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol': - protocol_bases.append(c) - - # Get attributes included in protocol. - attrs = set() - for base in protocol_bases: - for attr in base.__dict__.keys(): - # Include attributes not defined in any non-protocol bases. 
- for c in self.__mro__: - if (c is not base and attr in c.__dict__ and - not getattr(c, '_is_protocol', False)): - break - else: - if (not attr.startswith('_abc_') and - attr != '__abstractmethods__' and - attr != '__annotations__' and - attr != '__weakref__' and - attr != '_is_protocol' and - attr != '__dict__' and - attr != '__args__' and - attr != '__slots__' and - attr != '_get_protocol_attrs' and - attr != '__next_in_mro__' and - attr != '__parameters__' and - attr != '__origin__' and - attr != '__orig_bases__' and - attr != '__extra__' and - attr != '__tree_hash__' and - attr != '__module__'): - attrs.add(attr) - - return attrs - - -class _Protocol(metaclass=_ProtocolMeta): - """Internal base class for protocol classes. - - This implements a simple-minded structural issubclass check - (similar but more general than the one-offs in collections.abc - such as Hashable). - """ - - __slots__ = () - - _is_protocol = True - - -# Various ABCs mimicking those in collections.abc. -# A few are simply re-exported for completeness. - -Hashable = collections_abc.Hashable # Not generic. - - -if hasattr(collections_abc, 'Awaitable'): - class Awaitable(Generic[T_co], extra=collections_abc.Awaitable): - __slots__ = () - - __all__.append('Awaitable') - - -if hasattr(collections_abc, 'Coroutine'): - class Coroutine(Awaitable[V_co], Generic[T_co, T_contra, V_co], - extra=collections_abc.Coroutine): - __slots__ = () - - __all__.append('Coroutine') - - -if hasattr(collections_abc, 'AsyncIterable'): - - class AsyncIterable(Generic[T_co], extra=collections_abc.AsyncIterable): - __slots__ = () - - class AsyncIterator(AsyncIterable[T_co], - extra=collections_abc.AsyncIterator): - __slots__ = () - - __all__.append('AsyncIterable') - __all__.append('AsyncIterator') - - -class Iterable(Generic[T_co], extra=collections_abc.Iterable): - __slots__ = () - - -class Iterator(Iterable[T_co], extra=collections_abc.Iterator): - __slots__ = () - - -class SupportsInt(_Protocol): - __slots__ = () - - @abstractmethod - def __int__(self) -> int: - pass - - -class SupportsFloat(_Protocol): - __slots__ = () - - @abstractmethod - def __float__(self) -> float: - pass - - -class SupportsComplex(_Protocol): - __slots__ = () - - @abstractmethod - def __complex__(self) -> complex: - pass - - -class SupportsBytes(_Protocol): - __slots__ = () - - @abstractmethod - def __bytes__(self) -> bytes: - pass - - -class SupportsAbs(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __abs__(self) -> T_co: - pass - - -class SupportsRound(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __round__(self, ndigits: int = 0) -> T_co: - pass - - -if hasattr(collections_abc, 'Reversible'): - class Reversible(Iterable[T_co], extra=collections_abc.Reversible): - __slots__ = () -else: - class Reversible(_Protocol[T_co]): - __slots__ = () - - @abstractmethod - def __reversed__(self) -> 'Iterator[T_co]': - pass - - -Sized = collections_abc.Sized # Not generic. - - -class Container(Generic[T_co], extra=collections_abc.Container): - __slots__ = () - - -if hasattr(collections_abc, 'Collection'): - class Collection(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Collection): - __slots__ = () - - __all__.append('Collection') - - -# Callable was defined earlier. 
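Illustrative sketch, not part of the patch: the _ProtocolMeta.__subclasscheck__ deleted above is purely structural, so issubclass() accepts any class that defines the attributes a protocol declares, with no inheritance or registration required. Assuming a typing module that still provides SupportsInt as such a protocol:

    from typing import SupportsInt

    class Celsius:
        def __int__(self) -> int:
            return 21

    # Celsius defines __int__, the only attribute SupportsInt requires,
    # so the structural check accepts it.
    print(issubclass(Celsius, SupportsInt))  # True
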
- -if hasattr(collections_abc, 'Collection'): - class AbstractSet(Collection[T_co], - extra=collections_abc.Set): - __slots__ = () -else: - class AbstractSet(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Set): - __slots__ = () - - -class MutableSet(AbstractSet[T], extra=collections_abc.MutableSet): - __slots__ = () - - -# NOTE: It is only covariant in the value type. -if hasattr(collections_abc, 'Collection'): - class Mapping(Collection[KT], Generic[KT, VT_co], - extra=collections_abc.Mapping): - __slots__ = () -else: - class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co], - extra=collections_abc.Mapping): - __slots__ = () - - -class MutableMapping(Mapping[KT, VT], extra=collections_abc.MutableMapping): - __slots__ = () - - -if hasattr(collections_abc, 'Reversible'): - if hasattr(collections_abc, 'Collection'): - class Sequence(Reversible[T_co], Collection[T_co], - extra=collections_abc.Sequence): - __slots__ = () - else: - class Sequence(Sized, Reversible[T_co], Container[T_co], - extra=collections_abc.Sequence): - __slots__ = () -else: - class Sequence(Sized, Iterable[T_co], Container[T_co], - extra=collections_abc.Sequence): - __slots__ = () - - -class MutableSequence(Sequence[T], extra=collections_abc.MutableSequence): - __slots__ = () - - -class ByteString(Sequence[int], extra=collections_abc.ByteString): - __slots__ = () - - -class List(list, MutableSequence[T], extra=list): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, List): - raise TypeError("Type List cannot be instantiated; " - "use list() instead") - return _generic_new(list, cls, *args, **kwds) - - -class Deque(collections.deque, MutableSequence[T], extra=collections.deque): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Deque): - return collections.deque(*args, **kwds) - return _generic_new(collections.deque, cls, *args, **kwds) - - -class Set(set, MutableSet[T], extra=set): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Set): - raise TypeError("Type Set cannot be instantiated; " - "use set() instead") - return _generic_new(set, cls, *args, **kwds) - - -class FrozenSet(frozenset, AbstractSet[T_co], extra=frozenset): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, FrozenSet): - raise TypeError("Type FrozenSet cannot be instantiated; " - "use frozenset() instead") - return _generic_new(frozenset, cls, *args, **kwds) - - -class MappingView(Sized, Iterable[T_co], extra=collections_abc.MappingView): - __slots__ = () - - -class KeysView(MappingView[KT], AbstractSet[KT], - extra=collections_abc.KeysView): - __slots__ = () - - -class ItemsView(MappingView[Tuple[KT, VT_co]], - AbstractSet[Tuple[KT, VT_co]], - Generic[KT, VT_co], - extra=collections_abc.ItemsView): - __slots__ = () - - -class ValuesView(MappingView[VT_co], extra=collections_abc.ValuesView): - __slots__ = () - - -if hasattr(contextlib, 'AbstractContextManager'): - class ContextManager(Generic[T_co], extra=contextlib.AbstractContextManager): - __slots__ = () - __all__.append('ContextManager') - - -class Dict(dict, MutableMapping[KT, VT], extra=dict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Dict): - raise TypeError("Type Dict cannot be instantiated; " - "use dict() instead") - return _generic_new(dict, cls, *args, **kwds) - - -class DefaultDict(collections.defaultdict, MutableMapping[KT, VT], - extra=collections.defaultdict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, DefaultDict): 
- return collections.defaultdict(*args, **kwds) - return _generic_new(collections.defaultdict, cls, *args, **kwds) - - -class Counter(collections.Counter, Dict[T, int], extra=collections.Counter): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Counter): - return collections.Counter(*args, **kwds) - return _generic_new(collections.Counter, cls, *args, **kwds) - - -if hasattr(collections, 'ChainMap'): - # ChainMap only exists in 3.3+ - __all__.append('ChainMap') - - class ChainMap(collections.ChainMap, MutableMapping[KT, VT], - extra=collections.ChainMap): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, ChainMap): - return collections.ChainMap(*args, **kwds) - return _generic_new(collections.ChainMap, cls, *args, **kwds) - - -# Determine what base class to use for Generator. -if hasattr(collections_abc, 'Generator'): - # Sufficiently recent versions of 3.5 have a Generator ABC. - _G_base = collections_abc.Generator -else: - # Fall back on the exact type. - _G_base = types.GeneratorType - - -class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co], - extra=_G_base): - __slots__ = () - - def __new__(cls, *args, **kwds): - if _geqv(cls, Generator): - raise TypeError("Type Generator cannot be instantiated; " - "create a subclass instead") - return _generic_new(_G_base, cls, *args, **kwds) - - -if hasattr(collections_abc, 'AsyncGenerator'): - class AsyncGenerator(AsyncIterator[T_co], Generic[T_co, T_contra], - extra=collections_abc.AsyncGenerator): - __slots__ = () - - __all__.append('AsyncGenerator') - - -# Internal type variable used for Type[]. -CT_co = TypeVar('CT_co', covariant=True, bound=type) - - -# This is not a real generic class. Don't use outside annotations. -class Type(Generic[CT_co], extra=type): - """A special construct usable to annotate class objects. - - For example, suppose we have the following classes:: - - class User: ... # Abstract base for User classes - class BasicUser(User): ... - class ProUser(User): ... - class TeamUser(User): ... - - And a function that takes a class argument that's a subclass of - User and returns an instance of the corresponding class:: - - U = TypeVar('U', bound=User) - def new_user(user_class: Type[U]) -> U: - user = user_class() - # (Here we could write the user object to a database) - return user - - joe = new_user(BasicUser) - - At this point the type checker knows that joe has type BasicUser. - """ - - __slots__ = () - - -def _make_nmtuple(name, types): - msg = "NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type" - types = [(n, _type_check(t, msg)) for n, t in types] - nm_tpl = collections.namedtuple(name, [n for n, t in types]) - # Prior to PEP 526, only _field_types attribute was assigned. - # Now, both __annotations__ and _field_types are used to maintain compatibility. 
- nm_tpl.__annotations__ = nm_tpl._field_types = collections.OrderedDict(types) - try: - nm_tpl.__module__ = sys._getframe(2).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass - return nm_tpl - - -_PY36 = sys.version_info[:2] >= (3, 6) - -# attributes prohibited to set in NamedTuple class syntax -_prohibited = ('__new__', '__init__', '__slots__', '__getnewargs__', - '_fields', '_field_defaults', '_field_types', - '_make', '_replace', '_asdict') - -_special = ('__module__', '__name__', '__qualname__', '__annotations__') - - -class NamedTupleMeta(type): - - def __new__(cls, typename, bases, ns): - if ns.get('_root', False): - return super().__new__(cls, typename, bases, ns) - if not _PY36: - raise TypeError("Class syntax for NamedTuple is only supported" - " in Python 3.6+") - types = ns.get('__annotations__', {}) - nm_tpl = _make_nmtuple(typename, types.items()) - defaults = [] - defaults_dict = {} - for field_name in types: - if field_name in ns: - default_value = ns[field_name] - defaults.append(default_value) - defaults_dict[field_name] = default_value - elif defaults: - raise TypeError("Non-default namedtuple field {field_name} cannot " - "follow default field(s) {default_names}" - .format(field_name=field_name, - default_names=', '.join(defaults_dict.keys()))) - nm_tpl.__new__.__defaults__ = tuple(defaults) - nm_tpl._field_defaults = defaults_dict - # update from user namespace without overriding special namedtuple attributes - for key in ns: - if key in _prohibited: - raise AttributeError("Cannot overwrite NamedTuple attribute " + key) - elif key not in _special and key not in nm_tpl._fields: - setattr(nm_tpl, key, ns[key]) - return nm_tpl - - -class NamedTuple(metaclass=NamedTupleMeta): - """Typed version of namedtuple. - - Usage in Python versions >= 3.6:: - - class Employee(NamedTuple): - name: str - id: int - - This is equivalent to:: - - Employee = collections.namedtuple('Employee', ['name', 'id']) - - The resulting class has extra __annotations__ and _field_types - attributes, giving an ordered dict mapping field names to types. - __annotations__ should be preferred, while _field_types - is kept to maintain pre PEP 526 compatibility. (The field names - are in the _fields attribute, which is part of the namedtuple - API.) Alternative equivalent keyword syntax is also accepted:: - - Employee = NamedTuple('Employee', name=str, id=int) - - In Python versions <= 3.5 use:: - - Employee = NamedTuple('Employee', [('name', str), ('id', int)]) - """ - _root = True - - def __new__(self, typename, fields=None, **kwargs): - if kwargs and not _PY36: - raise TypeError("Keyword syntax for NamedTuple is only supported" - " in Python 3.6+") - if fields is None: - fields = kwargs.items() - elif kwargs: - raise TypeError("Either list of fields or keywords" - " can be provided to NamedTuple, not both") - return _make_nmtuple(typename, fields) - - -def NewType(name, tp): - """NewType creates simple unique types with almost zero - runtime overhead. NewType(name, tp) is considered a subtype of tp - by static type checkers. At runtime, NewType(name, tp) returns - a dummy function that simply returns its argument. Usage:: - - UserId = NewType('UserId', int) - - def name_by_id(user_id: UserId) -> str: - ... 
- - UserId('user') # Fails type check - - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK - - num = UserId(5) + 1 # type: int - """ - - def new_type(x): - return x - - new_type.__name__ = name - new_type.__supertype__ = tp - return new_type - - -# Python-version-specific alias (Python 2: unicode; Python 3: str) -Text = str - - -# Constant that's True when type checking, but False here. -TYPE_CHECKING = False - - -class IO(Generic[AnyStr]): - """Generic base class for TextIO and BinaryIO. - - This is an abstract, generic version of the return of open(). - - NOTE: This does not distinguish between the different possible - classes (text vs. binary, read vs. write vs. read/write, - append-only, unbuffered). The TextIO and BinaryIO subclasses - below capture the distinctions between text vs. binary, which is - pervasive in the interface; however we currently do not offer a - way to track the other distinctions in the type system. - """ - - __slots__ = () - - @abstractproperty - def mode(self) -> str: - pass - - @abstractproperty - def name(self) -> str: - pass - - @abstractmethod - def close(self) -> None: - pass - - @abstractmethod - def closed(self) -> bool: - pass - - @abstractmethod - def fileno(self) -> int: - pass - - @abstractmethod - def flush(self) -> None: - pass - - @abstractmethod - def isatty(self) -> bool: - pass - - @abstractmethod - def read(self, n: int = -1) -> AnyStr: - pass - - @abstractmethod - def readable(self) -> bool: - pass - - @abstractmethod - def readline(self, limit: int = -1) -> AnyStr: - pass - - @abstractmethod - def readlines(self, hint: int = -1) -> List[AnyStr]: - pass - - @abstractmethod - def seek(self, offset: int, whence: int = 0) -> int: - pass - - @abstractmethod - def seekable(self) -> bool: - pass - - @abstractmethod - def tell(self) -> int: - pass - - @abstractmethod - def truncate(self, size: int = None) -> int: - pass - - @abstractmethod - def writable(self) -> bool: - pass - - @abstractmethod - def write(self, s: AnyStr) -> int: - pass - - @abstractmethod - def writelines(self, lines: List[AnyStr]) -> None: - pass - - @abstractmethod - def __enter__(self) -> 'IO[AnyStr]': - pass - - @abstractmethod - def __exit__(self, type, value, traceback) -> None: - pass - - -class BinaryIO(IO[bytes]): - """Typed version of the return of open() in binary mode.""" - - __slots__ = () - - @abstractmethod - def write(self, s: Union[bytes, bytearray]) -> int: - pass - - @abstractmethod - def __enter__(self) -> 'BinaryIO': - pass - - -class TextIO(IO[str]): - """Typed version of the return of open() in text mode.""" - - __slots__ = () - - @abstractproperty - def buffer(self) -> BinaryIO: - pass - - @abstractproperty - def encoding(self) -> str: - pass - - @abstractproperty - def errors(self) -> Optional[str]: - pass - - @abstractproperty - def line_buffering(self) -> bool: - pass - - @abstractproperty - def newlines(self) -> Any: - pass - - @abstractmethod - def __enter__(self) -> 'TextIO': - pass - - -class io: - """Wrapper namespace for IO generic classes.""" - - __all__ = ['IO', 'TextIO', 'BinaryIO'] - IO = IO - TextIO = TextIO - BinaryIO = BinaryIO - - -io.__name__ = __name__ + '.io' -sys.modules[io.__name__] = io - - -Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')), - lambda p: p.pattern) -Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')), - lambda m: m.re.pattern) - - -class re: - """Wrapper namespace for re type aliases.""" - - __all__ = ['Pattern', 'Match'] - Pattern = Pattern - Match = Match - - 
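Illustrative sketch, not part of the patch: the Pattern and Match aliases deleted here are generic over AnyStr, which is how annotations distinguish str patterns from bytes patterns. Assuming a typing module that still exports them, typical usage looks like:

    import re
    from typing import Match, Optional, Pattern

    WORD: Pattern[str] = re.compile(r"\w+")

    def first_word(text: str) -> Optional[Match[str]]:
        # Pattern[str] only accepts str input; a bytes pattern would be Pattern[bytes].
        return WORD.search(text)
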
-re.__name__ = __name__ + '.re' -sys.modules[re.__name__] = re diff --git a/mypy/__main__.py b/mypy/__main__.py index 0a6f79261a53..625242d100be 100644 --- a/mypy/__main__.py +++ b/mypy/__main__.py @@ -2,4 +2,10 @@ from mypy.main import main -main(None) + +def console_entry() -> None: + main(None) + + +if __name__ == '__main__': + main(None) diff --git a/mypy/checker.py b/mypy/checker.py index 9bbfc21f7d50..98bb35474bf9 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -631,7 +631,8 @@ def is_implicit_any(t: Type) -> bool: self.check_reverse_op_method(item, typ, name) elif name in ('__getattr__', '__getattribute__'): self.check_getattr_method(typ, defn) - + elif name == '__setattr__': + self.check_setattr_method(typ, defn) # Refuse contravariant return type variable if isinstance(typ.ret_type, TypeVarType): if typ.ret_type.variance == CONTRAVARIANT: @@ -931,6 +932,15 @@ def check_getattr_method(self, typ: CallableType, context: Context) -> None: if not is_subtype(typ, method_type): self.msg.invalid_signature(typ, context) + def check_setattr_method(self, typ: CallableType, context: Context) -> None: + method_type = CallableType([AnyType(), self.named_type('builtins.str'), AnyType()], + [nodes.ARG_POS, nodes.ARG_POS, nodes.ARG_POS], + [None, None, None], + NoneTyp(), + self.named_type('builtins.function')) + if not is_subtype(typ, method_type): + self.msg.invalid_signature(typ, context) + def expand_typevars(self, defn: FuncItem, typ: CallableType) -> List[Tuple[FuncItem, CallableType]]: # TODO use generator @@ -1889,7 +1899,8 @@ def check_return_stmt(self, s: ReturnStmt) -> None: if isinstance(typ, AnyType): # (Unless you asked to be warned in that case, and the # function is not declared to return Any) - if not isinstance(return_type, AnyType) and self.options.warn_return_any: + if (self.options.warn_return_any and + not is_proper_subtype(AnyType(), return_type)): self.warn(messages.RETURN_ANY.format(return_type), s) return diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 091a41c76d35..027c0d85a854 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -156,7 +156,13 @@ def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type: result = type_object_type(node, self.named_type) elif isinstance(node, MypyFile): # Reference to a module object. - result = self.named_type('types.ModuleType') + try: + result = self.named_type('types.ModuleType') + except KeyError: + # In test cases might 'types' may not be available. + # Fall back to a dummy 'object' type instead to + # avoid a crash. 
+ result = self.named_type('builtins.object') elif isinstance(node, Decorator): result = self.analyze_var_ref(node.var, e) else: @@ -2035,10 +2041,11 @@ def analyze_super(self, e: SuperExpr, is_lvalue: bool) -> Type: return AnyType() def visit_slice_expr(self, e: SliceExpr) -> Type: + expected = make_optional_type(self.named_type('builtins.int')) for index in [e.begin_index, e.end_index, e.stride]: if index: t = self.accept(index) - self.chk.check_subtype(t, self.named_type('builtins.int'), + self.chk.check_subtype(t, expected, index, messages.INVALID_SLICE_INDEX) return self.named_type('builtins.slice') diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 37d4c6a75e4e..66949144eed8 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -245,6 +245,15 @@ def analyze_member_var_access(name: str, itype: Instance, info: TypeInfo, getattr_type = expand_type_by_instance(bound_method, typ) if isinstance(getattr_type, CallableType): return getattr_type.ret_type + else: + setattr_meth = info.get_method('__setattr__') + if setattr_meth and setattr_meth.info.fullname() != 'builtins.object': + setattr_func = function_type(setattr_meth, builtin_type('builtins.function')) + bound_type = bind_self(setattr_func, original_type) + typ = map_instance_to_supertype(itype, setattr_meth.info) + setattr_type = expand_type_by_instance(bound_type, typ) + if isinstance(setattr_type, CallableType) and len(setattr_type.arg_types) > 0: + return setattr_type.arg_types[-1] if itype.type.fallback_to_any: return AnyType() diff --git a/mypy/fastparse.py b/mypy/fastparse.py index bbf20c14be16..10ad642dcdf0 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -30,6 +30,7 @@ from mypy import experiments from mypy import messages from mypy.errors import Errors +from mypy.options import Options try: from typed_ast import ast3 @@ -60,14 +61,12 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None, - pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION, - custom_typing_module: str = None) -> MypyFile: + options: Options = Options()) -> MypyFile: + """Parse a source file, without doing any semantic analysis. Return the parse tree. If errors is not provided, raise ParseError on failure. Otherwise, use the errors object to report parse errors. - - The pyversion (major, minor) argument determines the Python syntax variant. 
""" raise_on_error = False if errors is None: @@ -76,14 +75,16 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None, errors.set_file('' if fnam is None else fnam, None) is_stub_file = bool(fnam) and fnam.endswith('.pyi') try: - assert pyversion[0] >= 3 or is_stub_file - feature_version = pyversion[1] if not is_stub_file else defaults.PYTHON3_VERSION[1] + if is_stub_file: + feature_version = defaults.PYTHON3_VERSION[1] + else: + assert options.python_version[0] >= 3 + feature_version = options.python_version[1] ast = ast3.parse(source, fnam, 'exec', feature_version=feature_version) - tree = ASTConverter(pyversion=pyversion, + tree = ASTConverter(options=options, is_stub=is_stub_file, errors=errors, - custom_typing_module=custom_typing_module, ).visit(ast) tree.path = fnam tree.is_stub = is_stub_file @@ -138,17 +139,15 @@ def is_no_type_check_decorator(expr: ast3.expr) -> bool: class ASTConverter(ast3.NodeTransformer): # type: ignore # typeshed PR #931 def __init__(self, - pyversion: Tuple[int, int], + options: Options, is_stub: bool, - errors: Errors, - custom_typing_module: str = None) -> None: + errors: Errors) -> None: self.class_nesting = 0 self.imports = [] # type: List[ImportBase] - self.pyversion = pyversion + self.options = options self.is_stub = is_stub self.errors = errors - self.custom_typing_module = custom_typing_module def fail(self, msg: str, line: int, column: int) -> None: self.errors.report(line, column, msg) @@ -262,9 +261,9 @@ def translate_module_id(self, id: str) -> str: For example, translate '__builtin__' in Python 2 to 'builtins'. """ - if id == self.custom_typing_module: + if id == self.options.custom_typing_module: return 'typing' - elif id == '__builtin__' and self.pyversion[0] == 2: + elif id == '__builtin__' and self.options.python_version[0] == 2: # HACK: __builtin__ in Python 2 is aliases to builtins. However, the implementation # is named __builtin__.py (there is another layer of translation elsewhere). return 'builtins' @@ -391,7 +390,7 @@ def do_func_def(self, n: Union[ast3.FunctionDef, ast3.AsyncFunctionDef], return func_def def set_type_optional(self, type: Type, initializer: Expression) -> None: - if not experiments.STRICT_OPTIONAL: + if self.options.no_implicit_optional or not experiments.STRICT_OPTIONAL: return # Indicate that type should be wrapped in an Optional if arg is initialized to None. optional = isinstance(initializer, NameExpr) and initializer.name == 'None' @@ -846,38 +845,48 @@ def visit_Num(self, n: ast3.Num) -> Union[IntExpr, FloatExpr, ComplexExpr]: # Str(string s) @with_line def visit_Str(self, n: ast3.Str) -> Union[UnicodeExpr, StrExpr]: - if self.pyversion[0] >= 3 or self.is_stub: - # Hack: assume all string literals in Python 2 stubs are normal - # strs (i.e. not unicode). All stubs are parsed with the Python 3 - # parser, which causes unprefixed string literals to be interpreted - # as unicode instead of bytes. This hack is generally okay, - # because mypy considers str literals to be compatible with - # unicode. - return StrExpr(n.s) - else: - return UnicodeExpr(n.s) + # Hack: assume all string literals in Python 2 stubs are normal + # strs (i.e. not unicode). All stubs are parsed with the Python 3 + # parser, which causes unprefixed string literals to be interpreted + # as unicode instead of bytes. This hack is generally okay, + # because mypy considers str literals to be compatible with + # unicode. 
+ return StrExpr(n.s) # Only available with typed_ast >= 0.6.2 if hasattr(ast3, 'JoinedStr'): # JoinedStr(expr* values) @with_line def visit_JoinedStr(self, n: ast3.JoinedStr) -> Expression: - arg_count = len(n.values) - format_string = StrExpr('{}' * arg_count) - format_string.set_line(n.lineno, n.col_offset) - format_method = MemberExpr(format_string, 'format') - format_method.set_line(format_string) - format_args = self.translate_expr_list(n.values) - format_arg_kinds = [ARG_POS] * arg_count - result_expression = CallExpr(format_method, - format_args, - format_arg_kinds) + # Each of n.values is a str or FormattedValue; we just concatenate + # them all using ''.join. + empty_string = StrExpr('') + empty_string.set_line(n.lineno, n.col_offset) + strs_to_join = ListExpr(self.translate_expr_list(n.values)) + strs_to_join.set_line(empty_string) + join_method = MemberExpr(empty_string, 'join') + join_method.set_line(empty_string) + result_expression = CallExpr(join_method, + [strs_to_join], + [ARG_POS]) return result_expression # FormattedValue(expr value) @with_line def visit_FormattedValue(self, n: ast3.FormattedValue) -> Expression: - return self.visit(n.value) + # A FormattedValue is a component of a JoinedStr, or it can exist + # on its own. We translate them to individual '{}'.format(value) + # calls -- we don't bother with the conversion/format_spec fields. + exp = self.visit(n.value) + exp.set_line(n.lineno, n.col_offset) + format_string = StrExpr('{}') + format_string.set_line(n.lineno, n.col_offset) + format_method = MemberExpr(format_string, 'format') + format_method.set_line(format_string) + result_expression = CallExpr(format_method, + [exp], + [ARG_POS]) + return result_expression # Bytes(bytes s) @with_line @@ -885,11 +894,7 @@ def visit_Bytes(self, n: ast3.Bytes) -> Union[BytesExpr, StrExpr]: # The following line is a bit hacky, but is the best way to maintain # compatibility with how mypy currently parses the contents of bytes literals. contents = str(n.s)[2:-1] - - if self.pyversion[0] >= 3: - return BytesExpr(contents) - else: - return StrExpr(contents) + return BytesExpr(contents) # NameConstant(singleton value) def visit_NameConstant(self, n: ast3.NameConstant) -> NameExpr: diff --git a/mypy/fastparse2.py b/mypy/fastparse2.py index b7d5e9d400db..109dfe407cf2 100644 --- a/mypy/fastparse2.py +++ b/mypy/fastparse2.py @@ -38,11 +38,11 @@ from mypy.types import ( Type, CallableType, AnyType, UnboundType, EllipsisType ) -from mypy import defaults from mypy import experiments from mypy import messages from mypy.errors import Errors from mypy.fastparse import TypeConverter, parse_type_comment +from mypy.options import Options try: from typed_ast import ast27 @@ -74,14 +74,11 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None, - pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION, - custom_typing_module: str = None) -> MypyFile: + options: Options = Options()) -> MypyFile: """Parse a source file, without doing any semantic analysis. Return the parse tree. If errors is not provided, raise ParseError on failure. Otherwise, use the errors object to report parse errors. - - The pyversion (major, minor) argument determines the Python syntax variant. 
""" raise_on_error = False if errors is None: @@ -90,12 +87,11 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None, errors.set_file('' if fnam is None else fnam, None) is_stub_file = bool(fnam) and fnam.endswith('.pyi') try: - assert pyversion[0] < 3 and not is_stub_file + assert options.python_version[0] < 3 and not is_stub_file ast = ast27.parse(source, fnam, 'exec') - tree = ASTConverter(pyversion=pyversion, + tree = ASTConverter(options=options, is_stub=is_stub_file, errors=errors, - custom_typing_module=custom_typing_module, ).visit(ast) assert isinstance(tree, MypyFile) tree.path = fnam @@ -137,17 +133,15 @@ def is_no_type_check_decorator(expr: ast27.expr) -> bool: class ASTConverter(ast27.NodeTransformer): def __init__(self, - pyversion: Tuple[int, int], + options: Options, is_stub: bool, - errors: Errors, - custom_typing_module: str = None) -> None: + errors: Errors) -> None: self.class_nesting = 0 self.imports = [] # type: List[ImportBase] - self.pyversion = pyversion + self.options = options self.is_stub = is_stub self.errors = errors - self.custom_typing_module = custom_typing_module def fail(self, msg: str, line: int, column: int) -> None: self.errors.report(line, column, msg) @@ -262,9 +256,9 @@ def translate_module_id(self, id: str) -> str: For example, translate '__builtin__' in Python 2 to 'builtins'. """ - if id == self.custom_typing_module: + if id == self.options.custom_typing_module: return 'typing' - elif id == '__builtin__' and self.pyversion[0] == 2: + elif id == '__builtin__': # HACK: __builtin__ in Python 2 is aliases to builtins. However, the implementation # is named __builtin__.py (there is another layer of translation elsewhere). return 'builtins' @@ -370,7 +364,7 @@ def visit_FunctionDef(self, n: ast27.FunctionDef) -> Statement: return func_def def set_type_optional(self, type: Type, initializer: Expression) -> None: - if not experiments.STRICT_OPTIONAL: + if self.options.no_implicit_optional or not experiments.STRICT_OPTIONAL: return # Indicate that type should be wrapped in an Optional if arg is initialized to None. optional = isinstance(initializer, NameExpr) and initializer.name == 'None' @@ -870,16 +864,9 @@ def visit_Str(self, s: ast27.Str) -> Expression: # The following line is a bit hacky, but is the best way to maintain # compatibility with how mypy currently parses the contents of bytes literals. contents = str(n)[2:-1] - - if self.pyversion[0] >= 3: - return BytesExpr(contents) - else: - return StrExpr(contents) + return StrExpr(contents) else: - if self.pyversion[0] >= 3 or self.is_stub: - return StrExpr(s.s) - else: - return UnicodeExpr(s.s) + return UnicodeExpr(s.s) # Ellipsis def visit_Ellipsis(self, n: ast27.Ellipsis) -> EllipsisExpr: diff --git a/mypy/main.py b/mypy/main.py index 8483d66587fe..422ca3ccec03 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -24,6 +24,10 @@ PY_EXTENSIONS = tuple(PYTHON_EXTENSIONS) +class InvalidPackageName(Exception): + """Exception indicating that a package name was invalid.""" + + def main(script_path: str, args: List[str] = None) -> None: """Main entry point to the type checker. 
@@ -270,6 +274,8 @@ def add_invertible_flag(flag: str, add_invertible_flag('--show-error-context', default=False, dest='show_error_context', help='Precede errors with "note:" messages explaining context') + add_invertible_flag('--no-implicit-optional', default=False, strict_flag=True, + help="don't assume arguments with default values of None are Optional") parser.add_argument('-i', '--incremental', action='store_true', help="enable module cache") parser.add_argument('--quick-and-dirty', action='store_true', @@ -480,9 +486,15 @@ def add_invertible_flag(flag: str, targets = [] for f in special_opts.files: if f.endswith(PY_EXTENSIONS): - targets.append(BuildSource(f, crawl_up(f)[1], None)) + try: + targets.append(BuildSource(f, crawl_up(f)[1], None)) + except InvalidPackageName as e: + fail(str(e)) elif os.path.isdir(f): - sub_targets = expand_dir(f) + try: + sub_targets = expand_dir(f) + except InvalidPackageName as e: + fail(str(e)) if not sub_targets: fail("There are no .py[i] files in directory '{}'" .format(f)) @@ -549,10 +561,14 @@ def crawl_up(arg: str) -> Tuple[str, str]: dir, base = os.path.split(dir) if not base: break + # Ensure that base is a valid python module name + if not base.isidentifier(): + raise InvalidPackageName('{} is not a valid Python package name'.format(base)) if mod == '__init__' or not mod: mod = base else: mod = base + '.' + mod + return dir, mod diff --git a/mypy/options.py b/mypy/options.py index b558470520f3..69f99cce9501 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -30,6 +30,7 @@ class Options: "warn_return_any", "ignore_errors", "strict_boolean", + "no_implicit_optional", } OPTIONS_AFFECTING_CACHE = PER_MODULE_OPTIONS | {"strict_optional", "quick_and_dirty"} @@ -94,6 +95,9 @@ def __init__(self) -> None: # Alternate way to show/hide strict-None-checking related errors self.show_none_errors = True + # Don't assume arguments with default values of None are Optional + self.no_implicit_optional = False + # Use script name instead of __main__ self.scripts_are_modules = False diff --git a/mypy/parse.py b/mypy/parse.py index 13fd58be3f60..2e02269f5e46 100644 --- a/mypy/parse.py +++ b/mypy/parse.py @@ -22,12 +22,10 @@ def parse(source: Union[str, bytes], return mypy.fastparse.parse(source, fnam=fnam, errors=errors, - pyversion=options.python_version, - custom_typing_module=options.custom_typing_module) + options=options) else: import mypy.fastparse2 return mypy.fastparse2.parse(source, fnam=fnam, errors=errors, - pyversion=options.python_version, - custom_typing_module=options.custom_typing_module) + options=options) diff --git a/mypy/report.py b/mypy/report.py index 061e07476b21..e53a68c569c1 100644 --- a/mypy/report.py +++ b/mypy/report.py @@ -396,7 +396,7 @@ def on_file(self, etree.SubElement(root, 'line', number=str(lineno), precision=stats.precision_names[status], - content=line_text[:-1]) + content=line_text.rstrip('\n')) # Assumes a layout similar to what XmlReporter uses. xslt_path = os.path.relpath('mypy-html.xslt', path) transform_pi = etree.ProcessingInstruction('xml-stylesheet', diff --git a/mypy/semanal.py b/mypy/semanal.py index af2bd63160e3..523edc8563e0 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -163,6 +163,21 @@ '_make', '_replace', '_asdict', '_source', '__annotations__') +# Map from the full name of a missing definition to the test fixture (under +# test-data/unit/fixtures/) that provides the definition. This is used for +# generating better error messages when running mypy tests only. 
+SUGGESTED_TEST_FIXTURES = { + 'typing.List': 'list.pyi', + 'typing.Dict': 'dict.pyi', + 'typing.Set': 'set.pyi', + 'builtins.bool': 'bool.pyi', + 'builtins.Exception': 'exception.pyi', + 'builtins.BaseException': 'exception.pyi', + 'builtins.isinstance': 'isinstancelist.pyi', + 'builtins.property': 'property.pyi', + 'builtins.classmethod': 'classmethod.pyi', +} + class SemanticAnalyzer(NodeVisitor): """Semantically analyze parsed mypy files. @@ -550,9 +565,10 @@ def analyze_property_with_multi_part_definition(self, defn: OverloadedFuncDef) - first_item.var.is_settable_property = True # Get abstractness from the original definition. item.func.is_abstract = first_item.func.is_abstract - item.func.accept(self) else: self.fail("Decorated property not supported", item) + if isinstance(item, Decorator): + item.func.accept(self) def analyze_function(self, defn: FuncItem) -> None: is_method = self.is_class_scope() @@ -1380,20 +1396,31 @@ def process_import_over_existing_name(self, def normalize_type_alias(self, node: SymbolTableNode, ctx: Context) -> SymbolTableNode: normalized = False - if node.fullname in type_aliases: + fullname = node.fullname + if fullname in type_aliases: # Node refers to an aliased type such as typing.List; normalize. - node = self.lookup_qualified(type_aliases[node.fullname], ctx) + node = self.lookup_qualified(type_aliases[fullname], ctx) + if node is None: + self.add_fixture_note(fullname, ctx) + return None normalized = True - if node.fullname in collections_type_aliases: + if fullname in collections_type_aliases: # Similar, but for types from the collections module like typing.DefaultDict self.add_module_symbol('collections', '__mypy_collections__', False, ctx) - node = self.lookup_qualified(collections_type_aliases[node.fullname], ctx) + node = self.lookup_qualified(collections_type_aliases[fullname], ctx) normalized = True if normalized: node = SymbolTableNode(node.kind, node.node, node.mod_id, node.type_override, normalized=True) return node + def add_fixture_note(self, fullname: str, ctx: Context) -> None: + self.note('Maybe your test fixture does not define "{}"?'.format(fullname), ctx) + if fullname in SUGGESTED_TEST_FIXTURES: + self.note( + 'Consider adding [builtins fixtures/{}] to your test description'.format( + SUGGESTED_TEST_FIXTURES[fullname]), ctx) + def correct_relative_import(self, node: Union[ImportFrom, ImportAll]) -> str: if node.relative == 0: return node.id @@ -1527,6 +1554,8 @@ def visit_assignment_stmt(self, s: AssignmentStmt) -> None: self.process_namedtuple_definition(s) self.process_typeddict_definition(s) self.process_enum_call(s) + if not s.type: + self.process_module_assignment(s.lvalues, s.rvalue, s) if (len(s.lvalues) == 1 and isinstance(s.lvalues[0], NameExpr) and s.lvalues[0].name == '__all__' and s.lvalues[0].kind == GDEF and @@ -1829,10 +1858,6 @@ def check_newtype_args(self, name: str, call: CallExpr, context: Context) -> Opt return None old_type = self.anal_type(unanalyzed_type) - if isinstance(old_type, Instance) and old_type.type.is_newtype: - self.fail("Argument 2 to NewType(...) 
cannot be another NewType", context) - has_failed = True - return None if has_failed else old_type def build_newtype_typeinfo(self, name: str, old_type: Type, base_type: Instance) -> TypeInfo: @@ -2377,6 +2402,66 @@ def is_classvar(self, typ: Type) -> bool: def fail_invalid_classvar(self, context: Context) -> None: self.fail('ClassVar can only be used for assignments in class body', context) + def process_module_assignment(self, lvals: List[Expression], rval: Expression, + ctx: AssignmentStmt) -> None: + """Propagate module references across assignments. + + Recursively handles the simple form of iterable unpacking; doesn't + handle advanced unpacking with *rest, dictionary unpacking, etc. + + In an expression like x = y = z, z is the rval and lvals will be [x, + y]. + + """ + if all(isinstance(v, (TupleExpr, ListExpr)) for v in lvals + [rval]): + # rval and all lvals are either list or tuple, so we are dealing + # with unpacking assignment like `x, y = a, b`. Mypy didn't + # understand our all(isinstance(...)), so cast them as + # Union[TupleExpr, ListExpr] so mypy knows it is safe to access + # their .items attribute. + seq_lvals = cast(List[Union[TupleExpr, ListExpr]], lvals) + seq_rval = cast(Union[TupleExpr, ListExpr], rval) + # given an assignment like: + # (x, y) = (m, n) = (a, b) + # we now have: + # seq_lvals = [(x, y), (m, n)] + # seq_rval = (a, b) + # We now zip this into: + # elementwise_assignments = [(a, x, m), (b, y, n)] + # where each elementwise assignment includes one element of rval and the + # corresponding element of each lval. Basically we unpack + # (x, y) = (m, n) = (a, b) + # into elementwise assignments + # x = m = a + # y = n = b + # and then we recursively call this method for each of those assignments. + # If the rval and all lvals are not all of the same length, zip will just ignore + # extra elements, so no error will be raised here; mypy will later complain + # about the length mismatch in type-checking. 
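+            # (For example, with "x, y = m, n, o" zip() pairs up only the first
+            # two rvalues here; the type checker reports the "Too many values to
+            # unpack" error later.)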
+ elementwise_assignments = zip(seq_rval.items, *[v.items for v in seq_lvals]) + for rv, *lvs in elementwise_assignments: + self.process_module_assignment(lvs, rv, ctx) + elif isinstance(rval, NameExpr): + rnode = self.lookup(rval.name, ctx) + if rnode and rnode.kind == MODULE_REF: + for lval in lvals: + if not isinstance(lval, NameExpr): + continue + # respect explicitly annotated type + if (isinstance(lval.node, Var) and lval.node.type is not None): + continue + lnode = self.lookup(lval.name, ctx) + if lnode: + if lnode.kind == MODULE_REF and lnode.node is not rnode.node: + self.fail( + "Cannot assign multiple modules to name '{}' " + "without explicit 'types.ModuleType' annotation".format(lval.name), + ctx) + # never create module alias except on initial var definition + elif lval.is_def: + lnode.kind = MODULE_REF + lnode.node = rnode.node + def process_enum_call(self, s: AssignmentStmt) -> None: """Check if s defines an Enum; if yes, store the definition in symbol table.""" if len(s.lvalues) != 1 or not isinstance(s.lvalues[0], NameExpr): @@ -2948,21 +3033,34 @@ def visit_member_expr(self, expr: MemberExpr) -> None: if full_name in obsolete_name_mapping: self.fail("Module%s has no attribute %r (it's now called %r)" % ( mod_name, expr.name, obsolete_name_mapping[full_name]), expr) - elif isinstance(base, RefExpr) and isinstance(base.node, TypeInfo): - n = base.node.names.get(expr.name) - if n is not None and (n.kind == MODULE_REF or isinstance(n.node, TypeInfo)): - # This branch handles the case C.bar where C is a class and - # bar is a type definition or a module resulting from - # `import bar` inside class C. Here base.node is a TypeInfo, - # and again we look up the name in its namespace. - # This is done only when bar is a module or a type; other - # things (e.g. methods) are handled by other code in checkmember. - n = self.normalize_type_alias(n, expr) - if not n: - return - expr.kind = n.kind - expr.fullname = n.fullname - expr.node = n.node + elif isinstance(base, RefExpr): + # This branch handles the case C.bar (or cls.bar or self.bar inside + # a classmethod/method), where C is a class and bar is a type + # definition or a module resulting from `import bar` (or a module + # assignment) inside class C. We look up bar in the class' TypeInfo + # namespace. This is done only when bar is a module or a type; + # other things (e.g. methods) are handled by other code in + # checkmember. + type_info = None + if isinstance(base.node, TypeInfo): + # C.bar where C is a class + type_info = base.node + elif isinstance(base.node, Var) and self.type and self.function_stack: + # check for self.bar or cls.bar in method/classmethod + func_def = self.function_stack[-1] + if not func_def.is_static and isinstance(func_def.type, CallableType): + formal_arg = func_def.type.argument_by_name(base.node.name()) + if formal_arg and formal_arg.pos == 0: + type_info = self.type + if type_info: + n = type_info.names.get(expr.name) + if n is not None and (n.kind == MODULE_REF or isinstance(n.node, TypeInfo)): + n = self.normalize_type_alias(n, expr) + if not n: + return + expr.kind = n.kind + expr.fullname = n.fullname + expr.node = n.node def visit_op_expr(self, expr: OpExpr) -> None: expr.left.accept(self) @@ -3358,6 +3456,12 @@ def name_not_defined(self, name: str, ctx: Context) -> None: if extra: message += ' {}'.format(extra) self.fail(message, ctx) + if 'builtins.{}'.format(name) in SUGGESTED_TEST_FIXTURES: + # The user probably has a missing definition in a test fixture. Let's verify. 
+ fullname = 'builtins.{}'.format(name) + if self.lookup_fully_qualified_or_none(fullname) is None: + # Yes. Generate a helpful note. + self.add_fixture_note(fullname, ctx) def name_already_defined(self, name: str, ctx: Context) -> None: self.fail("Name '{}' already defined".format(name), ctx) diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 8ca6421a0a91..b03843fba9a4 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -69,11 +69,6 @@ def is_subtype(left: Type, right: Type, elif is_subtype_of_item: return True # otherwise, fall through - # Treat builtins.type the same as Type[Any] - elif is_named_instance(left, 'builtins.type'): - return is_subtype(TypeType(AnyType()), right) - elif is_named_instance(right, 'builtins.type'): - return is_subtype(left, TypeType(AnyType())) return left.accept(SubtypeVisitor(right, type_parameter_checker, ignore_pos_arg_names=ignore_pos_arg_names)) @@ -158,16 +153,18 @@ def visit_instance(self, left: Instance) -> bool: item = right.item if isinstance(item, TupleType): item = item.fallback - if isinstance(item, Instance): - return is_subtype(left, item.type.metaclass_type) - elif isinstance(item, AnyType): - # Special case: all metaclasses are subtypes of Type[Any] - mro = left.type.mro or [] - return any(base.fullname() == 'builtins.type' for base in mro) - else: - return False - else: - return False + if is_named_instance(left, 'builtins.type'): + return is_subtype(TypeType(AnyType()), right) + if left.type.is_metaclass(): + if isinstance(item, AnyType): + return True + if isinstance(item, Instance): + # Special-case enum since we don't have better way of expressing it + if (is_named_instance(left, 'enum.EnumMeta') + and is_named_instance(item, 'enum.Enum')): + return True + return is_named_instance(item, 'builtins.object') + return False def visit_type_var(self, left: TypeVarType) -> bool: right = self.right @@ -263,8 +260,8 @@ def visit_overloaded(self, left: Overloaded) -> bool: elif isinstance(right, TypeType): # All the items must have the same type object status, so # it's sufficient to query only (any) one of them. - # This is unsound, we don't check the __init__ signature. - return left.is_type_obj() and is_subtype(left.items()[0].ret_type, right.item) + # This is unsound, we don't check all the __init__ signatures. + return left.is_type_obj() and is_subtype(left.items()[0], right) else: return False @@ -284,11 +281,14 @@ def visit_type_type(self, left: TypeType) -> bool: # This is unsound, we don't check the __init__ signature. return is_subtype(left.item, right.ret_type) if isinstance(right, Instance): - if right.type.fullname() == 'builtins.object': - # treat builtins.object the same as Any. 
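+            # A bare 'type' annotation on the right now accepts any Type[...]
+            # value, i.e. 'builtins.type' is treated like Type[Any] here.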
+ if right.type.fullname() in ['builtins.object', 'builtins.type']: return True item = left.item - return isinstance(item, Instance) and is_subtype(item, right.type.metaclass_type) + if isinstance(item, TypeVarType): + item = item.upper_bound + if isinstance(item, Instance): + metaclass = item.type.metaclass_type + return metaclass is not None and is_subtype(metaclass, right) return False diff --git a/mypy/test/data.py b/mypy/test/data.py index b72d9c5ea5ac..ccee92eac276 100644 --- a/mypy/test/data.py +++ b/mypy/test/data.py @@ -54,6 +54,7 @@ def parse_test_cases( output_files = [] # type: List[Tuple[str, str]] # path and contents for output files tcout = [] # type: List[str] # Regular output errors tcout2 = {} # type: Dict[int, List[str]] # Output errors for incremental, runs 2+ + deleted_paths = {} # type: Dict[int, Set[str]] # from run number of paths stale_modules = {} # type: Dict[int, Set[str]] # from run number to module names rechecked_modules = {} # type: Dict[ int, Set[str]] # from run number module names while i < len(p) and p[i].id != 'case': @@ -67,7 +68,7 @@ def parse_test_cases( elif p[i].id == 'outfile': output_files.append(file_entry) elif p[i].id in ('builtins', 'builtins_py2'): - # Use a custom source file for the std module. + # Use an alternative stub file for the builtins module. arg = p[i].arg assert arg is not None mpath = join(os.path.dirname(path), arg) @@ -78,6 +79,13 @@ def parse_test_cases( fnam = '__builtin__.pyi' with open(mpath) as f: files.append((join(base_path, fnam), f.read())) + elif p[i].id == 'typing': + # Use an alternative stub file for the typing module. + arg = p[i].arg + assert arg is not None + src_path = join(os.path.dirname(path), arg) + with open(src_path) as f: + files.append((join(base_path, 'typing.pyi'), f.read())) elif re.match(r'stale[0-9]*$', p[i].id): if p[i].id == 'stale': passnum = 1 @@ -99,6 +107,16 @@ def parse_test_cases( rechecked_modules[passnum] = set() else: rechecked_modules[passnum] = {item.strip() for item in arg.split(',')} + elif p[i].id == 'delete': + # File to delete during a multi-step test case + arg = p[i].arg + assert arg is not None + m = re.match(r'(.*)\.([0-9]+)$', arg) + assert m, 'Invalid delete section: {}'.format(arg) + num = int(m.group(2)) + assert num >= 2, "Can't delete during step {}".format(num) + full = join(base_path, m.group(1)) + deleted_paths.setdefault(num, set()).add(full) elif p[i].id == 'out' or p[i].id == 'out1': tcout = p[i].data if native_sep and os.path.sep == '\\': @@ -142,7 +160,7 @@ def parse_test_cases( tc = DataDrivenTestCase(p[i0].arg, input, tcout, tcout2, path, p[i0].line, lastline, perform, files, output_files, stale_modules, - rechecked_modules, native_sep) + rechecked_modules, deleted_paths, native_sep) out.append(tc) if not ok: raise ValueError( @@ -180,6 +198,7 @@ def __init__(self, output_files: List[Tuple[str, str]], expected_stale_modules: Dict[int, Set[str]], expected_rechecked_modules: Dict[int, Set[str]], + deleted_paths: Dict[int, Set[str]], native_sep: bool = False, ) -> None: super().__init__(name) @@ -194,24 +213,30 @@ def __init__(self, self.output_files = output_files self.expected_stale_modules = expected_stale_modules self.expected_rechecked_modules = expected_rechecked_modules + self.deleted_paths = deleted_paths self.native_sep = native_sep def set_up(self) -> None: super().set_up() encountered_files = set() self.clean_up = [] + all_deleted = [] # type: List[str] + for paths in self.deleted_paths.values(): + all_deleted += paths for path, content in self.files: 
dir = os.path.dirname(path) for d in self.add_dirs(dir): self.clean_up.append((True, d)) with open(path, 'w') as f: f.write(content) - self.clean_up.append((False, path)) + if path not in all_deleted: + # TODO: Don't assume that deleted files don't get reintroduced. + self.clean_up.append((False, path)) encountered_files.add(path) if re.search(r'\.[2-9]$', path): # Make sure new files introduced in the second and later runs are accounted for renamed_path = path[:-2] - if renamed_path not in encountered_files: + if renamed_path not in encountered_files and renamed_path not in all_deleted: encountered_files.add(renamed_path) self.clean_up.append((False, renamed_path)) for path, _ in self.output_files: diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py index 32d889bd20cf..2f28ab1e47a5 100644 --- a/mypy/test/helpers.py +++ b/mypy/test/helpers.py @@ -1,8 +1,9 @@ -import sys -import re import os +import re +import sys +import time -from typing import List, Dict, Tuple +from typing import List, Dict, Tuple, Callable, Any from mypy import defaults from mypy.myunit import AssertionFailure @@ -283,3 +284,26 @@ def normalize_error_messages(messages: List[str]) -> List[str]: for m in messages: a.append(m.replace(os.sep, '/')) return a + + +def retry_on_error(func: Callable[[], Any], max_wait: float = 1.0) -> None: + """Retry callback with exponential backoff when it raises OSError. + + If the function still generates an error after max_wait seconds, propagate + the exception. + + This can be effective against random file system operation failures on + Windows. + """ + t0 = time.time() + wait_time = 0.01 + while True: + try: + func() + return + except OSError: + wait_time = min(wait_time * 2, t0 + max_wait - time.time()) + if wait_time <= 0.01: + # Done enough waiting, the error seems persistent. + raise + time.sleep(wait_time) diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index df28afcaff85..91a818ac0f01 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -17,7 +17,7 @@ from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite from mypy.test.helpers import ( assert_string_arrays_equal, normalize_error_messages, - testcase_pyversion, update_testcase_output, + retry_on_error, testcase_pyversion, update_testcase_output, ) from mypy.errors import CompileError from mypy.options import Options @@ -75,6 +75,7 @@ 'check-underscores.test', 'check-classvar.test', 'check-enum.test', + 'check-incomplete-fixture.test', ] @@ -147,13 +148,18 @@ def run_case_once(self, testcase: DataDrivenTestCase, incremental_step: int = 0) if file.endswith('.' + str(incremental_step)): full = os.path.join(dn, file) target = full[:-2] - shutil.copy(full, target) + # Use retries to work around potential flakiness on Windows (AppVeyor). + retry_on_error(lambda: shutil.copy(full, target)) # In some systems, mtime has a resolution of 1 second which can cause # annoying-to-debug issues when a file has the same size after a # change. We manually set the mtime to circumvent this. new_time = os.stat(target).st_mtime + 1 os.utime(target, times=(new_time, new_time)) + # Delete files scheduled to be deleted in [delete .num] sections. + for path in testcase.deleted_paths.get(incremental_step, set()): + # Use retries to work around potential flakiness on Windows (AppVeyor). + retry_on_error(lambda: os.remove(path)) # Parse options after moving files (in case mypy.ini is being moved). 
options = self.parse_options(original_program_text, testcase, incremental_step) diff --git a/mypy/test/testpythoneval.py b/mypy/test/testpythoneval.py index 85e9aa3751d7..602692e073e4 100644 --- a/mypy/test/testpythoneval.py +++ b/mypy/test/testpythoneval.py @@ -84,14 +84,8 @@ def test_python_evaluation(testcase: DataDrivenTestCase) -> None: # This uses the same PYTHONPATH as the current process. returncode, out = run(mypy_cmdline) if returncode == 0: - # Set up module path for the execution. - # This needs the typing module but *not* the mypy module. - vers_dir = '2.7' if py2 else '3.2' - typing_path = os.path.join(testcase.old_cwd, 'lib-typing', vers_dir) - assert os.path.isdir(typing_path) - env = os.environ.copy() - env['PYTHONPATH'] = typing_path - returncode, interp_out = run([interpreter, program], env=env) + # Execute the program. + returncode, interp_out = run([interpreter, program]) out += interp_out # Remove temp file. os.remove(program_path) diff --git a/mypy/waiter.py b/mypy/waiter.py index 0f1759fefab8..e8ba99d4efd8 100644 --- a/mypy/waiter.py +++ b/mypy/waiter.py @@ -9,7 +9,7 @@ from multiprocessing import cpu_count import pipes import re -from subprocess import Popen, STDOUT +from subprocess import Popen, STDOUT, DEVNULL import sys import tempfile import time @@ -25,16 +25,22 @@ class LazySubprocess: """Wrapper around a subprocess that runs a test task.""" def __init__(self, name: str, args: List[str], *, cwd: str = None, - env: Dict[str, str] = None) -> None: + env: Dict[str, str] = None, passthrough: Optional[int] = None) -> None: self.name = name self.args = args self.cwd = cwd self.env = env self.start_time = None # type: float self.end_time = None # type: float + # None means no passthrough + # otherwise, it represents verbosity level + self.passthrough = passthrough def start(self) -> None: - self.outfile = tempfile.TemporaryFile() + if self.passthrough is None or self.passthrough < 0: + self.outfile = tempfile.TemporaryFile() + else: + self.outfile = None self.start_time = time.perf_counter() self.process = Popen(self.args, cwd=self.cwd, env=self.env, stdout=self.outfile, stderr=STDOUT) @@ -47,6 +53,8 @@ def status(self) -> Optional[int]: return self.process.returncode def read_output(self) -> str: + if not self.outfile: + return '' file = self.outfile file.seek(0) # Assume it's ascii to avoid unicode headaches (and portability issues). diff --git a/runtests.py b/runtests.py index 83a6ffa0d3da..634b4ce83f2e 100755 --- a/runtests.py +++ b/runtests.py @@ -1,29 +1,6 @@ #!/usr/bin/env python3 """Mypy test runner.""" -if False: - import typing - -if True: - # When this is run as a script, `typing` is not available yet. - import sys - from os.path import join, isdir - - def get_versions(): # type: () -> typing.List[str] - major = sys.version_info[0] - minor = sys.version_info[1] - if major == 2: - return ['2.7'] - else: - # generates list of python versions to use. - # For Python2, this is only [2.7]. - # Otherwise, it is [3.4, 3.3, 3.2, 3.1, 3.0]. - return ['%d.%d' % (major, i) for i in range(minor, -1, -1)] - - sys.path[0:0] = [v for v in [join('lib-typing', v) for v in get_versions()] if isdir(v)] - # Now `typing` is available. 
- - from typing import Dict, List, Optional, Set, Iterable from mypy.waiter import Waiter, LazySubprocess @@ -33,8 +10,21 @@ def get_versions(): # type: () -> typing.List[str] import itertools import os +from os.path import join, isdir import re -import json +import sys + + +def get_versions(): # type: () -> List[str] + major = sys.version_info[0] + minor = sys.version_info[1] + if major == 2: + return ['2.7'] + else: + # generates list of python versions to use. + # For Python2, this is only [2.7]. + # Otherwise, it is [3.4, 3.3, 3.2, 3.1, 3.0]. + return ['%d.%d' % (major, i) for i in range(minor, -1, -1)] # Ideally, all tests would be `discover`able so that they can be driven @@ -111,7 +101,8 @@ def add_pytest(self, name: str, pytest_args: List[str], coverage: bool = False) else: args = [sys.executable, '-m', 'pytest'] + pytest_args - self.waiter.add(LazySubprocess(full_name, args, env=self.env), sequential=True) + self.waiter.add(LazySubprocess(full_name, args, env=self.env, passthrough=self.verbosity), + sequential=True) def add_python(self, name: str, *args: str, cwd: Optional[str] = None) -> None: name = 'run %s' % name @@ -421,6 +412,13 @@ def main() -> None: pyt_arglist.append('--lf') if ff: pyt_arglist.append('--ff') + if verbosity >= 1: + pyt_arglist.extend(['-v'] * verbosity) + elif verbosity < 0: + pyt_arglist.extend(['-q'] * (-verbosity)) + if parallel_limit: + if '-n' not in pyt_arglist: + pyt_arglist.append('-n{}'.format(parallel_limit)) driver = Driver(whitelist=whitelist, blacklist=blacklist, lf=lf, ff=ff, arglist=arglist, pyt_arglist=pyt_arglist, verbosity=verbosity, @@ -429,7 +427,6 @@ def main() -> None: driver.prepend_path('PATH', [join(driver.cwd, 'scripts')]) driver.prepend_path('MYPYPATH', [driver.cwd]) driver.prepend_path('PYTHONPATH', [driver.cwd]) - driver.prepend_path('PYTHONPATH', [join(driver.cwd, 'lib-typing', v) for v in driver.versions]) driver.add_flake8() add_pytest(driver) diff --git a/scripts/stubgen b/scripts/stubgen old mode 100755 new mode 100644 diff --git a/setup.cfg b/setup.cfg index 0ec9131aa413..27244e880337 100644 --- a/setup.cfg +++ b/setup.cfg @@ -13,8 +13,6 @@ exclude = .cache, # Sphinx configuration is irrelevant docs/source/conf.py, - # external library with incompatible style - lib-typing/*, # conflicting styles misc/*, # external library with incompatible style diff --git a/setup.py b/setup.py index 78c6a639ad0a..efc23f4a6fe4 100644 --- a/setup.py +++ b/setup.py @@ -94,9 +94,6 @@ def run(self): package_dir = {'mypy': 'mypy'} -scripts = ['scripts/mypy', 'scripts/stubgen'] -if os.name == 'nt': - scripts.append('scripts/mypy.bat') # These requirements are used when installing by other means than bdist_wheel. # E.g. "pip3 install ." 
or @@ -119,7 +116,8 @@ def run(self): package_dir=package_dir, py_modules=[], packages=['mypy'], - scripts=scripts, + entry_points={'console_scripts': ['mypy=mypy.__main__:console_entry', + 'stubgen=mypy.stubgen:main']}, data_files=data_files, classifiers=classifiers, cmdclass={'build_py': CustomPythonBuild}, diff --git a/test-data/stdlib-samples/3.2/test/test_genericpath.py b/test-data/stdlib-samples/3.2/test/test_genericpath.py index 43b78e77db61..df0e10701d39 100644 --- a/test-data/stdlib-samples/3.2/test/test_genericpath.py +++ b/test-data/stdlib-samples/3.2/test/test_genericpath.py @@ -23,7 +23,7 @@ def safe_rmdir(dirname: str) -> None: class GenericTest(unittest.TestCase): # The path module to be tested - pathmodule = genericpath # type: Any + pathmodule = genericpath # type: Any common_attributes = ['commonprefix', 'getsize', 'getatime', 'getctime', 'getmtime', 'exists', 'isdir', 'isfile'] attributes = [] # type: List[str] diff --git a/test-data/unit/README.md b/test-data/unit/README.md index 64737e8ca3b4..693e7f4d8719 100644 --- a/test-data/unit/README.md +++ b/test-data/unit/README.md @@ -61,9 +61,12 @@ Where the stubs for builtins come from for a given test: - The builtins used by default in unit tests live in `test-data/unit/lib-stub`. -- Individual test cases can override the stubs by using `[builtins fixtures/foo.pyi]`; - this targets files in `test-data/unit/fixtures`. Feel free to modify existing files - there or create new ones as you deem fit. +- Individual test cases can override the builtins stubs by using + `[builtins fixtures/foo.pyi]`; this targets files in `test-data/unit/fixtures`. + Feel free to modify existing files there or create new ones as you deem fit. + +- Test cases can also use `[typing fixtures/typing-full.pyi]` to use a more + complete stub for `typing` that contains the async types, among other things. - Feel free to add additional stubs to that `fixtures` directory, but generally don't expand files in `lib-stub` without first discussing the @@ -78,6 +81,11 @@ First install any additional dependencies needed for testing: $ python3 -m pip install -U -r test-requirements.txt +You must also have a Python 2.7 binary installed that can import the `typing` +module: + + $ python2 -m pip install -U typing + To run all tests, run the script `runtests.py` in the mypy repository: $ ./runtests.py @@ -110,13 +118,14 @@ finer control over which unit tests are run and how, you can run `py.test` or $ ./runtests.py mypy.test.testlex -a -v -a '*backslash*' You can also run the type checker for manual testing without -installing anything by setting up the Python module search path -suitably (the lib-typing/3.2 path entry is not needed for Python 3.5 -or when you have manually installed the `typing` module): +installing it by setting up the Python module search path suitably: - $ export PYTHONPATH=$PWD:$PWD/lib-typing/3.2 + $ export PYTHONPATH=$PWD $ python -m mypy PROGRAM.py +You will have to manually install the `typing` module if you're running Python +3.4 or earlier. 
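+A quick way to check is, for example, to run
+
+    $ python -c "import typing"
+
+which exits silently when the module is available and fails with ImportError
+otherwise.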
+ You can add the entry scripts to PATH for a single python3 version: $ export PATH=$PWD/scripts diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test index 672bf2b408b8..f8ac01d8c830 100644 --- a/test-data/unit/check-async-await.test +++ b/test-data/unit/check-async-await.test @@ -6,6 +6,7 @@ async def f() -> int: pass [builtins fixtures/async_await.pyi] +[typing fixtures/typing-full.pyi] [case testAsyncDefReturn] @@ -13,12 +14,14 @@ async def f() -> int: return 0 reveal_type(f()) # E: Revealed type is 'typing.Awaitable[builtins.int]' [builtins fixtures/async_await.pyi] +[typing fixtures/typing-full.pyi] [case testAsyncDefMissingReturn] # flags: --warn-no-return async def f() -> int: make_this_not_trivial = 1 [builtins fixtures/async_await.pyi] +[typing fixtures/typing-full.pyi] [out] main:2: error: Missing return statement @@ -28,6 +31,7 @@ async def f() -> int: make_this_not_trivial = 1 return [builtins fixtures/async_await.pyi] +[typing fixtures/typing-full.pyi] [out] main:4: error: Return value expected @@ -38,6 +42,7 @@ async def f() -> int: reveal_type(x) # E: Revealed type is 'builtins.int*' return x [builtins fixtures/async_await.pyi] +[typing fixtures/typing-full.pyi] [out] [case testAwaitDefaultContext] @@ -48,6 +53,7 @@ async def f(x: T) -> T: y = await f(x) reveal_type(y) return y +[typing fixtures/typing-full.pyi] [out] main:6: error: Revealed type is 'T`-1' @@ -59,6 +65,7 @@ async def f(x: T) -> T: y = await f(x) # type: Any reveal_type(y) return y +[typing fixtures/typing-full.pyi] [out] main:6: error: Revealed type is 'Any' @@ -70,6 +77,7 @@ async def f(x: T) -> T: y = await f(x) # type: int reveal_type(y) return x +[typing fixtures/typing-full.pyi] [out] main:5: error: Argument 1 to "f" has incompatible type "T"; expected "int" main:6: error: Revealed type is 'builtins.int' @@ -83,6 +91,7 @@ def g() -> Generator[int, None, str]: async def f() -> int: x = await g() return x +[typing fixtures/typing-full.pyi] [out] main:7: error: Incompatible types in await (actual type Generator[int, None, str], expected type Awaitable[Any]) @@ -94,6 +103,7 @@ def g() -> Iterator[Any]: async def f() -> int: x = await g() return x +[typing fixtures/typing-full.pyi] [out] main:6: error: Incompatible types in await (actual type Iterator[Any], expected type Awaitable[Any]) @@ -105,6 +115,7 @@ async def f() -> int: x = await g() return x [builtins fixtures/async_await.pyi] +[typing fixtures/typing-full.pyi] [out] main:5: error: Incompatible types in await (actual type "int", expected type Awaitable[Any]) @@ -116,6 +127,7 @@ async def f() -> str: x = await g() # type: str return x [builtins fixtures/async_await.pyi] +[typing fixtures/typing-full.pyi] [out] main:5: error: Incompatible types in assignment (expression has type "int", variable has type "str") @@ -127,6 +139,7 @@ async def f() -> str: x = await g() return x [builtins fixtures/async_await.pyi] +[typing fixtures/typing-full.pyi] [out] main:6: error: Incompatible return value type (got "int", expected "str") @@ -139,7 +152,7 @@ async def f() -> None: async for x in C(): reveal_type(x) # E: Revealed type is 'builtins.int*' [builtins fixtures/async_await.pyi] -[out] +[typing fixtures/typing-full.pyi] [case testAsyncForError] @@ -148,6 +161,7 @@ async def f() -> None: async for x in [1]: pass [builtins fixtures/async_await.pyi] +[typing fixtures/typing-full.pyi] [out] main:4: error: AsyncIterable expected main:4: error: List[int] has no attribute "__aiter__" @@ -167,6 +181,7 @@ async def f() -> None: 
async for z in C(): # type: Union[int, str] reveal_type(z) # E: Revealed type is 'Union[builtins.int, builtins.str]' [builtins fixtures/async_await.pyi] +[typing fixtures/typing-full.pyi] [case testAsyncForComprehension] # flags: --fast-parser --python-version 3.6 @@ -206,6 +221,7 @@ async def generatorexp(obj: Iterable[int]): reveal_type(lst2) # E: Revealed type is 'typing.AsyncIterator[builtins.int*]' [builtins fixtures/async_await.pyi] +[typing fixtures/typing-full.pyi] [case testAsyncForComprehensionErrors] # flags: --fast-parser --python-version 3.6 @@ -240,6 +256,7 @@ main:20: error: Iterable[int] has no attribute "__aiter__"; maybe "__iter__"? main:21: error: Iterable expected main:21: error: asyncify[int] has no attribute "__iter__"; maybe "__aiter__"? [builtins fixtures/async_await.pyi] +[typing fixtures/typing-full.pyi] [case testAsyncWith] @@ -250,6 +267,7 @@ async def f() -> None: async with C() as x: reveal_type(x) # E: Revealed type is 'builtins.int*' [builtins fixtures/async_await.pyi] +[typing fixtures/typing-full.pyi] [case testAsyncWithError] @@ -261,6 +279,7 @@ async def f() -> None: async with C() as x: pass [builtins fixtures/async_await.pyi] +[typing fixtures/typing-full.pyi] [out] main:6: error: "C" has no attribute "__aenter__"; maybe "__enter__"? main:6: error: "C" has no attribute "__aexit__"; maybe "__exit__"? @@ -274,7 +293,7 @@ async def f() -> None: async with C() as x: # E: Incompatible types in "async with" for __aenter__ (actual type "int", expected type Awaitable[Any]) pass [builtins fixtures/async_await.pyi] -[out] +[typing fixtures/typing-full.pyi] [case testAsyncWithErrorBadAenter2] @@ -285,7 +304,7 @@ async def f() -> None: async with C() as x: # E: None has no attribute "__await__" pass [builtins fixtures/async_await.pyi] -[out] +[typing fixtures/typing-full.pyi] [case testAsyncWithErrorBadAexit] @@ -296,7 +315,7 @@ async def f() -> None: async with C() as x: # E: Incompatible types in "async with" for __aexit__ (actual type "int", expected type Awaitable[Any]) pass [builtins fixtures/async_await.pyi] -[out] +[typing fixtures/typing-full.pyi] [case testAsyncWithErrorBadAexit2] @@ -307,7 +326,7 @@ async def f() -> None: async with C() as x: # E: None has no attribute "__await__" pass [builtins fixtures/async_await.pyi] -[out] +[typing fixtures/typing-full.pyi] [case testAsyncWithTypeComments] @@ -324,6 +343,7 @@ async def f() -> None: async with C() as a: # type: int, int # E: Invalid tuple literal type pass [builtins fixtures/async_await.pyi] +[typing fixtures/typing-full.pyi] [case testNoYieldInAsyncDef] # flags: --python-version 3.5 @@ -361,6 +381,7 @@ def g() -> Generator[Any, None, str]: x = yield from f() return x [builtins fixtures/async_await.pyi] +[typing fixtures/typing-full.pyi] [out] main:6: error: "yield from" can't be applied to Awaitable[str] @@ -389,7 +410,7 @@ async def main() -> None: async for z in I(): reveal_type(z) # E: Revealed type is 'builtins.int' [builtins fixtures/async_await.pyi] -[out] +[typing fixtures/typing-full.pyi] [case testYieldTypeCheckInDecoratedCoroutine] @@ -405,7 +426,7 @@ def f() -> Generator[int, str, int]: else: return '' # E: Incompatible return value type (got "str", expected "int") [builtins fixtures/async_await.pyi] -[out] +[typing fixtures/typing-full.pyi] -- Async generators (PEP 525), some test cases adapted from the PEP text -- --------------------------------------------------------------------- @@ -436,6 +457,7 @@ async def wrong_return() -> Generator[int, None, None]: # E: The return type of yield 
3 [builtins fixtures/dict.pyi] +[typing fixtures/typing-full.pyi] [case testAsyncGeneratorReturnIterator] # flags: --python-version 3.6 @@ -451,6 +473,7 @@ async def use_gen() -> None: reveal_type(item) # E: Revealed type is 'builtins.int*' [builtins fixtures/dict.pyi] +[typing fixtures/typing-full.pyi] [case testAsyncGeneratorManualIter] # flags: --python-version 3.6 @@ -468,6 +491,7 @@ async def user() -> None: reveal_type(await gen.__anext__()) # E: Revealed type is 'builtins.int*' [builtins fixtures/dict.pyi] +[typing fixtures/typing-full.pyi] [case testAsyncGeneratorAsend] # flags: --fast-parser --python-version 3.6 @@ -488,6 +512,7 @@ async def h() -> None: reveal_type(await g.asend('hello')) # E: Revealed type is 'builtins.int*' [builtins fixtures/dict.pyi] +[typing fixtures/typing-full.pyi] [case testAsyncGeneratorAthrow] # flags: --fast-parser --python-version 3.6 @@ -506,6 +531,7 @@ async def h() -> None: reveal_type(await g.athrow(BaseException)) # E: Revealed type is 'builtins.str*' [builtins fixtures/dict.pyi] +[typing fixtures/typing-full.pyi] [case testAsyncGeneratorNoSyncIteration] # flags: --fast-parser --python-version 3.6 @@ -520,6 +546,7 @@ def h() -> None: pass [builtins fixtures/dict.pyi] +[typing fixtures/typing-full.pyi] [out] main:9: error: Iterable expected @@ -536,6 +563,7 @@ async def gen() -> AsyncGenerator[int, None]: yield from f() # E: 'yield from' in async function [builtins fixtures/dict.pyi] +[typing fixtures/typing-full.pyi] [case testAsyncGeneratorNoReturnWithValue] # flags: --fast-parser --python-version 3.6 @@ -557,6 +585,7 @@ async def return_f() -> AsyncGenerator[int, None]: return f() # E: 'return' with value in async generator is not allowed [builtins fixtures/dict.pyi] +[typing fixtures/typing-full.pyi] -- The full matrix of coroutine compatibility -- ------------------------------------------ @@ -644,4 +673,5 @@ async def decorated_host_coroutine() -> None: x = await other_coroutine() [builtins fixtures/async_await.pyi] +[typing fixtures/typing-full.pyi] [out] diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test index 085fb3f4f60e..0447edb2df67 100644 --- a/test-data/unit/check-class-namedtuple.test +++ b/test-data/unit/check-class-namedtuple.test @@ -507,6 +507,7 @@ reveal_type(XMeth(1).asyncdouble()) # E: Revealed type is 'typing.Awaitable[bui reveal_type(XMeth(42).x) # E: Revealed type is 'builtins.int' reveal_type(XRepr(42).__str__()) # E: Revealed type is 'builtins.str' reveal_type(XRepr(1, 2).__add__(XRepr(3))) # E: Revealed type is 'builtins.int' +[typing fixtures/typing-full.pyi] [case testNewNamedTupleOverloading] from typing import NamedTuple, overload diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 9c88741be78f..666477941eb5 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -1651,7 +1651,6 @@ b = a.bar [out] main:9: error: Incompatible types in assignment (expression has type "A", variable has type "B") - [case testGetAttrSignature] class A: def __getattr__(self, x: str) -> A: pass @@ -1665,6 +1664,86 @@ class D: main:4: error: Invalid signature "def (__main__.B, __main__.A) -> __main__.B" main:6: error: Invalid signature "def (__main__.C, builtins.str, builtins.str) -> __main__.C" +[case testSetAttr] +from typing import Union, Any +class A: + def __setattr__(self, name: str, value: Any) -> None: ... 
+ +a = A() +a.test = 'hello' + +class B: + def __setattr__(self, name: str, value: Union[int, str]) -> None: ... + +b = B() +b.both = 1 +b.work = '2' + +class C: + def __setattr__(self, name: str, value: str) -> None: ... + +c = C() +c.fail = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "str") + +class D: + __setattr__ = 'hello' + +d = D() +d.crash = 4 # E: "D" has no attribute "crash" + +class Ex: + def __setattr__(self, name: str, value: int) -> None:... + test = '42' # type: str +e = Ex() +e.test = 'hello' +e.t = 4 + +class Super: + def __setattr__(self, name: str, value: int) -> None: ... + +class Sub(Super): + ... +s = Sub() +s.success = 4 +s.fail = 'fail' # E: Incompatible types in assignment (expression has type "str", variable has type "int") + +[case testSetAttrSignature] +class Test: + def __setattr__() -> None: ... # E: Method must have at least one argument # E: Invalid signature "def ()" +t = Test() +t.crash = 'test' # E: "Test" has no attribute "crash" + +class A: + def __setattr__(self): ... # E: Invalid signature "def (self: Any) -> Any" +a = A() +a.test = 4 # E: "A" has no attribute "test" + +class B: + def __setattr__(self, name, value: int): ... +b = B() +b.integer = 5 + +class C: + def __setattr__(self, name: int, value: int) -> None: ... # E: Invalid signature "def (__main__.C, builtins.int, builtins.int)" +c = C() +c.check = 13 + +[case testGetAttrAndSetattr] +from typing import Any +class A: + def __setattr__(self, name: str, value: Any) -> None: ... + def __getattr__(self, name: str) -> Any: ... +a = A() +a.test = 4 +t = a.test + +class B: + def __setattr__(self, name: str, value: int) -> None: ... + def __getattr__(self, name: str) -> str: ... +integer = 0 +b = B() +b.at = '3' # E: Incompatible types in assignment (expression has type "str", variable has type "int") +integer = b.at # E: Incompatible types in assignment (expression has type "str", variable has type "int") -- CallableType objects -- ---------------- @@ -3066,11 +3145,11 @@ class A(metaclass=M): pass reveal_type(A[M]) # E: Revealed type is 'builtins.int' -[case testMetaclassSelftype] +[case testMetaclassSelfType] from typing import TypeVar, Type class M(type): pass -T = TypeVar('T', bound='A') +T = TypeVar('T') class M1(M): def foo(cls: Type[T]) -> T: ... 
@@ -3136,6 +3215,80 @@ class M(type): class A(metaclass=M): pass reveal_type(type(A).x) # E: Revealed type is 'builtins.int' +[case testMetaclassStrictSupertypeOfTypeWithClassmethods] +from typing import Type, TypeVar +TA = TypeVar('TA', bound='A') +TTA = TypeVar('TTA', bound='Type[A]') +TM = TypeVar('TM', bound='M') + +class M(type): + def g1(cls: 'Type[A]') -> A: pass # E: The erased type of self 'Type[__main__.A]' is not a supertype of its class '__main__.M' + def g2(cls: Type[TA]) -> TA: pass # E: The erased type of self 'Type[__main__.A]' is not a supertype of its class '__main__.M' + def g3(cls: TTA) -> TTA: pass # E: The erased type of self 'Type[__main__.A]' is not a supertype of its class '__main__.M' + def g4(cls: TM) -> TM: pass +m: M + +class A(metaclass=M): + def foo(self): pass + +reveal_type(A.g1) # E: Revealed type is 'def () -> __main__.A' +reveal_type(A.g2) # E: Revealed type is 'def () -> __main__.A*' +reveal_type(A.g3) # E: Revealed type is 'def () -> def () -> __main__.A' +reveal_type(A.g4) # E: Revealed type is 'def () -> def () -> __main__.A' + +class B(metaclass=M): + def foo(self): pass + +B.g1 # Should be error: Argument 0 to "g1" of "M" has incompatible type "B"; expected Type[A] +B.g2 # Should be error: Argument 0 to "g2" of "M" has incompatible type "B"; expected Type[TA] +B.g3 # Should be error: Argument 0 to "g3" of "M" has incompatible type "B"; expected "TTA" +reveal_type(B.g4) # E: Revealed type is 'def () -> def () -> __main__.B' + +# 4 examples of unsoundness - instantiation, classmethod, staticmethod and ClassVar: + +ta: Type[A] = m # E: Incompatible types in assignment (expression has type "M", variable has type Type[A]) +a: A = ta() +reveal_type(ta.g1) # E: Revealed type is 'def () -> __main__.A' +reveal_type(ta.g2) # E: Revealed type is 'def () -> __main__.A*' +reveal_type(ta.g3) # E: Revealed type is 'def () -> Type[__main__.A]' +reveal_type(ta.g4) # E: Revealed type is 'def () -> Type[__main__.A]' + +x: M = ta +x.g1 # should be error: Argument 0 to "g1" of "M" has incompatible type "M"; expected Type[A] +x.g2 # should be error: Argument 0 to "g2" of "M" has incompatible type "M"; expected Type[TA] +x.g3 # should be error: Argument 0 to "g3" of "M" has incompatible type "M"; expected "TTA" +reveal_type(x.g4) # E: Revealed type is 'def () -> __main__.M*' + +def r(ta: Type[TA], tta: TTA) -> None: + x: M = ta + y: M = tta + +class Class(metaclass=M): + @classmethod + def f1(cls: Type[Class]) -> None: pass + @classmethod + def f2(cls: M) -> None: pass +cl: Type[Class] = m # E: Incompatible types in assignment (expression has type "M", variable has type Type[Class]) +reveal_type(cl.f1) # E: Revealed type is 'def ()' +reveal_type(cl.f2) # E: Revealed type is 'def ()' +x1: M = cl + +class Static(metaclass=M): + @staticmethod + def f() -> None: pass +s: Type[Static] = m # E: Incompatible types in assignment (expression has type "M", variable has type Type[Static]) +reveal_type(s.f) # E: Revealed type is 'def ()' +x2: M = s + +from typing import ClassVar +class Cvar(metaclass=M): + x = 1 # type: ClassVar[int] +cv: Type[Cvar] = m # E: Incompatible types in assignment (expression has type "M", variable has type Type[Cvar]) +cv.x +x3: M = cv + +[builtins fixtures/classmethod.pyi] + -- Synthetic types crashes -- ----------------------- diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index a023aa15484f..ab2bd1e92543 100644 --- a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -1006,6 
+1006,16 @@ a[None:] a[:None] [builtins fixtures/slice.pyi] +[case testNoneSliceBoundsWithStrictOptional] +# flags: --strict-optional +from typing import Any +a = None # type: Any +a[None:1] +a[1:None] +a[None:] +a[:None] +[builtins fixtures/slice.pyi] + -- String interpolation -- -------------------- diff --git a/test-data/unit/check-incomplete-fixture.test b/test-data/unit/check-incomplete-fixture.test new file mode 100644 index 000000000000..68c7c6c9aa0f --- /dev/null +++ b/test-data/unit/check-incomplete-fixture.test @@ -0,0 +1,98 @@ +-- Test cases for reporting errors when a test case uses a fixture with +-- missing definitions. At least in the most common cases this should not +-- result in an uncaught exception. These tests make sure that this behavior +-- does not regress. +-- +-- NOTE: These tests do NOT test behavior of mypy outside tests. + +[case testVariableUndefinedUsingDefaultFixture] +import m +# This used to cause a crash since types.ModuleType is not available +# by default. We fall back to 'object' now. +m.x # E: "object" has no attribute "x" +[file m.py] + +[case testListMissingFromStubs] +from typing import List +def f(x: List[int]) -> None: pass +[out] +main:1: error: Name '__builtins__.list' is not defined +main:1: note: Maybe your test fixture does not define "typing.List"? +main:1: note: Consider adding [builtins fixtures/list.pyi] to your test description + +[case testDictMissingFromStubs] +from typing import Dict +def f(x: Dict[int]) -> None: pass +[out] +main:1: error: Name '__builtins__.dict' is not defined +main:1: note: Maybe your test fixture does not define "typing.Dict"? +main:1: note: Consider adding [builtins fixtures/dict.pyi] to your test description + +[case testSetMissingFromStubs] +from typing import Set +def f(x: Set[int]) -> None: pass +[out] +main:1: error: Name '__builtins__.set' is not defined +main:1: note: Maybe your test fixture does not define "typing.Set"? +main:1: note: Consider adding [builtins fixtures/set.pyi] to your test description + +[case testBoolMissingFromStubs] +x: bool +[out] +main:1: error: Name 'bool' is not defined +main:1: note: Maybe your test fixture does not define "builtins.bool"? +main:1: note: Consider adding [builtins fixtures/bool.pyi] to your test description + +[case testBaseExceptionMissingFromStubs] +e: BaseException +[out] +main:1: error: Name 'BaseException' is not defined +main:1: note: Maybe your test fixture does not define "builtins.BaseException"? +main:1: note: Consider adding [builtins fixtures/exception.pyi] to your test description + +[case testExceptionMissingFromStubs] +e: Exception +[out] +main:1: error: Name 'Exception' is not defined +main:1: note: Maybe your test fixture does not define "builtins.Exception"? +main:1: note: Consider adding [builtins fixtures/exception.pyi] to your test description + +[case testIsinstanceMissingFromStubs] +if isinstance(1, int): + pass +[out] +main:1: error: Name 'isinstance' is not defined +main:1: note: Maybe your test fixture does not define "builtins.isinstance"? +main:1: note: Consider adding [builtins fixtures/isinstancelist.pyi] to your test description + +[case testInvalidTupleDefinitionFromStubs] +from typing import Tuple +x: Tuple[int, ...] +x[0] +for y in x: + pass +[out] +-- These errors are pretty bad, but keeping this test anyway to +-- avoid things getting worse. 
+main:2: error: "tuple" expects no type arguments, but 1 given +main:3: error: Value of type "tuple" is not indexable +main:4: error: Iterable expected +main:4: error: "tuple" has no attribute "__iter__" + +[case testClassmethodMissingFromStubs] +class A: + @classmethod + def f(cls): pass +[out] +main:2: error: Name 'classmethod' is not defined +main:2: note: Maybe your test fixture does not define "builtins.classmethod"? +main:2: note: Consider adding [builtins fixtures/classmethod.pyi] to your test description + +[case testPropertyMissingFromStubs] +class A: + @property + def f(self): pass +[out] +main:2: error: Name 'property' is not defined +main:2: note: Maybe your test fixture does not define "builtins.property"? +main:2: note: Consider adding [builtins fixtures/property.pyi] to your test description diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index 23fafc24edf5..d28cb1acf512 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -3,6 +3,9 @@ -- Before the tests are run again, in step N any *.py.N files are copied to -- *.py. -- +-- You can add an empty section like `[delete mod.py.2]` to delete `mod.py` +-- before the second run. +-- -- Errors expected in the first run should be in the `[out1]` section, and -- errors expected in the second run should be in the `[out2]` section, and so on. -- If a section is omitted, it is expected there are no errors on that run. @@ -1945,7 +1948,35 @@ main:3: error: Revealed type is 'builtins.int' main:5: error: Revealed type is 'builtins.int' -- TODO: Add another test for metaclass in import cycle (reversed from the above test). --- This currently doesn't work. +-- This currently does not work. + +[case testDeleteFile] +import n +[file n.py] +import m +[file m.py] +x = 1 +[delete m.py.2] +[rechecked n] +[stale] +[out2] +tmp/n.py:1: error: Cannot find module named 'm' +tmp/n.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) + +[case testDeleteFileWithinCycle] +import a +[file a.py] +import b +[file b.py] +import c +[file c.py] +import a +[file a.py.2] +import c +[delete b.py.2] +[rechecked a, c] +[stale a] +[out2] [case testThreePassesBasic] import m @@ -2004,7 +2035,11 @@ def foo(x) -> int: tmp/n.py:2: error: Too many arguments for "foo" [out3] -[case testQuickAndDirty1] +-- +-- Quick mode +-- + +[case testQuickAndDirtyInterfaceChangeDoesNotPropagate] # flags: --quick-and-dirty import b, c [file a.py] @@ -2020,7 +2055,7 @@ def a(x): pass [rechecked a] [stale a] -[case testQuickAndDirty2] +[case testQuickAndDirtyDoesNotInvalidateImportCycle] # flags: --quick-and-dirty import b, c [file a.py] @@ -2038,7 +2073,7 @@ x = 0 [rechecked b] [stale b] -[case testQuickAndDirty3] +[case testQuickAndDirtySwitchToIncrementalMode] # flags: --quick-and-dirty # flags2: --incremental import a, b @@ -2049,7 +2084,7 @@ import a [rechecked a, b, builtins] [stale a, b, builtins] -[case testQuickAndDirty4] +[case testQuickAndDirtyFixErrorInExistingFunction] # flags: --quick-and-dirty import a, b [file a.py] @@ -2065,7 +2100,7 @@ tmp/a.py:2: error: Incompatible return value type (got "str", expected "int") [rechecked a] [stale a] -[case testQuickAndDirty5] +[case testQuickAndDirtyIntroduceErrorInNewFunction] # flags: --quick-and-dirty import a, b [file a.py] @@ -2081,7 +2116,7 @@ tmp/a.py:2: error: Incompatible return value type (got "str", expected "int") [rechecked a] [stale] -[case testQuickAndDirty6] +[case 
testQuickAndDirtyPersistingError] # flags: --quick-and-dirty import a, b [file a.py] @@ -2099,7 +2134,7 @@ tmp/a.py:2: error: Incompatible return value type (got "float", expected "int") [rechecked a] [stale] -[case testQuickAndDirty7] +[case testQuickAndDirtyIntroduceReferencesWithinCycle] # flags: --quick-and-dirty import a, b [file a.py] @@ -2119,7 +2154,7 @@ tmp/a.py:3: error: Revealed type is 'def () -> builtins.int' [rechecked a] [stale] -[case testQuickAndDirty8] +[case testQuickAndDirtyIntroduceReferencesWithinCycle2] # flags: --quick-and-dirty import a, b [file a.py] @@ -2139,14 +2174,70 @@ tmp/b.py:3: error: Revealed type is 'def () -> builtins.int' [rechecked b] [stale] +[case testQuickAndDirtyIntroduceReferencesWithinCycleNoError] +# flags: --quick-and-dirty +import a, b, c +[file a.py] +import b +[file b.py] +import a +class C: pass +def f() -> int: pass +[file c.py] +[file a.py.2] +import b +def g() -> b.C: pass +h = b.f +[file c.py.3] +import a +reveal_type(a.g) +reveal_type(a.h) +[out1] +[out2] +[out3] +tmp/c.py:2: error: Revealed type is 'def () -> b.C' +tmp/c.py:3: error: Revealed type is 'def () -> builtins.int' +[rechecked a] +[stale a] +[rechecked2 c] +[stale2] + +[case testQuickAndDirtyIntroduceReferencesWithinCycleNoError2] +# flags: --quick-and-dirty +import a, b, c +[file a.py] +import b +class C: pass +def f() -> int: pass +[file b.py] +import a +[file c.py] +[file b.py.2] +import a +def g() -> a.C: pass +h = a.f +[file c.py.3] +import b +reveal_type(b.g) +reveal_type(b.h) +[out1] +[out2] +[out3] +tmp/c.py:2: error: Revealed type is 'def () -> a.C' +tmp/c.py:3: error: Revealed type is 'def () -> builtins.int' +[rechecked b] +[stale b] +[rechecked2 c] +[stale2] + -- (The behavior for blockers is actually no different than in regular incremental mode) -[case testQuickAndDirty9] +[case testQuickAndDirtyBlockerOnFirstRound] # flags: --quick-and-dirty import a, b [file a.py] import b -class B: pass -class C(B, B): pass # blocker +class B(C): pass +class C(B): pass # blocker [file b.py] import a [file a.py.2] @@ -2154,12 +2245,12 @@ import b class B: pass class C(B): pass [out1] -tmp/a.py:3: error: Duplicate base class "B" +tmp/a.py:3: error: Cycle in inheritance hierarchy [out2] [rechecked a, b] [stale a, b] -[case testQuickAndDirty10] +[case testQuickAndDirtyBlockerOnSecondRound] # flags: --quick-and-dirty import a, b [file a.py] @@ -2170,15 +2261,15 @@ class C(B): pass import a [file a.py.2] import b -class B: pass -class C(B, B): pass # blocker +class B(C): pass +class C(B): pass # blocker [out1] [out2] -tmp/a.py:3: error: Duplicate base class "B" +tmp/a.py:3: error: Cycle in inheritance hierarchy [rechecked a, b] [stale a, b] -[case testQuickAndDirty11] +[case testQuickAndDirtyRenameFunctionInTwoModules] # flags: --quick-and-dirty import a, b, c, d [file a.py] @@ -2196,7 +2287,7 @@ def g(): pass # renamed f to g [file c.py.2] from a import g -[case testQuickAndDirty12] +[case testQuickAndDirtyUnmodifiedModuleDoesNotGenerateError] # flags: --quick-and-dirty import a, b, c, d [file a.py] @@ -2209,7 +2300,7 @@ from a import C from b import C [file d.py] from c import C -C().f() +C().f() # no error because unmodified [file a.py.2] import d class C: @@ -2219,7 +2310,7 @@ from a import C [out1] [out2] -[case testQuickAndDirty13] +[case testQuickAndDirtyUnmodifiedModuleDoesNotGenerateError2] # flags: --quick-and-dirty import a, b, c [file a.py] @@ -2241,7 +2332,7 @@ class C: [rechecked a] [stale a] -[case testQuickAndDirty14] +[case testQuickAndDirtyTypeAliasReference] # 
flags: --quick-and-dirty import a, b [file a.py] @@ -2254,7 +2345,7 @@ S = str import b def f(x: b.S) -> int: return 0 -[case testQuickAndDirty15] +[case testQuickAndDirtyNamedTupleReference] # flags: --quick-and-dirty import a, b [file a.py] @@ -2268,7 +2359,7 @@ P = NamedTuple('P', (('x', int),)) import b def f(x: b.P) -> int: return 0 -[case testQuickAndDirty16] +[case testQuickAndDirtyTypeVarReference] # flags: --quick-and-dirty import a, b [file a.py] @@ -2281,3 +2372,344 @@ T = TypeVar('T') [file a.py.2] import b def f(x: b.T) -> int: return 0 + +[case testQuickAndDirtyDeleteFunctionUsedByOtherModule] +# flags: --quick-and-dirty +import a +[file a.py] +from b import f +[file b.py] +import a +def f() -> int: pass +a.f() +[file b.py.2] +import a +reveal_type(a.f) +[out2] +tmp/b.py:2: error: Revealed type is 'Any' + +[case testQuickAndDirtyDeleteClassUsedInAnnotation] +# flags: --quick-and-dirty +import a +[file a.py] +import b +def f() -> b.C: pass +[file b.py] +import a +class C: pass +[file b.py.2] +import a +reveal_type(a.f) +a.f().x +[out2] +tmp/b.py:2: error: Revealed type is 'def () -> ' +tmp/b.py:3: error: "" has no attribute "x" + +[case testQuickAndDirtyDeleteClassUsedAsBase] +# flags: --quick-and-dirty +import a +[file a.py] +import b +class D(b.C): pass +[file b.py] +import a +class C: pass +[file b.py.2] +import a +reveal_type(a.D) +a.D().x +[out2] +tmp/b.py:2: error: Revealed type is 'Any' + +[case testQuickAndDirtyDeleteNestedClassUsedInAnnotation] +# flags: --quick-and-dirty +import a +[file a.py] +import b +def f() -> b.C.D: pass +[file b.py] +import a +class C: + class D: pass +[file b.py.2] +import a +class C: + pass +reveal_type(a.f) +a.f().x +[out2] +tmp/b.py:4: error: Revealed type is 'def () -> ' +tmp/b.py:5: error: "" has no attribute "x" + +[case testQuickAndDirtyTurnGenericClassIntoNonGeneric-skip] +# flags: --quick-and-dirty +import a +[file a.py] +import b +def f() -> b.C[int]: pass +[file b.py] +from typing import TypeVar, Generic +import a +T = TypeVar('T') +class C(Generic[T]): pass +[file b.py.2] +import a +class C: pass +reveal_type(a.f) +c: C +d = a.f() +c = d +d = c +[out2] +# TODO: Crashes (https://github.com/python/mypy/issues/3279) + +[case testQuickAndDirtyTurnClassIntoGenericOne-skip] +# flags: --quick-and-dirty +import a +[file a.py] +import b +def f() -> b.C: pass +[file b.py] +import a +class C: pass +[file b.py.2] +from typing import TypeVar, Generic +import a +T = TypeVar('T') +class C(Generic[T]): pass +reveal_type(a.f) +c: C[int] +d = a.f() +d = c +c = d +[out2] +# TODO: Crashes (https://github.com/python/mypy/issues/3279) + +[case testQuickAndDirtyDeleteTypeVarUsedInAnnotation] +# flags: --quick-and-dirty +import a +[file a.py] +import b +def f(x: b.T) -> b.T: return x +[file b.py] +from typing import TypeVar +import a +T = TypeVar('T') +[file b.py.2] +import a +reveal_type(a.f) +reveal_type(a.f(1)) +[out2] +tmp/b.py:2: error: Revealed type is 'def [b.T] (x: b.T`-1) -> b.T`-1' +tmp/b.py:3: error: Revealed type is 'builtins.int*' + +[case testQuickAndDirtyDeleteNewTypeUsedInAnnotation] +# flags: --quick-and-dirty +import a +[file a.py] +import b +def f() -> b.C: pass +[file b.py] +from typing import NewType +import a +C = NewType('C', int) +[file b.py.2] +import a +reveal_type(a.f) +a.f().x +[out2] +tmp/b.py:2: error: Revealed type is 'def () -> ' +tmp/b.py:3: error: "" has no attribute "x" + +[case testQuickAndDirtyChangeClassIntoFunction] +# flags: --quick-and-dirty +import a +[file a.py] +import b +def f() -> b.C: pass +[file b.py] 
+import a +class C: pass +[file b.py.2] +import a +def C() -> None: pass +reveal_type(a.f) +a.f().x +[out2] +tmp/b.py:3: error: Revealed type is 'def () -> ' +tmp/b.py:4: error: "" has no attribute "x" + +[case testQuickAndDirtyChangeClassIntoVariable] +# flags: --quick-and-dirty +import a +[file a.py] +import b +def f() -> b.C: pass +[file b.py] +import a +class C: pass +[file b.py.2] +import a +C = 0 +reveal_type(a.f) +a.f().x +[out2] +tmp/b.py:3: error: Revealed type is 'def () -> ' +tmp/b.py:4: error: "" has no attribute "x" + +[case testQuickAndDirtyAddFile] +# flags: --quick-and-dirty +import a +[file a.py] +import b +x = '' +[file b.py] +import a +[file b.py.2] +import c +reveal_type(c.x) +[file c.py.2] +import a +x = 1 +reveal_type(a.x) +[rechecked b, c] +[stale] +[out2] +tmp/c.py:3: error: Revealed type is 'builtins.str' +tmp/b.py:2: error: Revealed type is 'builtins.int' + +[case testQuickAndDirtyDeleteFile] +# flags: --quick-and-dirty +import b +[file a.py] +def f() -> None: pass +[file b.py] +import a +a.f() +[delete a.py.2] +[file b.py.3] +import a +a.f() # Comment change +[file b.py.4] +# Remove import +[rechecked b] +[stale] +[rechecked2 b] +[stale2] +[rechecked3 b] +[stale3 b] +[out2] +tmp/b.py:1: error: Cannot find module named 'a' +tmp/b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) +[out3] +tmp/b.py:1: error: Cannot find module named 'a' +tmp/b.py:1: note: (Perhaps setting MYPYPATH or using the "--ignore-missing-imports" flag would help) +[out4] + +[case testQuickAndDirtyRenameModule] +# flags: --quick-and-dirty +import a +[file a.py] +import b +b.f() +[file b.py] +def f() -> None: pass +[delete b.py.2] +[file c.py.2] +def f() -> None: pass +[file a.py.2] +import c +c.f(1) +[file c.py.3] +def f() -> None: pass # comment change +[file c.py.4] +def f(x) -> None: pass +[out] +[out2] +tmp/a.py:2: error: Too many arguments for "f" +[out3] +tmp/a.py:2: error: Too many arguments for "f" +[out4] +[rechecked a, c] +[stale c] +[rechecked2 a, c] +[stale2] +[rechecked3 a, c] +[stale3 a, c] + +[case testQuickAndDirtyMultiplePasses] +# flags: --quick-and-dirty +import a +[file a.py] +import b +b.f() +[file b.py] +def f() -> None: pass +[file b.py.2] +# Write cache file but the error in a is not caught yet. +def f(x) -> None: pass +[file a.py.3] +# Editing a triggers the error. 
+import b +b.f() +[rechecked b] +[rechecked2 a] +[out2] +[out3] +tmp/a.py:3: error: Too few arguments for "f" + +[case testQuickAndDirtySerializeStaleType] +# flags: --quick-and-dirty +import a, c +[file a.py] +import b +def f() -> b.C: pass +[file b.py] +import a +class C: pass +[file c.py] +[file b.py.2] +import a +x = a.f() +[file c.py.3] +import b +reveal_type(b.x) +def g(x: object) -> None: pass +g(b.x) +b.x.y +[rechecked b] +[stale b] +[rechecked2 c] +[stale2] +[out3] +tmp/c.py:2: error: Revealed type is '' +tmp/c.py:5: error: "" has no attribute "y" + +[case testSerializeAbstractPropertyIncremental] +from abc import abstractmethod +import typing +class A: + @property + def f(self) -> int: + return 1 + @f.setter # type: ignore + @abstractmethod + def f(self, x: int) -> None: + pass +a = A() +[builtins fixtures/property.pyi] + +[case testSerializeAbstractPropertyDisallowUntypedIncremental] +# flags: --disallow-untyped-defs +from abc import abstractmethod +import typing +class A: + @property + def f(self) -> int: + return 1 + @f.setter # type: ignore + @abstractmethod + def f(self, x: int) -> None: + pass +a = A() +[builtins fixtures/property.pyi] diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index 66050601d26f..b1b6857e5518 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -1416,3 +1416,227 @@ reveal_type(f()) # E: Revealed type is 'types.ModuleType' reveal_type(types) # E: Revealed type is 'types.ModuleType' [builtins fixtures/module.pyi] + +[case testClassImportAccessedInMethod] +class C: + import m + def foo(self) -> None: + x = self.m.a + reveal_type(x) # E: Revealed type is 'builtins.str' + # ensure we distinguish self from other variables + y = 'hello' + z = y.m.a # E: "str" has no attribute "m" + @classmethod + def cmethod(cls) -> None: + y = cls.m.a + reveal_type(y) # E: Revealed type is 'builtins.str' + @staticmethod + def smethod(foo: int) -> None: + # we aren't confused by first arg of a staticmethod + y = foo.m.a # E: "int" has no attribute "m" + +[file m.py] +a = 'foo' + +[builtins fixtures/module.pyi] + +[case testModuleAlias] +import m +m2 = m +reveal_type(m2.a) # E: Revealed type is 'builtins.str' +m2.b # E: Module has no attribute "b" +m2.c = 'bar' # E: Module has no attribute "c" + +[file m.py] +a = 'foo' + +[builtins fixtures/module.pyi] + +[case testClassModuleAlias] +import m + +class C: + x = m + def foo(self) -> None: + reveal_type(self.x.a) # E: Revealed type is 'builtins.str' + +[file m.py] +a = 'foo' + +[builtins fixtures/module.pyi] + +[case testLocalModuleAlias] +import m + +def foo() -> None: + x = m + reveal_type(x.a) # E: Revealed type is 'builtins.str' + +class C: + def foo(self) -> None: + x = m + reveal_type(x.a) # E: Revealed type is 'builtins.str' + +[file m.py] +a = 'foo' + +[builtins fixtures/module.pyi] + +[case testChainedModuleAlias] +import m +m3 = m2 = m +m4 = m3 +m5 = m4 +reveal_type(m2.a) # E: Revealed type is 'builtins.str' +reveal_type(m3.a) # E: Revealed type is 'builtins.str' +reveal_type(m4.a) # E: Revealed type is 'builtins.str' +reveal_type(m5.a) # E: Revealed type is 'builtins.str' + +[file m.py] +a = 'foo' + +[builtins fixtures/module.pyi] + +[case testMultiModuleAlias] +import m, n +m2, n2, (m3, n3) = m, n, [m, n] +reveal_type(m2.a) # E: Revealed type is 'builtins.str' +reveal_type(n2.b) # E: Revealed type is 'builtins.str' +reveal_type(m3.a) # E: Revealed type is 'builtins.str' +reveal_type(n3.b) # E: Revealed type is 'builtins.str' + +x, y = m # E: 
'types.ModuleType' object is not iterable +x, y, z = m, n # E: Need more than 2 values to unpack (3 expected) +x, y = m, m, m # E: Too many values to unpack (2 expected, 3 provided) +x, (y, z) = m, n # E: 'types.ModuleType' object is not iterable +x, (y, z) = m, (n, n, n) # E: Too many values to unpack (2 expected, 3 provided) + +[file m.py] +a = 'foo' + +[file n.py] +b = 'bar' + +[builtins fixtures/module.pyi] + +[case testModuleAliasWithExplicitAnnotation] +from typing import Any +import types +import m +mod_mod: types.ModuleType = m +mod_mod2: types.ModuleType +mod_mod2 = m +mod_mod3 = m # type: types.ModuleType +mod_any: Any = m +mod_int: int = m # E: Incompatible types in assignment (expression has type Module, variable has type "int") + +reveal_type(mod_mod) # E: Revealed type is 'types.ModuleType' +mod_mod.a # E: Module has no attribute "a" +reveal_type(mod_mod2) # E: Revealed type is 'types.ModuleType' +mod_mod2.a # E: Module has no attribute "a" +reveal_type(mod_mod3) # E: Revealed type is 'types.ModuleType' +mod_mod3.a # E: Module has no attribute "a" +reveal_type(mod_any) # E: Revealed type is 'Any' + +[file m.py] +a = 'foo' + +[builtins fixtures/module.pyi] + +[case testModuleAliasPassedToFunction] +import types +import m + +def takes_module(x: types.ModuleType): + reveal_type(x.__file__) # E: Revealed type is 'builtins.str' + +n = m +takes_module(m) +takes_module(n) + +[file m.py] +a = 'foo' + +[builtins fixtures/module.pyi] + +[case testModuleAliasRepeated] +import m, n + +if bool(): + x = m +else: + x = 3 # E: Incompatible types in assignment (expression has type "int", variable has type Module) + +if bool(): + y = 3 +else: + y = m # E: Incompatible types in assignment (expression has type Module, variable has type "int") + +if bool(): + z = m +else: + z = n # E: Cannot assign multiple modules to name 'z' without explicit 'types.ModuleType' annotation + +[file m.py] +a = 'foo' + +[file n.py] +a = 3 + +[builtins fixtures/module.pyi] + +[case testModuleAliasRepeatedWithAnnotation] +import types +import m, n + +x: types.ModuleType +if bool(): + x = m +else: + x = n + +x.a # E: Module has no attribute "a" +reveal_type(x.__file__) # E: Revealed type is 'builtins.str' + +[file m.py] +a = 'foo' + +[file n.py] +a = 3 + +[builtins fixtures/module.pyi] + +[case testModuleAliasRepeatedComplex] +import m, n, o + +x = m +x = n # E: Cannot assign multiple modules to name 'x' without explicit 'types.ModuleType' annotation +x = o # E: Cannot assign multiple modules to name 'x' without explicit 'types.ModuleType' annotation + +y = o +y, z = m, n # E: Cannot assign multiple modules to name 'y' without explicit 'types.ModuleType' annotation + +xx = m +xx = m +reveal_type(xx.a) # E: Revealed type is 'builtins.str' + +[file m.py] +a = 'foo' + +[file n.py] +a = 3 + +[file o.py] +a = 'bar' + +[builtins fixtures/module.pyi] + +[case testModuleAliasToOtherModule] +import m, n +m = n # E: Cannot assign multiple modules to name 'm' without explicit 'types.ModuleType' annotation + +[file m.py] + +[file n.py] + +[builtins fixtures/module.pyi] diff --git a/test-data/unit/check-newsyntax.test b/test-data/unit/check-newsyntax.test index 9f3f87853e63..645fbe525358 100644 --- a/test-data/unit/check-newsyntax.test +++ b/test-data/unit/check-newsyntax.test @@ -119,19 +119,19 @@ f'{type(1)}' a: str a = f'foobar' a = f'{"foobar"}' -[builtins fixtures/primitives.pyi] +[builtins fixtures/f_string.pyi] [case testNewSyntaxFStringExpressionsOk] # flags: --python-version 3.6 f'.{1 + 1}.' 
f'.{1 + 1}.{"foo" + "bar"}' -[builtins fixtures/primitives.pyi] +[builtins fixtures/f_string.pyi] [case testNewSyntaxFStringExpressionsErrors] # flags: --python-version 3.6 f'{1 + ""}' f'.{1 + ""}' -[builtins fixtures/primitives.pyi] +[builtins fixtures/f_string.pyi] [out] main:2: error: Unsupported operand types for + ("int" and "str") main:3: error: Unsupported operand types for + ("int" and "str") @@ -142,4 +142,12 @@ value = 10.5142 width = 10 precision = 4 f'result: {value:{width}.{precision}}' -[builtins fixtures/primitives.pyi] +[builtins fixtures/f_string.pyi] + +[case testNewSyntaxFStringSingleField] +# flags: --python-version 3.6 +v = 1 +reveal_type(f'{v}') # E: Revealed type is 'builtins.str' +reveal_type(f'{1}') # E: Revealed type is 'builtins.str' +[builtins fixtures/f_string.pyi] + diff --git a/test-data/unit/check-newtype.test b/test-data/unit/check-newtype.test index 144c8fba04c3..32b25558c84f 100644 --- a/test-data/unit/check-newtype.test +++ b/test-data/unit/check-newtype.test @@ -155,6 +155,29 @@ y = Bar2(42) y = func3(x) [out] +[case testNewTypeWithNewType] +from typing import NewType +A = NewType('A', int) +B = NewType('B', A) +C = A +D = C +E = NewType('E', D) + +a = A(1) +b = B(a) +e = E(a) + +def funca(a: A) -> None: ... +def funcb(b: B) -> None: ... + +funca(a) +funca(b) +funca(e) +funcb(a) # E: Argument 1 to "funcb" has incompatible type "A"; expected "B" +funcb(b) +funcb(e) # E: Argument 1 to "funcb" has incompatible type "E"; expected "B" + +[out] -- Make sure NewType works as expected in a variety of different scopes/across files @@ -279,15 +302,6 @@ main:3: error: Argument 2 to NewType(...) must be subclassable (got T?) main:3: error: Invalid type "__main__.T" main:4: error: Invalid type "__main__.T" -[case testNewTypeWithNewTypeFails] -from typing import NewType -A = NewType('A', int) -B = NewType('B', A) # E: Argument 2 to NewType(...) cannot be another NewType -C = A -D = C -E = NewType('E', D) # E: Argument 2 to NewType(...) 
cannot be another NewType -[out] - [case testNewTypeRedefiningVariablesFails] from typing import NewType diff --git a/test-data/unit/check-optional.test b/test-data/unit/check-optional.test index a563ef91fe59..a22f2e8ed3a5 100644 --- a/test-data/unit/check-optional.test +++ b/test-data/unit/check-optional.test @@ -125,11 +125,10 @@ def f(x: int = None) -> None: f(None) [out] -[case testInferOptionalFromDefaultNoneWithFastParser] - -def f(x: int = None) -> None: - x + 1 # E: Unsupported left operand type for + (some union) -f(None) +[case testNoInferOptionalFromDefaultNone] +# flags: --no-implicit-optional +def f(x: int = None) -> None: # E: Incompatible types in assignment (expression has type None, variable has type "int") + pass [out] [case testInferOptionalFromDefaultNoneComment] @@ -139,12 +138,11 @@ def f(x=None): f(None) [out] -[case testInferOptionalFromDefaultNoneCommentWithFastParser] - -def f(x=None): +[case testNoInferOptionalFromDefaultNoneComment] +# flags: --no-implicit-optional +def f(x=None): # E: Incompatible types in assignment (expression has type None, variable has type "int") # type: (int) -> None - x + 1 # E: Unsupported left operand type for + (some union) -f(None) + pass [out] [case testInferOptionalType] diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test index 37a170e7367e..6ab2d0fed017 100644 --- a/test-data/unit/check-unions.test +++ b/test-data/unit/check-unions.test @@ -335,13 +335,13 @@ t_a = None # type: Type[Any] reveal_type(u(t_o, t_o)) # E: Revealed type is 'Type[builtins.object]' reveal_type(u(t_s, t_s)) # E: Revealed type is 'Type[builtins.str]' reveal_type(u(t_a, t_a)) # E: Revealed type is 'Type[Any]' -reveal_type(u(type, type)) # E: Revealed type is 'def (x: Any) -> builtins.type' +reveal_type(u(type, type)) # E: Revealed type is 'def (x: builtins.object) -> builtins.type' # One type, other non-type reveal_type(u(t_s, 1)) # E: Revealed type is 'Union[builtins.int*, Type[builtins.str]]' reveal_type(u(1, t_s)) # E: Revealed type is 'Union[Type[builtins.str], builtins.int*]' -reveal_type(u(type, 1)) # E: Revealed type is 'Union[builtins.int*, def (x: Any) -> builtins.type]' -reveal_type(u(1, type)) # E: Revealed type is 'Union[def (x: Any) -> builtins.type, builtins.int*]' +reveal_type(u(type, 1)) # E: Revealed type is 'Union[builtins.int*, def (x: builtins.object) -> builtins.type]' +reveal_type(u(1, type)) # E: Revealed type is 'Union[def (x: builtins.object) -> builtins.type, builtins.int*]' reveal_type(u(t_a, 1)) # E: Revealed type is 'Union[builtins.int*, Type[Any]]' reveal_type(u(1, t_a)) # E: Revealed type is 'Union[Type[Any], builtins.int*]' reveal_type(u(t_o, 1)) # E: Revealed type is 'Union[builtins.int*, Type[builtins.object]]' diff --git a/test-data/unit/check-warnings.test b/test-data/unit/check-warnings.test index 2f2d592b13fd..c95baec1cc93 100644 --- a/test-data/unit/check-warnings.test +++ b/test-data/unit/check-warnings.test @@ -165,3 +165,20 @@ from typing import Any def g() -> Any: pass def f() -> Any: return g() [out] + +[case testOKReturnAnyIfProperSubtype] +# flags: --warn-return-any --strict-optional +from typing import Any, Optional + +class Test(object): + + def __init__(self) -> None: + self.attr = "foo" # type: Any + + def foo(self, do_it: bool) -> Optional[Any]: + if do_it: + return self.attr # Should not warn here + else: + return None +[builtins fixtures/list.pyi] +[out] diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test index dc1c6f1c2c53..118a7c0f5452 100644 --- 
a/test-data/unit/cmdline.test +++ b/test-data/unit/cmdline.test @@ -75,6 +75,14 @@ undef dir/subpkg/a.py:1: error: Name 'undef' is not defined dir/a.py:1: error: Name 'undef' is not defined +[case testCmdlineInvalidPackageName] +# cmd: mypy dir/sub.pkg/a.py +[file dir/sub.pkg/__init__.py] +[file dir/sub.pkg/a.py] +undef +[out] +sub.pkg is not a valid Python package name + [case testBadFileEncoding] # cmd: mypy a.py [file a.py] diff --git a/test-data/unit/fixtures/f_string.pyi b/test-data/unit/fixtures/f_string.pyi new file mode 100644 index 000000000000..78d39aee85b8 --- /dev/null +++ b/test-data/unit/fixtures/f_string.pyi @@ -0,0 +1,36 @@ +# Builtins stub used for format-string-related test cases. +# We need str and list, and str needs join and format methods. + +from typing import TypeVar, Generic, Iterable, Iterator, List, overload + +T = TypeVar('T') + +class object: + def __init__(self): pass + +class type: + def __init__(self, x) -> None: pass + +class ellipsis: pass + +class list(Iterable[T], Generic[T]): + @overload + def __init__(self) -> None: pass + @overload + def __init__(self, x: Iterable[T]) -> None: pass + def append(self, x: T) -> None: pass + +class tuple(Generic[T]): pass + +class function: pass +class int: + def __add__(self, i: int) -> int: pass + +class float: pass +class bool(int): pass + +class str: + def __add__(self, s: str) -> str: pass + def format(self, *args) -> str: pass + def join(self, l: List[str]) -> str: pass + diff --git a/test-data/unit/fixtures/module.pyi b/test-data/unit/fixtures/module.pyi index b130d795d25c..44a4dfe0c277 100644 --- a/test-data/unit/fixtures/module.pyi +++ b/test-data/unit/fixtures/module.pyi @@ -17,3 +17,5 @@ class tuple: pass class dict(Generic[T, S]): pass class ellipsis: pass +classmethod = object() +staticmethod = object() diff --git a/test-data/unit/fixtures/property.pyi b/test-data/unit/fixtures/property.pyi index 994874b93b79..929317e2ef66 100644 --- a/test-data/unit/fixtures/property.pyi +++ b/test-data/unit/fixtures/property.pyi @@ -6,7 +6,7 @@ class object: def __init__(self) -> None: pass class type: - def __init__(self, x) -> None: pass + def __init__(self, x: typing.Any) -> None: pass class function: pass diff --git a/test-data/unit/lib-stub/builtins.pyi b/test-data/unit/lib-stub/builtins.pyi index 5010235a53ab..457bea0e9020 100644 --- a/test-data/unit/lib-stub/builtins.pyi +++ b/test-data/unit/lib-stub/builtins.pyi @@ -1,10 +1,8 @@ -Any = 0 - class object: def __init__(self) -> None: pass class type: - def __init__(self, x: Any) -> None: pass + def __init__(self, x: object) -> None: pass # These are provided here for convenience. class int: diff --git a/test-data/unit/lib-stub/types.pyi b/test-data/unit/lib-stub/types.pyi index b118000e688c..02113aea3834 100644 --- a/test-data/unit/lib-stub/types.pyi +++ b/test-data/unit/lib-stub/types.pyi @@ -1,9 +1,10 @@ -from typing import TypeVar, Optional, List, Any, Generic, Sequence -T = TypeVar('T') +from typing import TypeVar -def coroutine(func: T) -> T: - return func +_T = TypeVar('_T') + +def coroutine(func: _T) -> _T: pass class bool: ... -class ModuleType: ... +class ModuleType: + __file__ = ... # type: str diff --git a/test-data/unit/lib-stub/typing.pyi b/test-data/unit/lib-stub/typing.pyi index 754c32c8d23e..274f3da76164 100644 --- a/test-data/unit/lib-stub/typing.pyi +++ b/test-data/unit/lib-stub/typing.pyi @@ -20,6 +20,7 @@ Type = 0 no_type_check = 0 ClassVar = 0 NoReturn = 0 +NewType = 0 # Type aliases. 
List = 0 @@ -53,7 +54,7 @@ class Generator(Iterator[T], Generic[T, U, V]): def send(self, value: U) -> T: pass @abstractmethod - def throw(self, typ: Any, val: Any=None, tb=None) -> None: pass + def throw(self, typ: Any, val: Any = None, tb: Any = None) -> None: pass @abstractmethod def close(self) -> None: pass @@ -61,38 +62,6 @@ class Generator(Iterator[T], Generic[T, U, V]): @abstractmethod def __iter__(self) -> 'Generator[T, U, V]': pass -class AsyncGenerator(AsyncIterator[T], Generic[T, U]): - @abstractmethod - def __anext__(self) -> Awaitable[T]: pass - - @abstractmethod - def asend(self, value: U) -> Awaitable[T]: pass - - @abstractmethod - def athrow(self, typ: Any, val: Any=None, tb: Any=None) -> Awaitable[T]: pass - - @abstractmethod - def aclose(self) -> Awaitable[T]: pass - - @abstractmethod - def __aiter__(self) -> 'AsyncGenerator[T, U]': pass - -class Awaitable(Generic[T]): - @abstractmethod - def __await__(self) -> Generator[Any, Any, T]: pass - -class AwaitableGenerator(Generator[T, U, V], Awaitable[V], Generic[T, U, V, S]): - pass - -class AsyncIterable(Generic[T]): - @abstractmethod - def __aiter__(self) -> 'AsyncIterator[T]': pass - -class AsyncIterator(AsyncIterable[T], Generic[T]): - def __aiter__(self) -> 'AsyncIterator[T]': return self - @abstractmethod - def __anext__(self) -> Awaitable[T]: pass - class Sequence(Iterable[T], Generic[T]): @abstractmethod def __getitem__(self, n: Any) -> T: pass @@ -101,9 +70,4 @@ class Mapping(Generic[T, U]): pass class MutableMapping(Generic[T, U]): pass -def NewType(name: str, tp: Type[T]) -> Callable[[T], T]: - def new_type(x): - return x - return new_type - TYPE_CHECKING = 1 diff --git a/test-data/unit/semanal-classvar.test b/test-data/unit/semanal-classvar.test index 677e1bd8cadc..d2e474cd278f 100644 --- a/test-data/unit/semanal-classvar.test +++ b/test-data/unit/semanal-classvar.test @@ -95,7 +95,7 @@ def f(x: ClassVar, y: ClassVar) -> ClassVar: pass main:2: error: ClassVar can only be used for assignments in class body [case testClassVarInCallableArgs] -from typing import Callable, ClassVar +from typing import Callable, ClassVar, Any f = None # type: Callable[[int, ClassVar], Any] [out] main:2: error: Invalid type: ClassVar nested inside other type diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test index 4c3033191dfc..c1ec57f205de 100644 --- a/test-data/unit/semanal-types.test +++ b/test-data/unit/semanal-types.test @@ -720,7 +720,7 @@ MypyFile:1( def ())) [case testOverloadedFunction] -from typing import overload +from typing import overload, Any @overload def f(a: object) -> int: a @overload @@ -730,7 +730,7 @@ def f(a: Any) -> Any: return a [out] MypyFile:1( - ImportFrom:1(typing, [overload]) + ImportFrom:1(typing, [overload, Any]) OverloadedFuncDef:2( FuncDef:7( f From d280d914452b572138afca44a8a54ff1e2ca72f9 Mon Sep 17 00:00:00 2001 From: quartox Date: Sun, 11 Jun 2017 09:32:33 -0700 Subject: [PATCH 17/27] Finish cleanup of inappropriate additions --- test-data/unit/check-functions.test | 2 ++ test-data/unit/pythoneval.test | 14 +++++++------- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index 290d9bea7524..e2ddade9887f 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -88,6 +88,7 @@ def r(x) -> None: ... 
r = l # E: Incompatible types in assignment (expression has type Callable[[Any, Any], None], variable has type Callable[[Any], None]) [case testSubtypingFunctionsImplicitNames] +from typing import Any def f(a, b): pass def g(c: Any, d: Any) -> Any: pass @@ -1825,6 +1826,7 @@ class A(Generic[t]): [case testRedefineFunction] +from typing import Any def f(x) -> Any: pass def g(x, y): pass def h(x): pass diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 46b3273ce561..4497917553f4 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -400,7 +400,7 @@ f.write(b'x') f.foobar() [out] _program.py:3: error: Argument 1 to "write" of "IO" has incompatible type "bytes"; expected "str" -_program.py:4: error: IO[str] has no attribute "foobar" +_program.py:4: error: "TextIO" has no attribute "foobar" [case testOpenReturnTypeInference] reveal_type(open('x')) @@ -409,9 +409,9 @@ reveal_type(open('x', 'rb')) mode = 'rb' reveal_type(open('x', mode)) [out] -_program.py:1: error: Revealed type is 'typing.IO[builtins.str]' -_program.py:2: error: Revealed type is 'typing.IO[builtins.str]' -_program.py:3: error: Revealed type is 'typing.IO[builtins.bytes]' +_program.py:1: error: Revealed type is 'typing.TextIO' +_program.py:2: error: Revealed type is 'typing.TextIO' +_program.py:3: error: Revealed type is 'typing.BinaryIO' _program.py:5: error: Revealed type is 'typing.IO[Any]' [case testOpenReturnTypeInferenceSpecialCases] @@ -421,10 +421,10 @@ reveal_type(open(file='x', mode='rb')) mode = 'rb' reveal_type(open(mode=mode, file='r')) [out] -_testOpenReturnTypeInferenceSpecialCases.py:1: error: Revealed type is 'typing.IO[builtins.str]' +_testOpenReturnTypeInferenceSpecialCases.py:1: error: Revealed type is 'typing.TextIO' _testOpenReturnTypeInferenceSpecialCases.py:1: error: Too few arguments for "open" -_testOpenReturnTypeInferenceSpecialCases.py:2: error: Revealed type is 'typing.IO[builtins.bytes]' -_testOpenReturnTypeInferenceSpecialCases.py:3: error: Revealed type is 'typing.IO[builtins.bytes]' +_testOpenReturnTypeInferenceSpecialCases.py:2: error: Revealed type is 'typing.BinaryIO' +_testOpenReturnTypeInferenceSpecialCases.py:3: error: Revealed type is 'typing.BinaryIO' _testOpenReturnTypeInferenceSpecialCases.py:5: error: Revealed type is 'typing.IO[Any]' [case testGenericPatterns] From 533260d247428120d8c649a2a0f3258cc69a3b5b Mon Sep 17 00:00:00 2001 From: quartox Date: Sat, 15 Jul 2017 06:33:43 -0700 Subject: [PATCH 18/27] Improve checking --- mypy/applytype.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/mypy/applytype.py b/mypy/applytype.py index 83149d03095e..598c45321b89 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -4,7 +4,7 @@ from mypy.sametypes import is_same_type from mypy.expandtype import expand_type from mypy.types import ( - Type, TypeVarId, TypeVarType, TypeVisitor, CallableType, AnyType, PartialType, + Type, TypeVarId, TypeVarType, TypeVisitor, CallableType, AnyType, PartialType, Instance, UnionType ) from mypy.messages import MessageBuilder @@ -71,13 +71,13 @@ def apply_generic_arguments(callable: CallableType, types: List[Type], def get_incompatible_arg_constraints(arg_types: Sequence[Type], type: Type, - index: int) -> Dict[str, Tuple[str]]: + index: int) -> Dict[str, Tuple[str, ...]]: """Gets incompatible function arguments with the constrained types. An example of a constrained type is AnyStr which must be all str or all byte. 
""" - constraints = {} # type: Dict[str, Tuple[str]] - if isinstance(type, Instance) and type.type.name() == 'object': + constraints = {} # type: Dict[str, Tuple[str, ...]] + if isinstance(type, Instance) and type.type.fullname() == 'builtins.object': if index == len(arg_types): # Index is off by one for '*' arguments constraints = add_arg_constraints(constraints, arg_types[index - 1]) @@ -86,13 +86,13 @@ def get_incompatible_arg_constraints(arg_types: Sequence[Type], type: Type, return constraints -def add_arg_constraints(constraints: Dict[str, Tuple[str]], - arg_type: Type) -> Dict[str, Tuple[str]]: +def add_arg_constraints(constraints: Dict[str, Tuple[str, ...]], + arg_type: Type) -> Dict[str, Tuple[str, ...]]: if (isinstance(arg_type, TypeVarType) and arg_type.values and len(arg_type.values) > 1 and arg_type.name not in constraints.keys()): - constraints[arg_type.name] = tuple(vals.type.name() for vals in arg_type.values) + constraints[arg_type.name] = tuple(val.type.name() for val in arg_type.values) elif isinstance(arg_type, UnionType): for item in arg_type.items: constraints = add_arg_constraints(constraints, item) From 93a7d5851d96e506a2c42e65dff97d3eaefdb442 Mon Sep 17 00:00:00 2001 From: quartox Date: Sat, 15 Jul 2017 06:34:02 -0700 Subject: [PATCH 19/27] Sparser testing --- test-data/unit/check-functions.test | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index 537fb88d4d3f..524711cd41c4 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -2176,8 +2176,6 @@ def f(x: S, y: S) -> S: return (x + y) f('1', '2') f('1', 2) # E: Type argument 1 of "f" has incompatible value \ # N: "S" must be all one type: int or str -f(1, '2') # E: Type argument 1 of "f" has incompatible value \ -# N: "S" must be all one type: int or str [case testMultipleConstrainedIncompatibleArguments] from typing import TypeVar @@ -2190,17 +2188,9 @@ f(1, 2, '3', '4') f(1, 2, b'3', b'4') f(1, '2', '3', '4') # E: Type argument 1 of "f" has incompatible value \ # N: "S" must be all one type: int or str -f('1', 2, '3', '4') # E: Type argument 1 of "f" has incompatible value \ -# N: "S" must be all one type: int or str f('1', '2', b'3', '4') # E: Type argument 2 of "f" has incompatible value \ # N: "AnyStr" must be all one type: str or bytes -f('1', '2', '3', b'4') # E: Type argument 2 of "f" has incompatible value \ -# N: "AnyStr" must be all one type: str or bytes f('1', 2, b'3', '4') # E: Type argument 1 of "f" has incompatible value \ # N: "S" must be all one type: int or str \ # E: Type argument 2 of "f" has incompatible value \ # N: "AnyStr" must be all one type: str or bytes -f(1, '2', '3', b'4') # E: Type argument 1 of "f" has incompatible value \ -# N: "S" must be all one type: int or str \ -# E: Type argument 2 of "f" has incompatible value \ -# N: "AnyStr" must be all one type: str or bytes From 7465cf87a796c7d8e26c1c56763efa9e1d2cfa4e Mon Sep 17 00:00:00 2001 From: quartox Date: Sun, 16 Jul 2017 09:52:47 -0700 Subject: [PATCH 20/27] Use MessageBuilder to get type strings --- mypy/applytype.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/mypy/applytype.py b/mypy/applytype.py index 598c45321b89..87cd49b0f431 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -41,7 +41,7 @@ def apply_generic_arguments(callable: CallableType, types: List[Type], types[i] = value break else: - constraints = 
get_incompatible_arg_constraints(callable.arg_types, type, i + 1) + constraints = get_incompatible_arg_constraints(msg, callable.arg_types, type, i + 1) if constraints: msg.incompatible_constrained_arguments(callable, i + 1, constraints, context) else: @@ -70,29 +70,33 @@ def apply_generic_arguments(callable: CallableType, types: List[Type], ) -def get_incompatible_arg_constraints(arg_types: Sequence[Type], type: Type, +def get_incompatible_arg_constraints(msg: MessageBuilder, + arg_types: Sequence[Type], + type: Type, index: int) -> Dict[str, Tuple[str, ...]]: """Gets incompatible function arguments with the constrained types. An example of a constrained type is AnyStr which must be all str or all byte. """ constraints = {} # type: Dict[str, Tuple[str, ...]] + print(index) if isinstance(type, Instance) and type.type.fullname() == 'builtins.object': if index == len(arg_types): # Index is off by one for '*' arguments - constraints = add_arg_constraints(constraints, arg_types[index - 1]) + constraints = add_arg_constraints(msg, constraints, arg_types[index - 1]) else: - constraints = add_arg_constraints(constraints, arg_types[index]) + constraints = add_arg_constraints(msg, constraints, arg_types[index]) return constraints -def add_arg_constraints(constraints: Dict[str, Tuple[str, ...]], +def add_arg_constraints(msg: MessageBuilder, + constraints: Dict[str, Tuple[str, ...]], arg_type: Type) -> Dict[str, Tuple[str, ...]]: if (isinstance(arg_type, TypeVarType) and arg_type.values and len(arg_type.values) > 1 and arg_type.name not in constraints.keys()): - constraints[arg_type.name] = tuple(val.type.name() for val in arg_type.values) + constraints[arg_type.name] = tuple(msg.format(val) for val in arg_type.values) elif isinstance(arg_type, UnionType): for item in arg_type.items: constraints = add_arg_constraints(constraints, item) From e88b7b56d0a4234b3842eda9daf72ba57fc4eaed Mon Sep 17 00:00:00 2001 From: quartox Date: Sat, 5 Aug 2017 07:25:51 -0700 Subject: [PATCH 21/27] Add indeces to AnyStr error message --- mypy/applytype.py | 49 ++++++++++++++++++++++++++++++++--------------- mypy/messages.py | 26 ++++++++++++++++++------- 2 files changed, 53 insertions(+), 22 deletions(-) diff --git a/mypy/applytype.py b/mypy/applytype.py index 87cd49b0f431..e28df4c544d2 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -41,9 +41,12 @@ def apply_generic_arguments(callable: CallableType, types: List[Type], types[i] = value break else: - constraints = get_incompatible_arg_constraints(msg, callable.arg_types, type, i + 1) + constraints = get_inferred_object_constraints(msg, callable.arg_types, type, i + 1) if constraints: - msg.incompatible_constrained_arguments(callable, i + 1, constraints, context) + constrained_indeces = get_inferred_object_arg_indeces( + msg, constraints, callable.arg_types) + msg.incompatible_inferred_object_arguments( + callable, constrained_indeces, constraints, context) else: msg.incompatible_typevar_value(callable, i + 1, type, context) upper_bound = callable.variables[i].upper_bound @@ -70,28 +73,30 @@ def apply_generic_arguments(callable: CallableType, types: List[Type], ) -def get_incompatible_arg_constraints(msg: MessageBuilder, - arg_types: Sequence[Type], - type: Type, - index: int) -> Dict[str, Tuple[str, ...]]: - """Gets incompatible function arguments with the constrained types. 
+def get_inferred_object_constraints(msg: MessageBuilder, + arg_types: Sequence[Type], + type: Type, + index: int) -> Dict[str, Tuple[str, ...]]: + """Gets incompatible function arguments that are inferred as object based on the type + constraints. - An example of a constrained type is AnyStr which must be all str or all byte. + An example of a constrained type is AnyStr which must be all str or all byte. When there is a + mismatch of arguments with a constrained type like AnyStr, then the inferred type is object. """ constraints = {} # type: Dict[str, Tuple[str, ...]] - print(index) if isinstance(type, Instance) and type.type.fullname() == 'builtins.object': if index == len(arg_types): # Index is off by one for '*' arguments - constraints = add_arg_constraints(msg, constraints, arg_types[index - 1]) + constraints = add_inferred_object_arg_constraints( + msg, constraints, arg_types[index - 1]) else: - constraints = add_arg_constraints(msg, constraints, arg_types[index]) + constraints = add_inferred_object_arg_constraints(msg, constraints, arg_types[index]) return constraints -def add_arg_constraints(msg: MessageBuilder, - constraints: Dict[str, Tuple[str, ...]], - arg_type: Type) -> Dict[str, Tuple[str, ...]]: +def add_inferred_object_arg_constraints(msg: MessageBuilder, + constraints: Dict[str, Tuple[str, ...]], + arg_type: Type) -> Dict[str, Tuple[str, ...]]: if (isinstance(arg_type, TypeVarType) and arg_type.values and len(arg_type.values) > 1 and @@ -99,5 +104,19 @@ def add_arg_constraints(msg: MessageBuilder, constraints[arg_type.name] = tuple(msg.format(val) for val in arg_type.values) elif isinstance(arg_type, UnionType): for item in arg_type.items: - constraints = add_arg_constraints(constraints, item) + constraints = add_inferred_object_arg_constraints(msg, constraints, item) return constraints + + +def get_inferred_object_arg_indeces(msg: MessageBuilder, + constraints: Dict[str, Tuple[str, ...]], + arg_types: List[Type]) -> Dict[str, List[str]]: + """Get the indeces of all arguments with inferred type of object and the same constraint. 
+ """ + indeces = {} # type: Dict[str, List[str]] + for constrained_type in constraints.keys(): + indeces[constrained_type] = [] + for i, type in enumerate(arg_types): + if constrained_type in msg.format(type): + indeces[constrained_type].append(str(i + 1)) + return indeces diff --git a/mypy/messages.py b/mypy/messages.py index 59aa5ac87962..2e93777bf4c9 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -864,14 +864,26 @@ def incompatible_typevar_value(self, callee: CallableType, index: int, self.fail('Type argument {} of {} has incompatible value {}'.format( index, callable_name(callee), self.format(type)), context) - def incompatible_constrained_arguments(self, - callee: CallableType, - index: int, - constraints: Mapping[str, Sequence[str]], - context: Context) -> None: + def incompatible_inferred_object_arguments(self, + callee: CallableType, + indeces: Mapping[str, List[str]], + constraints: Mapping[str, Sequence[str]], + context: Context) -> None: for key, values in constraints.items(): - self.fail('Type argument {} of {} has incompatible value'.format( - index, callable_name(callee)), context) + if len(indeces[key]) == 1: + index_str = str(indeces[key][0]) + elif len(indeces[key]) == 2: + index_str = ' and '.join(indeces[key]) + elif len(indeces[key]) > 2: + for i, index in enumerate(indeces[key]): + if i == 0: + index_str = index + elif i == len(indeces[key]) - 1: + index_str += ', and ' + index + else: + index_str += ', ' + index + self.fail('Type arguments {} of {} have incompatible values'.format( + index_str, callable_name(callee)), context) if len(values) == 2: constraint_str = '{} or {}'.format(values[0], values[1]) elif len(values) > 3: From 7c3dd7bdb93762168f0a77d03748748e86a9aac2 Mon Sep 17 00:00:00 2001 From: quartox Date: Sat, 5 Aug 2017 07:47:37 -0700 Subject: [PATCH 22/27] Small change to error message --- mypy/applytype.py | 3 +- mypy/messages.py | 2 +- test-data/unit/check-functions.test | 44 ++++++++++++++--------------- 3 files changed, 25 insertions(+), 24 deletions(-) diff --git a/mypy/applytype.py b/mypy/applytype.py index 2b78bffb0ad1..24711d138ea8 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -48,7 +48,8 @@ def apply_generic_arguments(callable: CallableType, types: List[Type], msg.incompatible_inferred_object_arguments( callable, constrained_indeces, constraints, context) else: - msg.incompatible_typevar_value(callable, type, callable.variables[i].name, context) + msg.incompatible_typevar_value( + callable, type, callable.variables[i].name, context) upper_bound = callable.variables[i].upper_bound if (type and not isinstance(type, PartialType) and not mypy.subtypes.is_subtype(type, upper_bound)): diff --git a/mypy/messages.py b/mypy/messages.py index 04abe77f9cb1..3597ab274436 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -886,7 +886,7 @@ def incompatible_inferred_object_arguments(self, index_str += ', and ' + index else: index_str += ', ' + index - self.fail('Type arguments {} of {} have incompatible values'.format( + self.fail('Arguments {} of {} have incompatible values'.format( index_str, callable_name(callee)), context) if len(values) == 2: constraint_str = '{} or {}'.format(values[0], values[1]) diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index 5576515a62e6..1082a8d61e0c 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -2170,14 +2170,14 @@ def f(x: AnyStr, y: AnyStr) -> None: pass def g(x: AnyStr, y: AnyStr, z: int) -> AnyStr: pass f('a', 
'b') f(b'a', b'b') -f('a', b'b') # E: Type argument 1 of "f" has incompatible value \ -# N: "AnyStr" must be all one type: str or bytes +f('a', b'b') # E: Arguments 1 and 2 of "f" have incompatible values \ +# N: "AnyStr" must be all one type: "str" or "bytes" g('a', 'b', 1) g(b'a', b'b', 1) -g('a', b'b', 1) # E: Type argument 1 of "g" has incompatible value \ -# N: "AnyStr" must be all one type: str or bytes -g('a', b'b', 'c') # E: Type argument 1 of "g" has incompatible value \ -# N: "AnyStr" must be all one type: str or bytes \ +g('a', b'b', 1) # E: Arguments 1 and 2 of "g" have incompatible values \ +# N: "AnyStr" must be all one type: "str" or "bytes" +g('a', b'b', 'c') # E: Arguments 1 and 2 of "g" have incompatible values \ +# N: "AnyStr" must be all one type: "str" or "bytes" \ # E: Argument 3 to "g" has incompatible type "str"; expected "int" [case testUnionAnyStrIncompatibleArguments] @@ -2186,8 +2186,8 @@ AnyStr = TypeVar('AnyStr', str, bytes) def f(x: Union[AnyStr, int], y: AnyStr) -> None: pass f('a', 'b') f(1, 'b') -f('a', b'b') # E: Type argument 1 of "f" has incompatible value \ -# N: "AnyStr" must be all one type: str or bytes +f('a', b'b') # E: Arguments 1 and 2 of "f" have incompatible values \ +# N: "AnyStr" must be all one type: "str" or "bytes" [case testStarAnyStrIncompatibleArguments] from typing import TypeVar, Union @@ -2197,11 +2197,11 @@ def g(x: int, *y: AnyStr) -> None: pass def h(*x: AnyStr, y: int) -> None: pass f('a') f('a', 'b') -f('a', b'b') # E: Type argument 1 of "f" has incompatible value \ -# N: "AnyStr" must be all one type: str or bytes +f('a', b'b') # E: Arguments 1 and 2 of "f" have incompatible values \ +# N: "AnyStr" must be all one type: "str" or "bytes" g(1, 'a') -g(1, 'a', b'b') # E: Type argument 1 of "g" has incompatible value \ -# N: "AnyStr" must be all one type: str or bytes +g(1, 'a', b'b') # E: Arguments 2 of "g" have incompatible values \ +# N: "AnyStr" must be all one type: "str" or "bytes" h('a', y=1) h('a', 'b', y=1) h('a', b'b', y=1) # E: Type argument 1 of "h" has incompatible value "object" @@ -2211,8 +2211,8 @@ from typing import TypeVar S = TypeVar('S', int, str) def f(x: S, y: S) -> S: return (x + y) f('1', '2') -f('1', 2) # E: Type argument 1 of "f" has incompatible value \ -# N: "S" must be all one type: int or str +f('1', 2) # E: Arguments 1 and 2 of "f" have incompatible values \ +# N: "S" must be all one type: "int" or "str" [case testMultipleConstrainedIncompatibleArguments] from typing import TypeVar @@ -2223,11 +2223,11 @@ f('1', '2', '3', '4') f('1', '2', b'3', b'4') f(1, 2, '3', '4') f(1, 2, b'3', b'4') -f(1, '2', '3', '4') # E: Type argument 1 of "f" has incompatible value \ -# N: "S" must be all one type: int or str -f('1', '2', b'3', '4') # E: Type argument 2 of "f" has incompatible value \ -# N: "AnyStr" must be all one type: str or bytes -f('1', 2, b'3', '4') # E: Type argument 1 of "f" has incompatible value \ -# N: "S" must be all one type: int or str \ -# E: Type argument 2 of "f" has incompatible value \ -# N: "AnyStr" must be all one type: str or bytes +f(1, '2', '3', '4') # E: Arguments 1 and 2 of "f" have incompatible values \ +# N: "S" must be all one type: "int" or "str" +f('1', '2', b'3', '4') # E: Arguments 3 and 4 of "f" have incompatible values \ +# N: "AnyStr" must be all one type: "str" or "bytes" +f('1', 2, b'3', '4') # E: Arguments 1 and 2 of "f" have incompatible values \ +# N: "S" must be all one type: "int" or "str" \ +# E: Arguments 3 and 4 of "f" have incompatible values \ +# N: "AnyStr" 
must be all one type: "str" or "bytes" From 5d778289e406e9dcde2f4416daecc551b71fb554 Mon Sep 17 00:00:00 2001 From: quartox Date: Sat, 5 Aug 2017 07:58:41 -0700 Subject: [PATCH 23/27] Fix some test messages --- test-data/unit/check-inference.test | 8 ++++---- test-data/unit/check-overloading.test | 8 ++++---- test-data/unit/check-typevar-values.test | 12 ++++++------ test-data/unit/pythoneval.test | 4 ++-- 4 files changed, 16 insertions(+), 16 deletions(-) diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 93afc662f2e7..5abec0f8c7ba 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -750,12 +750,12 @@ AnyStr = TypeVar('AnyStr', bytes, str) def f(x: Union[AnyStr, int], *a: AnyStr) -> None: pass f('foo') f('foo', 'bar') -f('foo', b'bar') # E: Type arguments 1 and 2 of "f" have incompatible values \ -# N: "AnyStr" must be all one type: bytes or str +f('foo', b'bar') # E: Arguments 1 and 2 of "f" have incompatible values \ +# N: "AnyStr" must be all one type: "bytes" or "str" f(1) f(1, 'foo') -f(1, 'foo', b'bar') # E: Type arguments 2 and 3 of "f" have incompatible values \ -# N: "AnyStr" must be all one type: bytes or str +f(1, 'foo', b'bar') # E: Arguments 1 and 2 of "f" have incompatible values \ +# N: "AnyStr" must be all one type: "bytes" or "str" [builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index 6184ec45ab0d..c74cf1c2f9f5 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -998,12 +998,12 @@ def g(x: int, *a: AnyStr) -> None: pass g('foo') g('foo', 'bar') -g('foo', b'bar') # E: Type arguments 1 and 2 of "g" have incompatible value1 \ -# N: "AnyStr" must be all one type: bytes or str +g('foo', b'bar') # E: Arguments 1 and 2 of "g" have incompatible values \ +# N: "AnyStr" must be all one type: "bytes" or "str" g(1) g(1, 'foo') -g(1, 'foo', b'bar') # E: Type arguments 2 and 3 of "g" have incompatible values \ -# N: "AnyStr" must be all one type: bytes or str +g(1, 'foo', b'bar') # E: Arguments 2 of "g" have incompatible values \ +# N: "AnyStr" must be all one type: "bytes" or "str" [builtins fixtures/primitives.pyi] [case testBadOverlapWithTypeVarsWithValues] diff --git a/test-data/unit/check-typevar-values.test b/test-data/unit/check-typevar-values.test index b930e1e714cb..3fb3554a8e75 100644 --- a/test-data/unit/check-typevar-values.test +++ b/test-data/unit/check-typevar-values.test @@ -7,8 +7,8 @@ T = TypeVar('T', int, str) def f(x: T) -> None: pass f(1) f('x') -f(object()) # E: Type arguments 1 of "f" have incompatible values \ -# N: "T" must be all one type: int or str +f(object()) # E: Arguments 1 of "f" have incompatible values \ +# N: "T" must be all one type: "int" or "str" [case testCallGenericFunctionWithTypeVarValueRestrictionUsingContext] @@ -20,8 +20,8 @@ s = ['x'] o = [object()] i = f(1) s = f('') -o = f(1) # E: Type arguments 1 of "f" have incompatible values \ -# N: "T" must be all one type: int or str +o = f(1) # E: Arguments 1 of "f" have incompatible values \ +# N: "T" must be all one type: "int" or "str" [builtins fixtures/list.pyi] [case testCallGenericFunctionWithTypeVarValueRestrictionAndAnyArgs] @@ -242,8 +242,8 @@ class A(Generic[X]): A(1) A('x') A(cast(Any, object())) -A(object()) # E: Type arguments 1 of "A" have incompatible values \ -# N: "X" must be all one type: int or str +A(object()) # E: Arguments 1 of "A" have incompatible values \ +# N: "X" 
must be all one type: "int" or "str" [case testGenericTypeWithTypevarValuesAndTypevarArgument] from typing import TypeVar, Generic diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index f3906960291e..83dd3f7cd9e8 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1277,7 +1277,7 @@ re.subn(bpat, b'', b'')[0] + b'' re.subn(bre, lambda m: b'', b'')[0] + b'' re.subn(bpat, lambda m: b'', b'')[0] + b'' [out] -_program.py:7: error: Type arguments 1 of "search" have incompatible values +_program.py:7: error: Arguments 1 and 2 of "search" have incompatible values _program.py:7: note: "AnyStr" must be all one type: str or bytes _program.py:9: error: Cannot infer type argument 1 of "search" @@ -1302,7 +1302,7 @@ re.subn(spat, '', '')[0] + '' re.subn(sre, lambda m: '', '')[0] + '' re.subn(spat, lambda m: '', '')[0] + '' [out] -_program.py:7: error: Type arguments 1 of "search" have incompatible values +_program.py:7: error: Arguments 1 and 2 of "search" have incompatible values _program.py:7: note: "AnyStr" must be all one type: str or bytes _program.py:9: error: Cannot infer type argument 1 of "search" From 24bdbe0210074dc96e15f7609ed7df31b9086ec6 Mon Sep 17 00:00:00 2001 From: quartox Date: Sat, 5 Aug 2017 08:06:49 -0700 Subject: [PATCH 24/27] Fix more error messages --- test-data/unit/check-functions.test | 2 +- test-data/unit/pythoneval.test | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index 1082a8d61e0c..e5ba22140525 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -2197,7 +2197,7 @@ def g(x: int, *y: AnyStr) -> None: pass def h(*x: AnyStr, y: int) -> None: pass f('a') f('a', 'b') -f('a', b'b') # E: Arguments 1 and 2 of "f" have incompatible values \ +f('a', b'b') # E: Arguments 1 of "f" have incompatible values \ # N: "AnyStr" must be all one type: "str" or "bytes" g(1, 'a') g(1, 'a', b'b') # E: Arguments 2 of "g" have incompatible values \ diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 83dd3f7cd9e8..e089717d7308 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1278,7 +1278,7 @@ re.subn(bre, lambda m: b'', b'')[0] + b'' re.subn(bpat, lambda m: b'', b'')[0] + b'' [out] _program.py:7: error: Arguments 1 and 2 of "search" have incompatible values -_program.py:7: note: "AnyStr" must be all one type: str or bytes +_program.py:7: note: "AnyStr" must be all one type: "str" or "bytes" _program.py:9: error: Cannot infer type argument 1 of "search" [case testReModuleString] @@ -1303,7 +1303,7 @@ re.subn(sre, lambda m: '', '')[0] + '' re.subn(spat, lambda m: '', '')[0] + '' [out] _program.py:7: error: Arguments 1 and 2 of "search" have incompatible values -_program.py:7: note: "AnyStr" must be all one type: str or bytes +_program.py:7: note: "AnyStr" must be all one type: "str" or "bytes" _program.py:9: error: Cannot infer type argument 1 of "search" [case testListSetitemTuple] From 30f98ab66f37fe26ee02d4b8cd3b8f5f5921c20d Mon Sep 17 00:00:00 2001 From: quartox Date: Fri, 18 Aug 2017 08:02:26 -0700 Subject: [PATCH 25/27] Remove brittle indeces of inferred object --- mypy/applytype.py | 18 +----------------- mypy/messages.py | 18 +++--------------- 2 files changed, 4 insertions(+), 32 deletions(-) diff --git a/mypy/applytype.py b/mypy/applytype.py index 24711d138ea8..e4ebede3310e 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py 
@@ -43,10 +43,8 @@ def apply_generic_arguments(callable: CallableType, types: List[Type], else: constraints = get_inferred_object_constraints(msg, callable.arg_types, type, i + 1) if constraints: - constrained_indeces = get_inferred_object_arg_indeces( - msg, constraints, callable.arg_types) msg.incompatible_inferred_object_arguments( - callable, constrained_indeces, constraints, context) + callable, i + 1, constraints, context) else: msg.incompatible_typevar_value( callable, type, callable.variables[i].name, context) @@ -107,17 +105,3 @@ def add_inferred_object_arg_constraints(msg: MessageBuilder, for item in arg_type.items: constraints = add_inferred_object_arg_constraints(msg, constraints, item) return constraints - - -def get_inferred_object_arg_indeces(msg: MessageBuilder, - constraints: Dict[str, Tuple[str, ...]], - arg_types: List[Type]) -> Dict[str, List[str]]: - """Get the indeces of all arguments with inferred type of object and the same constraint. - """ - indeces = {} # type: Dict[str, List[str]] - for constrained_type in constraints.keys(): - indeces[constrained_type] = [] - for i, type in enumerate(arg_types): - if constrained_type in msg.format(type): - indeces[constrained_type].append(str(i + 1)) - return indeces diff --git a/mypy/messages.py b/mypy/messages.py index 3597ab274436..ef3eeae05906 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -870,24 +870,12 @@ def incompatible_typevar_value(self, def incompatible_inferred_object_arguments(self, callee: CallableType, - indeces: Mapping[str, List[str]], + index: int, constraints: Mapping[str, Sequence[str]], context: Context) -> None: for key, values in constraints.items(): - if len(indeces[key]) == 1: - index_str = str(indeces[key][0]) - elif len(indeces[key]) == 2: - index_str = ' and '.join(indeces[key]) - elif len(indeces[key]) > 2: - for i, index in enumerate(indeces[key]): - if i == 0: - index_str = index - elif i == len(indeces[key]) - 1: - index_str += ', and ' + index - else: - index_str += ', ' + index - self.fail('Arguments {} of {} have incompatible values'.format( - index_str, callable_name(callee)), context) + self.fail('Argument {} of {} has incompatible value'.format( + index, callable_name(callee)), context) if len(values) == 2: constraint_str = '{} or {}'.format(values[0], values[1]) elif len(values) > 3: From 36fdd0f41f04411bb13b91699ee0d436d316b315 Mon Sep 17 00:00:00 2001 From: quartox Date: Sat, 19 Aug 2017 06:50:55 -0700 Subject: [PATCH 26/27] Remove mutliple indeces from tests --- test-data/unit/check-functions.test | 24 ++++++++++++------------ test-data/unit/check-inference.test | 4 ++-- test-data/unit/check-overloading.test | 4 ++-- test-data/unit/check-typevar-values.test | 6 +++--- test-data/unit/pythoneval.test | 4 ++-- 5 files changed, 21 insertions(+), 21 deletions(-) diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index e5ba22140525..3c7a15862acd 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -2170,13 +2170,13 @@ def f(x: AnyStr, y: AnyStr) -> None: pass def g(x: AnyStr, y: AnyStr, z: int) -> AnyStr: pass f('a', 'b') f(b'a', b'b') -f('a', b'b') # E: Arguments 1 and 2 of "f" have incompatible values \ +f('a', b'b') # E: Argument 1 of "f" has incompatible value \ # N: "AnyStr" must be all one type: "str" or "bytes" g('a', 'b', 1) g(b'a', b'b', 1) -g('a', b'b', 1) # E: Arguments 1 and 2 of "g" have incompatible values \ +g('a', b'b', 1) # E: Argument 1 of "g" has incompatible value \ # N: "AnyStr" must be 
all one type: "str" or "bytes" -g('a', b'b', 'c') # E: Arguments 1 and 2 of "g" have incompatible values \ +g('a', b'b', 'c') # E: Argument 1 of "g" has incompatible value \ # N: "AnyStr" must be all one type: "str" or "bytes" \ # E: Argument 3 to "g" has incompatible type "str"; expected "int" @@ -2186,7 +2186,7 @@ AnyStr = TypeVar('AnyStr', str, bytes) def f(x: Union[AnyStr, int], y: AnyStr) -> None: pass f('a', 'b') f(1, 'b') -f('a', b'b') # E: Arguments 1 and 2 of "f" have incompatible values \ +f('a', b'b') # E: Argument 1 of "f" has incompatible value \ # N: "AnyStr" must be all one type: "str" or "bytes" [case testStarAnyStrIncompatibleArguments] @@ -2197,21 +2197,21 @@ def g(x: int, *y: AnyStr) -> None: pass def h(*x: AnyStr, y: int) -> None: pass f('a') f('a', 'b') -f('a', b'b') # E: Arguments 1 of "f" have incompatible values \ +f('a', b'b') # E: Argument 1 of "f" has incompatible value \ # N: "AnyStr" must be all one type: "str" or "bytes" g(1, 'a') -g(1, 'a', b'b') # E: Arguments 2 of "g" have incompatible values \ +g(1, 'a', b'b') # E: Argument 1 of "g" has incompatible value \ # N: "AnyStr" must be all one type: "str" or "bytes" h('a', y=1) h('a', 'b', y=1) -h('a', b'b', y=1) # E: Type argument 1 of "h" has incompatible value "object" +h('a', b'b', y=1) # E: Value of type variable "AnyStr" of "h" cannot be "object" [case testConstrainedIncompatibleArguments] from typing import TypeVar S = TypeVar('S', int, str) def f(x: S, y: S) -> S: return (x + y) f('1', '2') -f('1', 2) # E: Arguments 1 and 2 of "f" have incompatible values \ +f('1', 2) # E: Argument 1 of "f" has incompatible value \ # N: "S" must be all one type: "int" or "str" [case testMultipleConstrainedIncompatibleArguments] @@ -2223,11 +2223,11 @@ f('1', '2', '3', '4') f('1', '2', b'3', b'4') f(1, 2, '3', '4') f(1, 2, b'3', b'4') -f(1, '2', '3', '4') # E: Arguments 1 and 2 of "f" have incompatible values \ +f(1, '2', '3', '4') # E: Argument 1 of "f" has incompatible value \ # N: "S" must be all one type: "int" or "str" -f('1', '2', b'3', '4') # E: Arguments 3 and 4 of "f" have incompatible values \ +f('1', '2', b'3', '4') # E: Argument 2 of "f" have incompatible value \ # N: "AnyStr" must be all one type: "str" or "bytes" -f('1', 2, b'3', '4') # E: Arguments 1 and 2 of "f" have incompatible values \ +f('1', 2, b'3', '4') # E: Argument 1 of "f" has incompatible value \ # N: "S" must be all one type: "int" or "str" \ -# E: Arguments 3 and 4 of "f" have incompatible values \ +# E: Argument 2 of "f" has incompatible value \ # N: "AnyStr" must be all one type: "str" or "bytes" diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 5abec0f8c7ba..497b63b598e8 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -750,11 +750,11 @@ AnyStr = TypeVar('AnyStr', bytes, str) def f(x: Union[AnyStr, int], *a: AnyStr) -> None: pass f('foo') f('foo', 'bar') -f('foo', b'bar') # E: Arguments 1 and 2 of "f" have incompatible values \ +f('foo', b'bar') # E: Argument 1 of "f" has incompatible value \ # N: "AnyStr" must be all one type: "bytes" or "str" f(1) f(1, 'foo') -f(1, 'foo', b'bar') # E: Arguments 1 and 2 of "f" have incompatible values \ +f(1, 'foo', b'bar') # E: Argument 1 of "f" has incompatible value \ # N: "AnyStr" must be all one type: "bytes" or "str" [builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index c74cf1c2f9f5..b6acf3c33a9c 100644 --- 
a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -998,11 +998,11 @@ def g(x: int, *a: AnyStr) -> None: pass g('foo') g('foo', 'bar') -g('foo', b'bar') # E: Arguments 1 and 2 of "g" have incompatible values \ +g('foo', b'bar') # E: Argument 1 of "g" has incompatible value \ # N: "AnyStr" must be all one type: "bytes" or "str" g(1) g(1, 'foo') -g(1, 'foo', b'bar') # E: Arguments 2 of "g" have incompatible values \ +g(1, 'foo', b'bar') # E: Argument 1 of "g" has incompatible value \ # N: "AnyStr" must be all one type: "bytes" or "str" [builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-typevar-values.test b/test-data/unit/check-typevar-values.test index 3fb3554a8e75..b7bca30befe2 100644 --- a/test-data/unit/check-typevar-values.test +++ b/test-data/unit/check-typevar-values.test @@ -7,7 +7,7 @@ T = TypeVar('T', int, str) def f(x: T) -> None: pass f(1) f('x') -f(object()) # E: Arguments 1 of "f" have incompatible values \ +f(object()) # E: Argument 1 of "f" has incompatible value \ # N: "T" must be all one type: "int" or "str" @@ -20,7 +20,7 @@ s = ['x'] o = [object()] i = f(1) s = f('') -o = f(1) # E: Arguments 1 of "f" have incompatible values \ +o = f(1) # E: Argument 1 of "f" has incompatible value \ # N: "T" must be all one type: "int" or "str" [builtins fixtures/list.pyi] @@ -242,7 +242,7 @@ class A(Generic[X]): A(1) A('x') A(cast(Any, object())) -A(object()) # E: Arguments 1 of "A" have incompatible values \ +A(object()) # E: Argument 1 of "A" has incompatible value \ # N: "X" must be all one type: "int" or "str" [case testGenericTypeWithTypevarValuesAndTypevarArgument] diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index e089717d7308..06633ad1629d 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -1277,7 +1277,7 @@ re.subn(bpat, b'', b'')[0] + b'' re.subn(bre, lambda m: b'', b'')[0] + b'' re.subn(bpat, lambda m: b'', b'')[0] + b'' [out] -_program.py:7: error: Arguments 1 and 2 of "search" have incompatible values +_program.py:7: error: Argument 1 of "search" has incompatible value _program.py:7: note: "AnyStr" must be all one type: "str" or "bytes" _program.py:9: error: Cannot infer type argument 1 of "search" @@ -1302,7 +1302,7 @@ re.subn(spat, '', '')[0] + '' re.subn(sre, lambda m: '', '')[0] + '' re.subn(spat, lambda m: '', '')[0] + '' [out] -_program.py:7: error: Arguments 1 and 2 of "search" have incompatible values +_program.py:7: error: Argument 1 of "search" has incompatible value _program.py:7: note: "AnyStr" must be all one type: "str" or "bytes" _program.py:9: error: Cannot infer type argument 1 of "search" From b746fe858fb04e7c5873a4437dca6124aa2470b5 Mon Sep 17 00:00:00 2001 From: quartox Date: Sat, 19 Aug 2017 06:59:30 -0700 Subject: [PATCH 27/27] Fix test typo --- test-data/unit/check-functions.test | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index 3c7a15862acd..0f043debdf5c 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -2225,7 +2225,7 @@ f(1, 2, '3', '4') f(1, 2, b'3', b'4') f(1, '2', '3', '4') # E: Argument 1 of "f" has incompatible value \ # N: "S" must be all one type: "int" or "str" -f('1', '2', b'3', '4') # E: Argument 2 of "f" have incompatible value \ +f('1', '2', b'3', '4') # E: Argument 2 of "f" has incompatible value \ # N: "AnyStr" must be all one type: "str" or "bytes" f('1', 2, b'3', '4') # E: 
Argument 1 of "f" has incompatible value \ # N: "S" must be all one type: "int" or "str" \