-
-
Notifications
You must be signed in to change notification settings - Fork 1.8k
Use PEP 585 syntax in Python 2, protobuf & _ast stubs, where possible
#6949
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Conversation
Refs PyCQA/flake8-pyi#97
This comment has been minimized.
This comment has been minimized.
I used a similar script to #6717:

import ast
import re
import subprocess
import sys
from collections import defaultdict
from itertools import chain
from pathlib import Path
from typing import NamedTuple
# A single line-level edit: the `old` import text found on a line is swapped
# for `replacement` (an empty replacement deletes the import outright).
DeleteableImport = NamedTuple("DeleteableImport", [("old", str), ("replacement", str)])
# A new import statement to insert: the statement `text` plus the number of
# spaces of `indentation` it should be written with.
NewImport = NamedTuple("NewImport", [("text", str), ("indentation", int)])
# typing aliases whose PEP 585 replacement is the same-named builtin
# (``List[int]`` -> ``list[int]`` and so on).
IMPORTED_FROM_BUILTINS_NOT_TYPING = frozenset({"List", "FrozenSet", "Set", "Dict", "Tuple", "Type"})

# Names that should be imported from `typing` rather than `typing_extensions`:
# the collections.abc aliases plus plain re-exports that `typing` also provides.
IMPORTED_FROM_TYPING_NOT_TYPING_EXTENSIONS = (
    # collections.abc aliases
    frozenset({"Awaitable", "Coroutine", "AsyncIterable", "AsyncIterator", "AsyncGenerator"})
    # typing aliases
    | frozenset({"Protocol", "runtime_checkable", "ClassVar", "NewType", "overload", "Text", "NoReturn"})
)

# typing aliases that should come from `collections` instead; the values give
# the (differently-cased) names these classes have in `collections`.
IMPORTED_FROM_COLLECTIONS_NOT_TYPING_OR_TYPING_EXTENSIONS = {
    "Counter": "Counter",
    "Deque": "deque",
    "DefaultDict": "defaultdict",
    "ChainMap": "ChainMap",
}

# Stub files whose rewritten contents failed to re-parse; reported by main().
FAILURES: list[Path] = []
def fix_bad_syntax(path: Path) -> None:
    """Rewrite the stub file at *path* in place to use PEP 585-era imports.

    Names imported from ``typing``/``typing_extensions`` that have builtin,
    ``collections`` or plain-``typing`` equivalents are stripped from their
    import statements, replacement imports are inserted at the same
    indentation, and subscripted uses such as ``List[int]`` are lowercased
    to ``list[int]``.

    Side effects: writes *path* back out unconditionally; if the rewritten
    text no longer parses, the path is also appended to the module-level
    ``FAILURES`` list so it can be repaired by hand.
    """
    with open(path) as f:
        stub = f.read()

    lines = stub.splitlines()
    tree = ast.parse(stub)

    # 0-based lineno -> DeleteableImport(old, replacement) substitution on that line.
    imports_to_delete = {}
    # 0-based lineno -> list of NewImport statements to insert before that line.
    imports_to_add = defaultdict(list)
    classes_from_typing = set()
    classes_from_typing_extensions = set()

    class BadImportFinder(ast.NodeVisitor):
        def visit_ImportFrom(self, node: ast.ImportFrom) -> None:
            if node.module not in {"typing", "typing_extensions"}:
                return

            bad_builtins_classes_in_this_import = set()
            bad_collections_classes_in_this_import = set()
            bad_collections_abc_classes_in_this_import = set()
            bad_contextlib_classes_in_this_import = set()

            if node.module == "typing":
                for cls in node.names:
                    if cls.name in IMPORTED_FROM_BUILTINS_NOT_TYPING:
                        bad_builtins_classes_in_this_import.add(cls)
                    # typing_extensions.pyi itself must keep re-exporting from typing.
                    elif cls.name in IMPORTED_FROM_COLLECTIONS_NOT_TYPING_OR_TYPING_EXTENSIONS and path != Path(
                        "stdlib/typing_extensions.pyi"
                    ):
                        bad_collections_classes_in_this_import.add(cls)
                    elif cls.name == "AsyncContextManager":
                        bad_contextlib_classes_in_this_import.add(cls)
            else:
                for cls in node.names:
                    if cls.name in IMPORTED_FROM_COLLECTIONS_NOT_TYPING_OR_TYPING_EXTENSIONS:
                        bad_collections_classes_in_this_import.add(cls)
                    elif cls.name in IMPORTED_FROM_TYPING_NOT_TYPING_EXTENSIONS:
                        bad_collections_abc_classes_in_this_import.add(cls)
                    elif cls.name in {"ContextManager", "AsyncContextManager"}:
                        bad_contextlib_classes_in_this_import.add(cls)

            bad_classes_in_this_import = (
                bad_builtins_classes_in_this_import
                | bad_collections_classes_in_this_import
                | bad_collections_abc_classes_in_this_import
                | bad_contextlib_classes_in_this_import
            )
            if not bad_classes_in_this_import:
                return

            if node.module == "typing":
                classes_from_typing.update(cls.name for cls in bad_classes_in_this_import)
            else:
                classes_from_typing_extensions.update(cls.name for cls in bad_classes_in_this_import)

            new_import_list = [cls for cls in node.names if cls not in bad_classes_in_this_import]

            ### DEALING WITH EXISTING IMPORT STATEMENTS ###
            # Scenario (1): we no longer need *any* names from this import statement.
            if not new_import_list:
                imports_to_delete[node.lineno - 1] = DeleteableImport(old=ast.unparse(node), replacement="")
            # Scenario (2): some names survive, and the import statement is one line.
            elif node.lineno == node.end_lineno:
                imports_to_delete[node.lineno - 1] = DeleteableImport(
                    old=ast.unparse(node),
                    replacement=ast.unparse(ast.ImportFrom(module=node.module, names=new_import_list, level=0)),
                )
            # Scenario (3): some names survive, and the import statement is multiline:
            # delete each offending "Name," (or "Name as Alias,") entry on its own line.
            else:
                for cls in node.names:
                    if cls in bad_classes_in_this_import:
                        imports_to_delete[cls.lineno - 1] = DeleteableImport(
                            old=f"{cls.name}," if cls.asname is None else f"{cls.name} as {cls.asname},",
                            replacement="",
                        )

            ### ADDING NEW IMPORT STATEMENTS ###
            if bad_collections_classes_in_this_import:
                imports_to_add[node.lineno - 1].append(
                    NewImport(
                        text=ast.unparse(
                            ast.ImportFrom(
                                module="collections",
                                names=[
                                    ast.alias(
                                        name=IMPORTED_FROM_COLLECTIONS_NOT_TYPING_OR_TYPING_EXTENSIONS[cls.name],
                                        asname=cls.asname,
                                    )
                                    for cls in bad_collections_classes_in_this_import
                                ],
                                level=0,
                            )
                        ),
                        indentation=node.col_offset,
                    )
                )
            if bad_collections_abc_classes_in_this_import:
                # BUG FIX: this previously referenced an undefined name
                # `classes_to_import`, raising NameError whenever a
                # typing_extensions import had to be re-routed to `typing`.
                # Re-import the same names (preserving any `as` aliases).
                imports_to_add[node.lineno - 1].append(
                    NewImport(
                        text=ast.unparse(
                            ast.ImportFrom(
                                module="typing",
                                names=[
                                    ast.alias(name=cls.name, asname=cls.asname)
                                    for cls in bad_collections_abc_classes_in_this_import
                                ],
                                level=0,
                            )
                        ),
                        indentation=node.col_offset,
                    )
                )
            # NOTE(review): contextlib candidates are deleted from their import but no
            # replacement `contextlib` import is inserted here -- presumably those few
            # cases were fixed up by hand afterwards; confirm before reusing this script.

    BadImportFinder().visit(tree)

    if not classes_from_typing | classes_from_typing_extensions:
        return

    # In-place substitutions first: these never shift line numbers.
    for lineno, (old_syntax, new_syntax) in imports_to_delete.items():
        lines[lineno] = lines[lineno].replace(old_syntax, new_syntax)

    # BUG FIX: insert from the bottom of the file upwards, so that earlier
    # insertions cannot shift the 0-based target linenos of insertions that
    # are further down the file.
    for lineno in sorted(imports_to_add, reverse=True):
        for new_import, indentation in imports_to_add[lineno]:
            if isinstance(new_import, str):
                lines.insert(lineno, f'{" " * indentation}{new_import}')
            else:
                # Fallback for an iterable of lines (not produced by NewImport today).
                lines = lines[:lineno] + [f'{" " * indentation}{l}' for l in new_import] + lines[lineno:]

    try:
        new_tree = ast.parse("\n".join(lines))
    except SyntaxError:
        # Newline added so consecutive failure messages don't run together.
        sys.stderr.write(f"Error converting new syntax in {path}\n")
        FAILURES.append(path)
    else:
        # 0-based lineno -> names subscripted on that line that need lowercasing.
        lines_with_bad_syntax = defaultdict(list)

        class OldSyntaxFinder(ast.NodeVisitor):
            def visit_Subscript(self, node: ast.Subscript) -> None:
                # Only rewrite names we actually removed from typing imports AND
                # whose replacement is a simple lowercasing (builtins, deque, defaultdict).
                if isinstance(node.value, ast.Name) and node.value.id in (
                    (classes_from_typing | classes_from_typing_extensions)
                    & (IMPORTED_FROM_BUILTINS_NOT_TYPING | {"Deque", "DefaultDict"})
                ):
                    lines_with_bad_syntax[node.lineno - 1].append(node.value.id)
                self.generic_visit(node)

        OldSyntaxFinder().visit(new_tree)

        for i, cls_list in lines_with_bad_syntax.items():
            for cls in cls_list:
                # \W guard avoids mangling names that merely end with e.g. "List".
                lines[i] = re.sub(fr"(\W){cls}\[", fr"\1{cls.lower()}[", lines[i])

    with open(path, "w") as f:
        f.write("\n".join(lines) + "\n")
def main() -> None:
    """Convert every stub file, then re-run the formatters and the repo's check scripts."""
    print("STARTING RUN: Will attempt to fix new syntax in typeshed directory...\n\n")

    all_stubs = chain(Path("stdlib").rglob("*.pyi"), Path("stubs").rglob("*.pyi"))
    for path in all_stubs:
        print(f"Attempting to convert {path} to new syntax.")
        fix_bad_syntax(path)

    # Re-normalise import order and formatting after the mechanical edits.
    print("\n\nSTARTING ISORT...\n\n")
    for folder in {"stdlib", "stubs", "tests"}:
        subprocess.run([sys.executable, "-m", "isort", folder])
    print("\n\nSTARTING BLACK...\n\n")
    subprocess.run([sys.executable, "-m", "black", "."])

    # Report any files whose rewritten contents failed to re-parse.
    if FAILURES:
        print("\n\nFAILED to convert the following files to new syntax:\n")
        for path in FAILURES:
            print(f"- {path}")
    else:
        print("\n\nThere were ZERO failures in converting to new syntax. HOORAY!!\n\n")

    # Sanity-check the result with the repository's own test scripts.
    print('\n\nRunning "check_new_syntax.py"...\n\n')
    subprocess.run([sys.executable, "tests/check_new_syntax.py"])
    print('\n\nRunning "stubtest_stdlib.py"...\n\n')
    subprocess.run([sys.executable, "tests/stubtest_stdlib.py"])
# Script entry point.
if __name__ == "__main__":
    main()
This comment has been minimized.
This comment has been minimized.
protobuf & _ast stubs, where possible
According to mypy_primer, this change has no effect on the checked open source code. 🤖🎉
Thanks! I think we should leave protobuf alone because the stubs are autogenerated.
Okay — should I add per-file excludes for the proposed new error codes to https://github.com/python/typeshed/blob/master/.flake8? |
Actually, after looking at #6944 I realized these particular files are not autogenerated. |
Thanks! |
There are some differences with Python 3 stubs:
- `ContextManager` has to be imported from `typing`, as there is no `contextlib.AbstractContextManager` class in Python 2.
- `OrderedDict` has to be imported from `typing_extensions`, as there is no `collections.OrderedDict` class in Python 2.