Commit 9d335e2

New semantic analyzer: remove the limit on max number of iterations (#6499)
Keep track of added names. If an iteration didn't add anything new, any further work is meaningless and the next iteration will be the last one. Avoid infinite loops by failing if the final iteration defers anything. Fixes #6485.
1 parent 8ac8b02 commit 9d335e2
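
The approach described above amounts to a fixed-point loop over deferred targets: keep iterating while some pass makes progress, run exactly one more pass once a pass adds nothing new, and treat any deferral during that final pass (or hitting the hard iteration cap) as an internal error. The sketch below illustrates just that control flow with hypothetical names (`run_to_fixed_point`, `analyze` are illustrative, not mypy's API); the real logic lives in `process_top_levels` and `process_top_level_function` in the diff that follows.

```python
# Hypothetical sketch of the iteration strategy (not mypy's actual code).
from typing import Callable, Iterable, List, Tuple

MAX_ITERATIONS = 20  # hard safety cap; reaching it means we are likely stuck


def run_to_fixed_point(targets: Iterable[str],
                       analyze: Callable[[str, bool], Tuple[List[str], bool]]) -> None:
    """`analyze(target, final)` returns (deferred targets, made progress)."""
    worklist = list(targets)
    final_iteration = False
    iteration = 0
    while worklist:
        iteration += 1
        if iteration > MAX_ITERATIONS:
            # The real code reports a blocker error via report_hang() instead.
            raise RuntimeError('maximum semantic analysis iteration count reached')
        next_worklist = []  # type: List[str]
        any_progress = False
        for target in worklist:
            deferred, progress = analyze(target, final_iteration)
            next_worklist.extend(deferred)
            any_progress = any_progress or progress
        if final_iteration:
            # On the final pass missing names are reported, not deferred.
            assert not next_worklist, 'Must not defer during final iteration'
        worklist = next_worklist
        # If this pass added nothing new, the next pass is the last one.
        final_iteration = not any_progress
```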

File tree:
- mypy/newsemanal/semanal.py
- mypy/newsemanal/semanal_main.py

2 files changed: 71 additions, 23 deletions

mypy/newsemanal/semanal.py

Lines changed: 27 additions & 3 deletions

```diff
@@ -214,7 +214,13 @@ class NewSemanticAnalyzer(NodeVisitor[None],
     # Stack of functions being analyzed
     function_stack = None  # type: List[FuncItem]

-    # Is this the final iteration of semantic analysis?
+    # Set to True if semantic analysis defines a name, or replaces a
+    # placeholder definition. If some iteration makes no progress,
+    # there can be at most one additional final iteration (see below).
+    progress = False
+
+    # Is this the final iteration of semantic analysis (where we report
+    # unbound names due to cyclic definitions)?
     _final_iteration = False

     loop_depth = 0  # Depth of breakable loops
```

```diff
@@ -307,6 +313,11 @@ def add_implicit_module_attrs(self, file_node: MypyFile) -> None:
                 assert t is not None, 'type should be specified for {}'.format(name)
                 typ = UnboundType(t)

+            existing = file_node.names.get(name)
+            if existing is not None and not isinstance(existing.node, PlaceholderNode):
+                # Already exists.
+                continue
+
             an_type = self.anal_type(typ)
             if an_type:
                 var = Var(name, an_type)
```
```diff
@@ -4052,15 +4063,23 @@ def add_symbol_table_node(self, name: str, symbol: SymbolTableNode,
         existing = names.get(name)
         if (existing is not None
                 and context is not None
-                and not isinstance(existing.node, PlaceholderNode)):
-            if existing.node != symbol.node:
+                and (not isinstance(existing.node, PlaceholderNode)
+                     or isinstance(symbol.node, PlaceholderNode))):
+            # There is an existing node, so this may be a redefinition.
+            # If the new node points to the same node as the old one,
+            # or if both old and new nodes are placeholders, we don't
+            # need to do anything.
+            if (existing.node != symbol.node
+                    and not (isinstance(existing.node, PlaceholderNode)
+                             and isinstance(symbol.node, PlaceholderNode))):
                 if isinstance(symbol.node, (FuncDef, Decorator)):
                     self.add_func_redefinition(names, name, symbol)
                 if not (isinstance(symbol.node, (FuncDef, Decorator))
                         and self.set_original_def(existing.node, symbol.node)):
                     self.name_already_defined(name, context, existing)
         elif name not in self.missing_names and '*' not in self.missing_names:
             names[name] = symbol
+            self.progress = True
             return True
         return False
```
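
The rewritten condition in `add_symbol_table_node` distinguishes three outcomes: silently ignore the call when the new symbol adds nothing (same node, or a placeholder arriving over another placeholder), report a redefinition when a genuinely different binding conflicts, and otherwise insert the symbol and mark `self.progress`. A hedged sketch of that classification (the helper below is illustrative, not part of mypy, and it ignores the `context is not None` check):

```python
# Illustrative classification of the cases handled in add_symbol_table_node.
# `is_placeholder` stands in for isinstance(node, PlaceholderNode).
from enum import Enum
from typing import Callable, Optional


class Outcome(Enum):
    NO_OP = 1          # same node, or placeholder arriving over placeholder
    REDEFINITION = 2   # conflicting definition that should be reported
    ADD = 3            # new name, or placeholder replaced: counts as progress


def classify(existing_node: Optional[object],
             new_node: object,
             is_placeholder: Callable[[object], bool]) -> Outcome:
    if existing_node is None:
        return Outcome.ADD
    if is_placeholder(existing_node) and not is_placeholder(new_node):
        # A real definition replacing a placeholder is progress, not an error.
        return Outcome.ADD
    if (existing_node == new_node
            or (is_placeholder(existing_node) and is_placeholder(new_node))):
        return Outcome.NO_OP
    return Outcome.REDEFINITION
```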

```diff
@@ -4261,6 +4280,11 @@ def lookup_current_scope(self, name: str) -> Optional[SymbolTableNode]:
     def schedule_patch(self, priority: int, patch: Callable[[], None]) -> None:
         self.patches.append((priority, patch))

+    def report_hang(self) -> None:
+        self.errors.report(-1, -1,
+                           'Internal error: maximum semantic analysis iteration count reached',
+                           blocker=True)
+

 def replace_implicit_first_type(sig: FunctionLike, new: Type) -> FunctionLike:
     if isinstance(sig, CallableType):
```

mypy/newsemanal/semanal_main.py

Lines changed: 44 additions & 20 deletions

```diff
@@ -47,8 +47,9 @@
 Patches = List[Tuple[int, Callable[[], None]]]


-# Perform up to this many semantic analysis iterations until giving up trying to bind all names.
-MAX_ITERATIONS = 10
+# If we perform this many iterations, raise an exception since we are likely stuck.
+MAX_ITERATIONS = 20
+

 # Number of passes over core modules before going on to the rest of the builtin SCC.
 CORE_WARMUP = 2
```

```diff
@@ -132,29 +133,37 @@ def process_top_levels(graph: 'Graph', scc: List[str], patches: Patches) -> None
     # named tuples in builtin SCC.
     if all(m in worklist for m in core_modules):
         worklist += list(reversed(core_modules)) * CORE_WARMUP
-    iteration = 0
     final_iteration = False
+    iteration = 0
     while worklist:
         iteration += 1
-        if iteration == MAX_ITERATIONS:
-            # Give up. Likely it's impossible to bind all names.
+        if iteration > MAX_ITERATIONS:
+            state.manager.new_semantic_analyzer.report_hang()
+            break
+        if final_iteration:
+            # Give up. It's impossible to bind all names.
             state.manager.incomplete_namespaces.clear()
-            final_iteration = True
-        elif iteration > MAX_ITERATIONS:
-            assert False, 'Max iteration count reached in semantic analysis'
         all_deferred = []  # type: List[str]
+        any_progress = False
         while worklist:
             next_id = worklist.pop()
             state = graph[next_id]
             assert state.tree is not None
-            deferred, incomplete = semantic_analyze_target(next_id, state, state.tree, None,
-                                                           final_iteration, patches)
+            deferred, incomplete, progress = semantic_analyze_target(next_id, state,
+                                                                     state.tree,
+                                                                     None,
+                                                                     final_iteration,
+                                                                     patches)
             all_deferred += deferred
+            any_progress = any_progress or progress
             if not incomplete:
                 state.manager.incomplete_namespaces.discard(next_id)
+        if final_iteration:
+            assert not all_deferred, 'Must not defer during final iteration'
         # Reverse to process the targets in the same order on every iteration. This avoids
         # processing the same target twice in a row, which is inefficient.
         worklist = list(reversed(all_deferred))
+        final_iteration = not any_progress


 def process_functions(graph: 'Graph', scc: List[str], patches: Patches) -> None:
```
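
In `process_top_levels`, progress is OR-ed across every target processed in one pass, `final_iteration` is only decided after the whole pass, and the deferred list is reversed so the next pass visits targets in the same order. A minimal sketch of that per-pass bookkeeping with hypothetical names (`analyze_one` stands in for `semantic_analyze_target`, returning just `(deferred, progress)` here for brevity):

```python
# Sketch of one pass of the worklist loop above (hypothetical helper names).
from typing import Callable, List, Tuple


def one_pass(worklist: List[str],
             analyze_one: Callable[[str, bool], Tuple[List[str], bool]],
             final_iteration: bool) -> Tuple[List[str], bool]:
    """Return the next worklist and whether the next pass must be the final one."""
    all_deferred = []  # type: List[str]
    any_progress = False
    while worklist:
        next_id = worklist.pop()
        deferred, progress = analyze_one(next_id, final_iteration)
        all_deferred += deferred
        # One productive target is enough to keep the whole SCC iterating.
        any_progress = any_progress or progress
    if final_iteration:
        assert not all_deferred, 'Must not defer during final iteration'
    # Reverse so pop() visits targets in the same order on every pass.
    return list(reversed(all_deferred)), not any_progress
```
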
```diff
@@ -187,21 +196,28 @@ def process_top_level_function(analyzer: 'NewSemanticAnalyzer',
     Process the body of the function (including nested functions) again and again,
     until all names have been resolved (ot iteration limit reached).
     """
-    iteration = 0
     # We need one more iteration after incomplete is False (e.g. to report errors, if any).
-    more_iterations = incomplete = True
+    final_iteration = False
+    incomplete = True
     # Start in the incomplete state (no missing names will be reported on first pass).
     # Note that we use module name, since functions don't create qualified names.
     deferred = [module]
     analyzer.incomplete_namespaces.add(module)
-    while deferred and more_iterations:
+    iteration = 0
+    while deferred:
         iteration += 1
-        if not (deferred or incomplete) or iteration == MAX_ITERATIONS:
+        if iteration == MAX_ITERATIONS:
+            analyzer.report_hang()
+            break
+        if not (deferred or incomplete) or final_iteration:
             # OK, this is one last pass, now missing names will be reported.
-            more_iterations = False
             analyzer.incomplete_namespaces.discard(module)
-        deferred, incomplete = semantic_analyze_target(target, state, node, active_type,
-                                                       not more_iterations, patches)
+        deferred, incomplete, progress = semantic_analyze_target(target, state, node, active_type,
+                                                                 final_iteration, patches)
+        if final_iteration:
+            assert not deferred, 'Must not defer during final iteration'
+        if not progress:
+            final_iteration = True

     analyzer.incomplete_namespaces.discard(module)
     # After semantic analysis is done, discard local namespaces
```

```diff
@@ -226,14 +242,22 @@ def semantic_analyze_target(target: str,
                             node: Union[MypyFile, FuncDef, OverloadedFuncDef, Decorator],
                             active_type: Optional[TypeInfo],
                             final_iteration: bool,
-                            patches: Patches) -> Tuple[List[str], bool]:
+                            patches: Patches) -> Tuple[List[str], bool, bool]:
+    """Semantically analyze a single target.
+
+    Return tuple with these items:
+    - list of deferred targets
+    - was some definition incomplete
+    - were any new names were defined (or placeholders replaced)
+    """
     tree = state.tree
     assert tree is not None
     analyzer = state.manager.new_semantic_analyzer
     # TODO: Move initialization to somewhere else
     analyzer.global_decls = [set()]
     analyzer.nonlocal_decls = [set()]
     analyzer.globals = tree.names
+    analyzer.progress = False
     with state.wrap_context(check_blockers=False):
         with analyzer.file_context(file_node=tree,
                                    fnam=tree.path,
```

```diff
@@ -247,9 +271,9 @@ def semantic_analyze_target(target: str,
             if isinstance(node, Decorator):
                 infer_decorator_signature_if_simple(node, analyzer)
     if analyzer.deferred:
-        return [target], analyzer.incomplete
+        return [target], analyzer.incomplete, analyzer.progress
     else:
-        return [], analyzer.incomplete
+        return [], analyzer.incomplete, analyzer.progress


 def check_type_arguments(graph: 'Graph', scc: List[str], errors: Errors) -> None:
```
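
`semantic_analyze_target` now follows a reset-then-read protocol for the progress flag: `analyzer.progress` is cleared before the target is analyzed, and whatever the analyzer recorded is returned as the third tuple element alongside deferral and incompleteness. A hedged sketch of that protocol with dummy stand-ins (`DummyAnalyzer`, `analyze_one_target` are not mypy's classes or functions):

```python
# Minimal sketch of the reset-then-read protocol for the progress flag.
# DummyAnalyzer and analyze_one_target are illustrative stand-ins only.
from typing import List, Tuple


class DummyAnalyzer:
    def __init__(self) -> None:
        self.progress = False
        self.deferred = False
        self.incomplete = False

    def refresh(self, target: str, final_iteration: bool) -> None:
        # Placeholder for real analysis; it may set any of the three flags.
        self.progress = True


def analyze_one_target(analyzer: DummyAnalyzer,
                       target: str,
                       final_iteration: bool) -> Tuple[List[str], bool, bool]:
    analyzer.progress = False              # reset before analyzing this target
    analyzer.refresh(target, final_iteration)
    deferred = [target] if analyzer.deferred else []
    return deferred, analyzer.incomplete, analyzer.progress
```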

0 commit comments
