Patches = List[Tuple[int, Callable[[], None]]]


-# Perform up to this many semantic analysis iterations until giving up trying to bind all names.
-MAX_ITERATIONS = 10
+# If we perform this many iterations, raise an exception since we are likely stuck.
+MAX_ITERATIONS = 20
+

# Number of passes over core modules before going on to the rest of the builtin SCC.
CORE_WARMUP = 2
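
The constant change above replaces a silent give-up with a hard convergence check: the loops below treat MAX_ITERATIONS purely as a safety net and otherwise stop as soon as a full pass makes no progress. A minimal standalone sketch of that pattern (helper names invented for illustration, not part of this commit):

    def run_until_stable(step, max_iterations=20):
        # step() returns True while it still makes progress.
        iteration = 0
        while True:
            iteration += 1
            if iteration > max_iterations:
                # Likely stuck: surface an error rather than looping forever.
                raise RuntimeError('semantic analysis did not converge')
            if not step():
                return
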
@@ -132,29 +133,37 @@ def process_top_levels(graph: 'Graph', scc: List[str], patches: Patches) -> None
    # named tuples in builtin SCC.
    if all(m in worklist for m in core_modules):
        worklist += list(reversed(core_modules)) * CORE_WARMUP
-    iteration = 0
    final_iteration = False
+    iteration = 0
    while worklist:
        iteration += 1
-        if iteration == MAX_ITERATIONS:
-            # Give up. Likely it's impossible to bind all names.
+        if iteration > MAX_ITERATIONS:
+            state.manager.new_semantic_analyzer.report_hang()
+            break
+        if final_iteration:
+            # Give up. It's impossible to bind all names.
            state.manager.incomplete_namespaces.clear()
-            final_iteration = True
-        elif iteration > MAX_ITERATIONS:
-            assert False, 'Max iteration count reached in semantic analysis'
        all_deferred = []  # type: List[str]
+        any_progress = False
        while worklist:
            next_id = worklist.pop()
            state = graph[next_id]
            assert state.tree is not None
-            deferred, incomplete = semantic_analyze_target(next_id, state, state.tree, None,
-                                                           final_iteration, patches)
+            deferred, incomplete, progress = semantic_analyze_target(next_id, state,
+                                                                     state.tree,
+                                                                     None,
+                                                                     final_iteration,
+                                                                     patches)
            all_deferred += deferred
+            any_progress = any_progress or progress
            if not incomplete:
                state.manager.incomplete_namespaces.discard(next_id)
+        if final_iteration:
+            assert not all_deferred, 'Must not defer during final iteration'
        # Reverse to process the targets in the same order on every iteration. This avoids
        # processing the same target twice in a row, which is inefficient.
        worklist = list(reversed(all_deferred))
+        final_iteration = not any_progress


def process_functions(graph: 'Graph', scc: List[str], patches: Patches) -> None:
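
With this hunk, semantic_analyze_target reports a third value, progress, and the module-level loop uses it to decide when to schedule the one final error-reporting pass. A self-contained sketch of that worklist shape, with the mypy-specific bookkeeping stripped out (names simplified, not the real API):

    def drain(worklist, analyze):
        # analyze(target, final_iteration) -> (deferred_targets, made_progress)
        final_iteration = False
        while worklist:
            all_deferred, any_progress = [], False
            for target in worklist:
                deferred, progress = analyze(target, final_iteration)
                all_deferred += deferred
                any_progress = any_progress or progress
            if final_iteration:
                assert not all_deferred, 'Must not defer during final iteration'
            worklist = list(reversed(all_deferred))
            final_iteration = not any_progress
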
@@ -187,21 +196,28 @@ def process_top_level_function(analyzer: 'NewSemanticAnalyzer',
    Process the body of the function (including nested functions) again and again,
    until all names have been resolved (or iteration limit reached).
    """
-    iteration = 0
    # We need one more iteration after incomplete is False (e.g. to report errors, if any).
-    more_iterations = incomplete = True
+    final_iteration = False
+    incomplete = True
    # Start in the incomplete state (no missing names will be reported on first pass).
    # Note that we use module name, since functions don't create qualified names.
    deferred = [module]
    analyzer.incomplete_namespaces.add(module)
-    while deferred and more_iterations:
+    iteration = 0
+    while deferred:
        iteration += 1
-        if not (deferred or incomplete) or iteration == MAX_ITERATIONS:
+        if iteration == MAX_ITERATIONS:
+            analyzer.report_hang()
+            break
+        if not (deferred or incomplete) or final_iteration:
            # OK, this is one last pass, now missing names will be reported.
-            more_iterations = False
            analyzer.incomplete_namespaces.discard(module)
-        deferred, incomplete = semantic_analyze_target(target, state, node, active_type,
-                                                       not more_iterations, patches)
+        deferred, incomplete, progress = semantic_analyze_target(target, state, node, active_type,
+                                                                 final_iteration, patches)
+        if final_iteration:
+            assert not deferred, 'Must not defer during final iteration'
+        if not progress:
+            final_iteration = True

    analyzer.incomplete_namespaces.discard(module)
    # After semantic analysis is done, discard local namespaces
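
The per-function loop gets the same treatment: one extra pass with final_iteration set once a pass makes no progress, and analyzer.report_hang() instead of a crash when the iteration cap is reached. report_hang() itself is not shown in this diff; a plausible sketch of what it would do, written as a free function over an Errors-like object (hypothetical, for illustration only):

    def report_hang(errors):
        # Turn a non-converging analysis into a blocker error instead of an assert.
        errors.report(-1, -1,
                      'Internal error: maximum semantic analysis iteration count reached',
                      blocker=True)
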
@@ -226,14 +242,22 @@ def semantic_analyze_target(target: str,
                            node: Union[MypyFile, FuncDef, OverloadedFuncDef, Decorator],
                            active_type: Optional[TypeInfo],
                            final_iteration: bool,
-                            patches: Patches) -> Tuple[List[str], bool]:
+                            patches: Patches) -> Tuple[List[str], bool, bool]:
+    """Semantically analyze a single target.
+
+    Return tuple with these items:
+    - list of deferred targets
+    - was some definition incomplete
+    - were any new names defined (or placeholders replaced)
+    """
    tree = state.tree
    assert tree is not None
    analyzer = state.manager.new_semantic_analyzer
    # TODO: Move initialization to somewhere else
    analyzer.global_decls = [set()]
    analyzer.nonlocal_decls = [set()]
    analyzer.globals = tree.names
+    analyzer.progress = False
    with state.wrap_context(check_blockers=False):
        with analyzer.file_context(file_node=tree,
                                   fnam=tree.path,
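
The analyzer.progress flag reset here is what feeds the third item documented in the new docstring above. A rough sketch of the kind of bookkeeping that would set it, using an invented helper (the real logic lives inside NewSemanticAnalyzer and is not part of this hunk): any genuinely new name, or a placeholder replaced by a real definition, counts as progress.

    def record_symbol(analyzer, names, name, node):
        # Hypothetical helper: adding a new name or replacing a placeholder
        # with a real definition flips the progress flag for this pass.
        old = names.get(name)
        if old is None or getattr(old, 'is_placeholder', False):
            names[name] = node
            analyzer.progress = True
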
@@ -247,9 +271,9 @@ def semantic_analyze_target(target: str,
    if isinstance(node, Decorator):
        infer_decorator_signature_if_simple(node, analyzer)
    if analyzer.deferred:
-        return [target], analyzer.incomplete
+        return [target], analyzer.incomplete, analyzer.progress
    else:
-        return [], analyzer.incomplete
+        return [], analyzer.incomplete, analyzer.progress


def check_type_arguments(graph: 'Graph', scc: List[str], errors: Errors) -> None: