Add line number to the diff report #72

Merged: 7 commits, Feb 6, 2024
15 changes: 8 additions & 7 deletions _unittests/ut_reference/test_evaluator_yield.py
@@ -422,18 +422,19 @@ def test_distance_sequence_str(self):
text = dc.to_str(s1, s2, align)
self.assertIn("OUTPUT", text)
expected = """
=|INPUTfloat322x2ABCDA|INPUTfloat322x2ABCDA
=|INPUTfloat322x2ABCDB|INPUTfloat322x2ABCDB
~|INPUTfloat322x3ABCDX|INPUTfloat322x2ABCDX
-|RESULTfloat322x2CEIOExpH|
=|RESULTfloat322x2CEIOLinearRegrY1|RESULTfloat322x2CEIOLinearRegrY1
~|RESULTfloat322x2CEIOAbsY|RESULTfloat322x3CEIPAbsZ
~|OUTPUTfloat322x2CEIOY|OUTPUTfloat322x2CEIPY
1=|INPUTfloat322x2ABCDA|INPUTfloat322x2ABCDA
2=|INPUTfloat322x2ABCDB|INPUTfloat322x2ABCDB
3~|INPUTfloat322x3ABCDX|INPUTfloat322x2ABCDX
4-|RESULTfloat322x2CEIOExpH|
5=|RESULTfloat322x2CEIOLinearRegrY1|RESULTfloat322x2CEIOLinearRegrY1
6~|RESULTfloat322x2CEIOAbsY|RESULTfloat322x3CEIPAbsZ
7~|OUTPUTfloat322x2CEIOY|OUTPUTfloat322x2CEIPY
""".replace(
" ", ""
).strip(
"\n "
)
self.maxDiff = None
self.assertEqual(expected, text.replace(" ", "").strip("\n"))

def test_compare_execution(self):
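The new expected string is the old one with a row index prefixed to every report line; the test then removes spaces and strips newlines so the comparison ignores column padding. A minimal standalone sketch of that normalization, using shortened placeholder rows rather than the real report:

# Hypothetical shortened report rows; the real ones come from DistanceExecution.to_str.
rows = [
    "= | INPUT float32 2x2 ABCD A | INPUT float32 2x2 ABCD A",
    "~ | INPUT float32 2x3 ABCD X | INPUT float32 2x2 ABCD X",
]
# Prefix every row with a right-aligned index, as the patched to_str now does.
numbered = [f"{i: 3d} {row}" for i, row in enumerate(rows, start=1)]
text = "\n".join(numbered)
# The test drops all spaces before comparing, which turns the rows into "1=|..." lines.
print(text.replace(" ", "").strip("\n"))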
68 changes: 42 additions & 26 deletions onnx_array_api/reference/evaluator_yield.py
@@ -118,6 +118,7 @@ def enumerate_results(
self,
output_names: Optional[List[str]] = None,
feed_inputs: Optional[Dict[str, Any]] = None,
raise_exc: bool = True,
) -> Iterator[Tuple[ResultType, str, Any]]:
"""
Executes the onnx model and enumerates all the intermediate results.
@@ -148,6 +149,7 @@ def enumerate_results(
yield ResultType.INPUT, k, v, None

# step 2: execute nodes
yield_output = True
for node in self.evaluator.rt_nodes_:
for i in node.input:
if i not in results:
@@ -160,39 +162,48 @@
linked_attributes = {}
if node.has_linked_attribute and attributes:
linked_attributes["linked_attributes"] = attributes
if node.need_context():
outputs = node.run(*inputs, context=results, **linked_attributes)
else:
outputs = node.run(*inputs, **linked_attributes)

try:
if node.need_context():
outputs = node.run(*inputs, context=results, **linked_attributes)
else:
outputs = node.run(*inputs, **linked_attributes)
except Exception:
if raise_exc:
raise
yield_output = False
break

for name, value in zip(node.output, outputs):
yield ResultType.RESULT, name, value, node.op_type
results[name] = value

# step 3: outputs
for name in output_names:
if name not in results:
raise RuntimeError(
f"Unable to find output name {name!r} in {sorted(results)}, proto is\n{self.proto_}"
)
yield ResultType.OUTPUT, name, results[name], None
if yield_output:
for name in output_names:
if name not in results:
raise RuntimeError(
f"Unable to find output name {name!r} in {sorted(results)}, proto is\n{self.proto_}"
)
yield ResultType.OUTPUT, name, results[name], None
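The new raise_exc flag turns a failing node into a controlled stop: with the default raise_exc=True the exception propagates as before, while raise_exc=False clears yield_output so enumeration ends after the last successful node and the final outputs are not yielded. A usage sketch, assuming model is an onnx.ModelProto and feeds a matching {input name: value} dictionary (both placeholders here):

from onnx_array_api.reference.evaluator_yield import YieldEvaluator

# model and feeds are placeholders: an onnx.ModelProto and its input dictionary.
evaluator = YieldEvaluator(model)
for kind, name, value, op_type in evaluator.enumerate_results(
    None, feeds, raise_exc=False  # stop at the failing node instead of raising
):
    print(kind, name, getattr(value, "shape", None), op_type)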

def enumerate_summarized(
self,
output_names: Optional[List[str]] = None,
feed_inputs: Optional[Dict[str, Any]] = None,
raise_exc: bool = True,
) -> Iterator[ResultExecution]:
"""
Executes the onnx model and enumerates intermediate results without their names.

Args:
output_names: requested outputs by names, None for all
feed_inputs: dictionary `{ input name: input value }`

Returns:
iterator on tuple(result kind, node.type, dtype, shape, value, result name)
:param output_names: requested outputs by names, None for all
:param feed_inputs: dictionary `{ input name: input value }`
:param raise_exc: raises an exception if the execution fails, or stops where the failure occurred
:return: iterator on ResultExecution
"""
for kind, name, value, op_type in self.enumerate_results(
output_names, feed_inputs
output_names, feed_inputs, raise_exc=raise_exc
):
summary = make_summary(value)
yield ResultExecution(
@@ -328,6 +339,7 @@ def to_str(
"""
rows = []
last = -1, -1
row_index = 1
for i, j in alignment:
assert i < len(s1), f"Unexpected value i={i} >= len(s1)={len(s1)}"
assert j < len(s2), f"Unexpected value j={j} >= len(s2)={len(s2)}"
@@ -338,20 +350,18 @@
d2 = s2[j]
d = self.distance_pair(d1, d2)
symbol = "=" if d == 0 else "~"
rows.append(
f"{symbol} | {_align(str(d1), column_size)} | {_align(str(d2), column_size)}"
)
line = f"{symbol} | {_align(str(d1), column_size)} | {_align(str(d2), column_size)}"
elif i == last[0]:
d2 = s2[j]
rows.append(
line = (
f"+ | {_align('', column_size)} | {_align(str(d2), column_size)} "
)
else:
d1 = s1[i]
rows.append(
f"- | {_align(str(d1), column_size)} | {_align('', column_size)}"
)
line = f"- | {_align(str(d1), column_size)} | {_align('', column_size)}"
rows.append(f"{row_index: 3d} {line}")
last = i, j
row_index += 1
return "\n".join(rows)


@@ -410,6 +420,7 @@ def compare_onnx_execution(
model2: ModelProto,
inputs: Optional[List[Any]] = None,
verbose: int = 0,
raise_exc: bool = True,
) -> Tuple[List[ResultExecution], List[ResultExecution], List[Tuple[int, int]]]:
"""
Compares the execution of two onnx models.
@@ -421,6 +432,7 @@
:param model2: second model
:param inputs: inputs to use
:param verbose: verbosity
:param raise_exc: raises an exception if the execution fails, or stops at the error
:return: four results, a sequence of results for the first model and the second model,
the alignment between the two, DistanceExecution
"""
@@ -433,11 +445,15 @@
if verbose:
print(f"[compare_onnx_execution] got {len(inputs)} inputs")
print("[compare_onnx_execution] execute first model")
res1 = list(YieldEvaluator(model1).enumerate_summarized(None, feeds1))
res1 = list(
YieldEvaluator(model1).enumerate_summarized(None, feeds1, raise_exc=raise_exc)
)
if verbose:
print(f"[compare_onnx_execution] got {len(res1)} results")
print("[compare_onnx_execution] execute second model")
res2 = list(YieldEvaluator(model2).enumerate_summarized(None, feeds2))
res2 = list(
YieldEvaluator(model2).enumerate_summarized(None, feeds2, raise_exc=raise_exc)
)
if verbose:
print(f"[compare_onnx_execution] got {len(res2)} results")
print("[compare_onnx_execution] compute edit distance")
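With raise_exc forwarded from compare_onnx_execution, two models can be compared even when one of them fails part way through; the summarized sequence simply stops at the failing node and the edit distance is computed on what did run. A usage sketch, assuming model1 and model2 are two onnx.ModelProto instances (placeholders here) and following the four-value return described in the docstring:

from onnx_array_api.reference.evaluator_yield import compare_onnx_execution

# model1 and model2 are placeholders for the two ONNX models to compare.
res1, res2, align, dc = compare_onnx_execution(
    model1, model2, verbose=1, raise_exc=False  # do not raise if one model fails
)
# dc is the DistanceExecution used for the alignment; its report rows are now numbered.
print(dc.to_str(res1, res2, align))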