
Commit 7998b7f

benkli01 authored and facebook-github-bot committed

Fix issue with debug TOSA dumps being overwritten (#5029)

Summary: When a graph is partitioned into multiple partitions, the Arm backend overwrote the debug dumps of the TOSA intermediate files, so only the files for the last partition were available. To fix this, the delegation tag is now appended to the file names.

Pull Request resolved: #5029
Reviewed By: cccclai
Differential Revision: D62243216
Pulled By: digantdesai
fbshipit-source-id: f7c6a94a01a23bffa17c138ead455b4d28db1c6a

1 parent 8874de2 commit 7998b7f

File tree: 4 files changed, +71 -21 lines
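For orientation, here is a minimal sketch, not taken from the commit itself, of the naming scheme the fix introduces: the per-partition delegation tag is folded into a filename suffix, so dumps from different partitions no longer overwrite each other. The tag values are hypothetical; in the backend the tag comes from node.meta["delegation_tag"], which the partitioner assigns.

# Hypothetical tags; shows how an empty tag degenerates to the old names.
def debug_file_names(tag):
    suffix = "{}".format(f"_{tag}" if tag else "")
    return f"output{suffix}.tosa", f"desc{suffix}.json"

print(debug_file_names(None))    # ('output.tosa', 'desc.json') -- single-partition case
print(debug_file_names("tag0"))  # ('output_tag0.tosa', 'desc_tag0.json')
print(debug_file_names("tag1"))  # ('output_tag1.tosa', 'desc_tag1.json')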

backends/arm/arm_backend.py
Lines changed: 18 additions & 2 deletions

@@ -168,6 +168,17 @@ def get_intermediate_path(compile_spec: List[CompileSpec]) -> Optional[str]:
     return None
 
 
+def _get_first_delegation_tag(graph_module) -> str | None:
+    """Get the first delegation tag from the graph_module or return None."""
+    for node in graph_module.graph.nodes:
+        tag = node.meta.get("delegation_tag")
+        if tag:
+            return tag
+
+    logger.debug("No delegation tag found in partition.")
+    return None
+
+
 @final
 class ArmBackend(BackendDetails):
     @staticmethod

@@ -222,8 +233,13 @@ def preprocess( # noqa: C901
         # TODO: It would be awesome if this dump could somehow be done on top level and not here.
         # Problem is that the desc.json has to be created on the tosa_graph object, which we can't
        # access from top level.
-        if artifact_path is not None:
-            dbg_tosa_dump(tosa_graph, artifact_path)
+        if artifact_path:
+            tag = _get_first_delegation_tag(graph_module)
+            dbg_tosa_dump(
+                tosa_graph,
+                artifact_path,
+                suffix="{}".format(f"_{tag}" if tag else ""),
+            )
 
         # Serialize and return the program. While we have always produced TOSA
         # output as an intermediate, some flows compile to device binaries in
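For context, a small self-contained sketch of what the new _get_first_delegation_tag helper effectively does. The toy module and the hand-assigned tag below are assumptions for illustration; in the real flow the partitioner writes node.meta["delegation_tag"] before ArmBackend.preprocess runs.

# Toy torch.fx module; the tag assignment stands in for the partitioner's work.
import torch
from torch.fx import symbolic_trace


class AddOne(torch.nn.Module):
    def forward(self, x):
        return x + 1


gm = symbolic_trace(AddOne())

for node in gm.graph.nodes:
    node.meta["delegation_tag"] = "tag0"  # assumed tag, normally set by the partitioner

# Same lookup the helper performs: the first non-empty delegation_tag wins.
tag = None
for node in gm.graph.nodes:
    tag = node.meta.get("delegation_tag")
    if tag:
        break

print(tag)  # "tag0" -> debug files become output_tag0.tosa and desc_tag0.json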

backends/arm/test/runner_utils.py
Lines changed: 13 additions & 1 deletion

@@ -10,6 +10,7 @@
 import subprocess
 import tempfile
 
+from pathlib import Path
 from typing import Dict, List, Optional, Tuple
 
 import numpy as np

@@ -325,7 +326,18 @@ def run_tosa_ref_model(
             self._has_init_run
         ), "RunnerUtil needs to be initialized using init_run() before running tosa reference."
 
-        desc_file_path = os.path.join(self.intermediate_path, "desc.json")
+        all_desc_file_paths = [
+            str(path) for path in Path(self.intermediate_path).glob("desc*.json")
+        ]
+        assert (
+            all_desc_file_paths
+        ), f"No TOSA description file found in '{self.intermediate_path}'."
+        if len(all_desc_file_paths) != 1:
+            raise NotImplementedError(
+                "Graphs with more than one partition are currently not supported."
+            )
+
+        desc_file_path = all_desc_file_paths[0]
         assert os.path.exists(
             desc_file_path
         ), f"desc_file_path: {desc_file_path} does not exist"

backends/arm/test/tester/arm_tester.py
Lines changed: 35 additions & 13 deletions

@@ -34,6 +34,7 @@
 from executorch.backends.xnnpack.test.tester import Tester
 from executorch.exir import EdgeCompileConfig
 from executorch.exir.backend.compile_spec_schema import CompileSpec
+from executorch.exir.lowered_backend_module import LoweredBackendModule
 from torch.fx import Graph
 
 logger = logging.getLogger(__name__)

@@ -44,21 +45,42 @@ class Partition(tester.Partition):
     def dump_artifact(self, path_to_dump: Optional[str]):
         super().dump_artifact(path_to_dump)
 
-        to_print = None
-        for spec in self.graph_module.lowered_module_0.compile_specs:
-            if spec.key == "output_format":
-                if spec.value == b"tosa":
-                    tosa_fb = self.graph_module.lowered_module_0.processed_bytes
+        def get_output_format(lowered_module) -> str | None:
+            for spec in lowered_module.compile_specs:
+                if spec.key == "output_format":
+                    return spec.value.decode()
+            return None
+
+        output = ""
+        for node in self.graph_module.graph.nodes:
+            if node.op == "get_attr" and node.name.startswith("lowered_module_"):
+                lowered_module = getattr(self.graph_module, node.name)
+                assert isinstance(
+                    lowered_module, LoweredBackendModule
+                ), f"Attribute {node.name} must be of type LoweredBackendModule."
+
+                output_format = get_output_format(lowered_module)
+                if output_format == "tosa":
+                    tosa_fb = lowered_module.processed_bytes
                     to_print = dbg_tosa_fb_to_json(tosa_fb)
                     to_print = pformat(to_print, compact=True, indent=1)
-                    to_print = f"\n TOSA deserialized: \n{to_print}"
-                elif spec.value == b"vela":
-                    vela_cmd_stream = self.graph_module.lowered_module_0.processed_bytes
-                    to_print = str(vela_cmd_stream)
-                    to_print = f"\n Vela command stream: \n{to_print}"
-                break
-        assert to_print is not None, "No TOSA nor Vela compile spec found"
-        _dump_str(to_print, path_to_dump)
+                    output += f"\nTOSA deserialized {node.name}: \n{to_print}\n"
+                elif output_format == "vela":
+                    vela_cmd_stream = lowered_module.processed_bytes
+                    output += (
+                        f"\nVela command stream {node.name}: \n{vela_cmd_stream}\n"
+                    )
+                else:
+                    logger.warning(
+                        f"No TOSA nor Vela compile spec found in compile specs of {node.name}."
+                    )
+                    continue
+
+        if not output:
+            logger.warning("No output to print generated from artifact.")
+            return
+
+        _dump_str(output, path_to_dump)
 
 
 class Serialize(tester.Serialize):
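The traversal above replaces the hard-coded lowered_module_0 attribute with a walk over every get_attr node in the graph, so artifacts from all partitions are dumped. A reduced sketch of that pattern, using a plain torch.fx module whose parameter attribute stands in for the LoweredBackendModule attributes (executorch types are deliberately not used here):

# Generic get_attr traversal; "weight" stands in for lowered_module_<N>.
import torch
from torch.fx import symbolic_trace


class Wrapper(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.weight = torch.nn.Parameter(torch.ones(3))

    def forward(self, x):
        return x * self.weight


gm = symbolic_trace(Wrapper())

for node in gm.graph.nodes:
    if node.op == "get_attr":
        attr = getattr(gm, node.target)
        print(node.name, type(attr).__name__)  # e.g. "weight Parameter"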

backends/arm/tosa_utils.py
Lines changed: 5 additions & 5 deletions

@@ -50,10 +50,10 @@ def dbg_node(node):
 
 
 # Output TOSA flatbuffer and test harness file
-def dbg_tosa_dump(tosa_graph, path):
-    filename = "output.tosa"
+def dbg_tosa_dump(tosa_graph: ts.TosaSerializer, path: str, suffix: str = ""):
+    filename = f"output{suffix}.tosa"
 
-    logger.info(f"Emitting debug output to {path}")
+    logger.info(f"Emitting debug output to: {path=}, {suffix=}")
 
     os.makedirs(path, exist_ok=True)
 

@@ -65,7 +65,7 @@ def dbg_tosa_dump(tosa_graph, path):
         f.write(fb)
     assert os.path.exists(filepath_tosa_fb), "Failed to write TOSA flatbuffer"
 
-    filepath_desc_json = os.path.join(path, "desc.json")
+    filepath_desc_json = os.path.join(path, f"desc{suffix}.json")
     with open(filepath_desc_json, "w") as f:
        f.write(js)
     assert os.path.exists(filepath_desc_json), "Failed to write TOSA JSON"

@@ -76,7 +76,7 @@ def dbg_fail(node, tosa_graph, path):
     logger.warn("Internal error due to poorly handled node:")
     dbg_node(node)
     logger.warn(f"Debug output captured in '{path}'.")
-    raise RuntimeError("TOSA Internal Error on node, enable logging for further info")
+    raise RuntimeError("TOSA Internal Error on node, enable logging for further info.")
 
 
 # Helper function to match TOSA's broadcasting rank requirement
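To see the end result of the suffix handling, a small self-contained sketch (placeholder bytes and JSON, no real TOSA serialization) of how dumps from two partitions now sit side by side in the same intermediate directory:

# Placeholder contents only; mirrors the output<suffix>.tosa / desc<suffix>.json naming.
import os
import tempfile


def write_debug_files(path: str, suffix: str = "") -> None:
    os.makedirs(path, exist_ok=True)
    with open(os.path.join(path, f"output{suffix}.tosa"), "wb") as f:
        f.write(b"\x00")  # stand-in for the serialized TOSA flatbuffer
    with open(os.path.join(path, f"desc{suffix}.json"), "w") as f:
        f.write("{}")  # stand-in for the test-harness descriptor


with tempfile.TemporaryDirectory() as d:
    write_debug_files(d, "_tag0")
    write_debug_files(d, "_tag1")
    print(sorted(os.listdir(d)))
    # ['desc_tag0.json', 'desc_tag1.json', 'output_tag0.tosa', 'output_tag1.tosa']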
