2 changes: 1 addition & 1 deletion backends/arm/_passes/arm_pass_utils.py
@@ -13,7 +13,7 @@

import torch
import torch.fx
-from executorch.backends.arm.common.debug import get_node_debug_info
+from executorch.backends.arm.tosa_utils import get_node_debug_info
from executorch.exir import ExportedProgram
from executorch.exir.dialects._ops import ops as exir_ops

4 changes: 0 additions & 4 deletions backends/arm/common/__init__.py

This file was deleted.

87 changes: 0 additions & 87 deletions backends/arm/common/debug.py

This file was deleted.

2 changes: 1 addition & 1 deletion backends/arm/quantizer/quantization_annotator.py
@@ -11,8 +11,8 @@
import torch
import torch.fx
import torch.nn.functional as F
-from executorch.backends.arm.common.debug import get_node_debug_info
from executorch.backends.arm.quantizer import QuantizationConfig
+from executorch.backends.arm.tosa_utils import get_node_debug_info
from torch._subclasses import FakeTensor

from torch.fx import Node
6 changes: 3 additions & 3 deletions backends/arm/tosa_backend.py
@@ -19,12 +19,12 @@
from executorch.backends.arm._passes import (
ArmPassManager,
) # usort: skip
-from executorch.backends.arm.common.debug import debug_fail, debug_tosa_dump
from executorch.backends.arm.process_node import (
process_call_function,
process_output,
process_placeholder,
)
+from executorch.backends.arm.tosa_utils import dbg_fail, dbg_tosa_dump
from executorch.exir.backend.backend_details import BackendDetails, PreprocessResult
from executorch.exir.backend.compile_spec_schema import CompileSpec
from torch.export.exported_program import ExportedProgram
@@ -115,12 +115,12 @@ def preprocess( # noqa: C901
# any checking of compatibility.
raise RuntimeError(f"{node.name} is unsupported op {node.op}")
except Exception:
-debug_fail(node, graph_module, tosa_graph, artifact_path)
+dbg_fail(node, graph_module, tosa_graph, artifact_path)
raise

if artifact_path:
tag = arm_get_first_delegation_tag(graph_module)
-debug_tosa_dump(
+dbg_tosa_dump(
tosa_graph,
artifact_path,
suffix="{}".format(f"_{tag}" if tag else "") + (f"_{tosa_spec}"),
75 changes: 74 additions & 1 deletion backends/arm/tosa_utils.py
@@ -6,7 +6,8 @@
# pyre-unsafe

import logging
-from typing import Any
+import os
+from typing import Any, Optional

import numpy as np
import serializer.tosa_serializer as ts # type: ignore
@@ -19,13 +20,85 @@

from executorch.backends.arm.tosa_specification import TosaSpecification
from executorch.exir.dialects._ops import ops as exir_ops
from executorch.exir.print_program import inspect_node

from torch._subclasses.fake_tensor import FakeTensor
from torch.fx import Node

logger = logging.getLogger(__name__)


def dbg_node(node: torch.fx.Node, graph_module: torch.fx.GraphModule):
    # Debug output of node information
    logger.info(get_node_debug_info(node, graph_module))


def get_node_debug_info(
    node: torch.fx.Node, graph_module: torch.fx.GraphModule | None = None
) -> str:
    # Prepend the inspect_node view when a graph_module is available, then
    # always append the field-by-field node dump.
    output = (
        f" {inspect_node(graph=graph_module.graph, node=node)}\n" if graph_module else ""
    )
    output += (
        "-- NODE DEBUG INFO --\n"
        f" Op is {node.op}\n"
        f" Name is {node.name}\n"
        f" Node target is {node.target}\n"
        f" Node args is {node.args}\n"
        f" Node kwargs is {node.kwargs}\n"
        f" Node users is {node.users}\n"
        " Node.meta = \n"
    )
    for k, v in node.meta.items():
        if k == "stack_trace":
            matches = v.split("\n")
            output += " 'stack_trace =\n"
            for m in matches:
                output += f" {m}\n"
        else:
            output += f" '{k}' = {v}\n"

            if isinstance(v, list):
                for i in v:
                    output += f" {i}\n"
    return output


# Output TOSA flatbuffer and test harness file
def dbg_tosa_dump(tosa_graph: ts.TosaSerializer, path: str, suffix: str = ""):
    filename = f"output{suffix}.tosa"

    logger.info(f"Emitting debug output to: {path=}, {suffix=}")

    os.makedirs(path, exist_ok=True)

    fb = tosa_graph.serialize()
    js = tosa_graph.writeJson(filename)

    filepath_tosa_fb = os.path.join(path, filename)
    with open(filepath_tosa_fb, "wb") as f:
        f.write(fb)
    assert os.path.exists(filepath_tosa_fb), "Failed to write TOSA flatbuffer"

    filepath_desc_json = os.path.join(path, f"desc{suffix}.json")
    with open(filepath_desc_json, "w") as f:
        f.write(js)
    assert os.path.exists(filepath_desc_json), "Failed to write TOSA JSON"


def dbg_fail(
    node,
    graph_module,
    tosa_graph: Optional[ts.TosaSerializer] = None,
    path: Optional[str] = None,
):
    logger.warning("Internal error due to poorly handled node:")
    if tosa_graph is not None and path is not None:
        dbg_tosa_dump(tosa_graph, path)
        logger.warning(f"Debug output captured in '{path}'.")
    dbg_node(node, graph_module)


def getNodeArgs(node: Node, tosa_spec: TosaSpecification) -> list[TosaArg]:
try:
return [TosaArg(arg, tosa_spec) for arg in node.args]
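
Editor's note, not part of the diff: a minimal sketch of how the helpers restored to tosa_utils.py can be exercised on an ordinary torch.fx graph. It assumes an environment where executorch's Arm backend (and its TOSA serializer dependency) is importable; the AddOne toy module and the node chosen for dbg_fail are illustrative assumptions. dbg_tosa_dump itself writes output{suffix}.tosa and desc{suffix}.json under the given path, so it is skipped here where no TOSA graph is available.

import logging

import torch
from executorch.backends.arm.tosa_utils import dbg_fail, get_node_debug_info

logging.basicConfig(level=logging.INFO)


class AddOne(torch.nn.Module):  # toy module, illustrative only
    def forward(self, x):
        return x + 1


gm = torch.fx.symbolic_trace(AddOne())

# Dump the per-node debug string for every call_function node in the graph.
for node in gm.graph.nodes:
    if node.op == "call_function":
        print(get_node_debug_info(node, gm))

# dbg_fail is what tosa_backend.py calls when lowering a node raises; with
# tosa_graph and path left as None it skips the flatbuffer/JSON dump and only
# logs the node debug info via dbg_node.
failing_node = next(iter(gm.graph.nodes))
dbg_fail(failing_node, gm, tosa_graph=None, path=None)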