
Commit 186d349

hsharma35 authored and facebook-github-bot committed
Improve error message returned in case of memory planning failure.
Summary: as titled.

Differential Revision: D78047056
1 parent 3afd18d commit 186d349

File tree: 2 files changed, +6 −2 lines


backends/cadence/aot/memory_planning.py

Lines changed: 3 additions & 2 deletions
@@ -17,6 +17,7 @@
     ConstraintsGenPass,
     get_aligned_offset,
     MemoryPlanningAlgo,
+    MemoryPlanningAlgoFailure,
     MemoryPlanningState,
 )
 from executorch.backends.cadence.aot.utils import MemoryConfig
@@ -90,7 +91,7 @@ def plan(
         ):
             self.plan_spec(spec, state)
             if not state.is_placed(spec):
-                raise MemoryError(f"Cannot fit {spec} in any memory hierarchy")
+                raise MemoryPlanningAlgoFailure(f"Cannot fit {spec} {spec.allocated_memory=} in any memory hierarchy for {self.memory_config}")

         return state
@@ -162,7 +163,7 @@ def plan(
         ):
             self.plan_spec(spec, state)
             if not state.is_placed(spec):
-                raise MemoryError(f"Cannot fit {spec} in any memory hierarchy")
+                raise MemoryPlanningAlgoFailure(f"Cannot fit {spec} in any memory hierarchy for {self.memory_config}")

         logging.debug(
             f"greedy by size for offset calculation with hierarchy returns bufsizes: {state.bufsizes}"

backends/cadence/aot/memory_planning_algo.py

Lines changed: 3 additions & 0 deletions
@@ -18,6 +18,9 @@
 from executorch.exir.tensor import TensorSpec
 from torch.export.exported_program import ExportGraphSignature

+class MemoryPlanningAlgoFailure(Exception):
+    pass
+

 def get_aligned_offset(pre_aligned_offset: int, alignment: int) -> int:
     return int(math.ceil(pre_aligned_offset / alignment) * alignment)
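
For reference, `get_aligned_offset` rounds a byte offset up to the next multiple of the given alignment, and the new failure type derives from plain `Exception` rather than the built-in `MemoryError` raised previously, so callers that catch `MemoryError` around planning will no longer intercept these failures. A small self-contained sketch (alignment values chosen for illustration):

import math

class MemoryPlanningAlgoFailure(Exception):
    pass

def get_aligned_offset(pre_aligned_offset: int, alignment: int) -> int:
    # Round up to the next multiple of `alignment`.
    return int(math.ceil(pre_aligned_offset / alignment) * alignment)

assert get_aligned_offset(0, 16) == 0    # already aligned
assert get_aligned_offset(1, 16) == 16   # rounds up to the first boundary
assert get_aligned_offset(33, 16) == 48  # next 16-byte boundary after 33

# Callers now match on the new type; the raise below stands in for a
# planner call, whose real signature is not shown in this diff.
try:
    raise MemoryPlanningAlgoFailure("Cannot fit spec in any memory hierarchy")
except MemoryPlanningAlgoFailure as e:
    print(f"memory planning failed: {e}")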
