From 5ca595d891430dd5489c97ffdd1183b2680825e8 Mon Sep 17 00:00:00 2001 From: Hardik Sharma Date: Thu, 17 Jul 2025 10:55:49 -0700 Subject: [PATCH] Improve error message returned in case of memory planning failure. (#12403) Summary: Replace the generic MemoryError raised when a spec cannot be placed with a dedicated MemoryPlanningAlgoFailure exception (defined in backends/cadence/aot/utils.py) whose message also reports the spec's allocated memory and the active memory config, making planning failures easier to diagnose. Reviewed By: zonglinpeng Differential Revision: D78047056 --- backends/cadence/aot/memory_planning.py | 13 ++++++++++--- backends/cadence/aot/utils.py | 4 ++++ 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/backends/cadence/aot/memory_planning.py b/backends/cadence/aot/memory_planning.py index 0634af6ea61..67da42a9d3c 100644 --- a/backends/cadence/aot/memory_planning.py +++ b/backends/cadence/aot/memory_planning.py @@ -19,7 +19,10 @@ MemoryPlanningAlgo, MemoryPlanningState, ) -from executorch.backends.cadence.aot.utils import MemoryConfig +from executorch.backends.cadence.aot.utils import ( + MemoryConfig, + MemoryPlanningAlgoFailure, +) from executorch.exir import ExecutorchProgramManager from executorch.exir.memory_planning import collect_specs_from_nodes, Verifier @@ -95,7 +98,9 @@ def plan( ): self.plan_spec(spec, state, placement_constraints) if not state.is_placed(spec): - raise MemoryError(f"Cannot fit {spec} in any memory hierarchy") + raise MemoryPlanningAlgoFailure( + f"Cannot fit {spec} {spec.allocated_memory=} in any memory hierarchy for {self.memory_config}" + ) class GreedyWithHeuristic(MemoryPlanningAlgo): @@ -169,7 +174,9 @@ def plan( ): self.plan_spec(spec, state, placement_constraints) if not state.is_placed(spec): - raise MemoryError(f"Cannot fit {spec} in any memory hierarchy") + raise MemoryPlanningAlgoFailure( + f"Cannot fit {spec} in any memory hierarchy for {self.memory_config}" + ) logging.debug( f"greedy by size for offset calculation with hierarchy returns bufsizes: {state.bufsizes}" diff --git a/backends/cadence/aot/utils.py b/backends/cadence/aot/utils.py index 54208fd5794..379e3b24dd8 100644 --- a/backends/cadence/aot/utils.py +++ b/backends/cadence/aot/utils.py @@ -25,6 +25,10 @@ 
from torch.utils._pytree import tree_flatten +class MemoryPlanningAlgoFailure(Exception): + pass + + # Get the output size of a 1D convolution given the input size and parameters def get_conv1d_output_size( in_size: torch.Size,