2 files changed, +14 -3 lines changed

File 1 of 2:

@@ -19,7 +19,10 @@
     MemoryPlanningAlgo,
     MemoryPlanningState,
 )
-from executorch.backends.cadence.aot.utils import MemoryConfig
+from executorch.backends.cadence.aot.utils import (
+    MemoryConfig,
+    MemoryPlanningAlgoFailure,
+)
 
 from executorch.exir import ExecutorchProgramManager
 from executorch.exir.memory_planning import collect_specs_from_nodes, Verifier
@@ -95,7 +98,9 @@ def plan(
         ):
             self.plan_spec(spec, state, placement_constraints)
             if not state.is_placed(spec):
-                raise MemoryError(f"Cannot fit {spec} in any memory hierarchy")
+                raise MemoryPlanningAlgoFailure(
+                    f"Cannot fit {spec} {spec.allocated_memory=} in any memory hierarchy for {self.memory_config}"
+                )
 
 
 class GreedyWithHeuristic(MemoryPlanningAlgo):
@@ -169,7 +174,9 @@ def plan(
         ):
             self.plan_spec(spec, state, placement_constraints)
             if not state.is_placed(spec):
-                raise MemoryError(f"Cannot fit {spec} in any memory hierarchy")
+                raise MemoryPlanningAlgoFailure(
+                    f"Cannot fit {spec} in any memory hierarchy for {self.memory_config}"
+                )
 
         logging.debug(
             f"greedy by size for offset calculation with hierarchy returns bufsizes: {state.bufsizes}"
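Note on the first new message: {spec.allocated_memory=} uses Python's self-documenting f-string specifier (Python 3.8+), which prints the expression text together with its value, so the error reports both the failing spec and how much memory it needed. A standalone sketch of that formatting, where the Spec class below is a toy stand-in rather than the real ExecuTorch spec type:

# Toy illustration of the "=" f-string specifier used in the new error message.
class Spec:
    allocated_memory = 4096

spec = Spec()
print(f"Cannot fit {spec} {spec.allocated_memory=} in any memory hierarchy")
# Prints something like:
# Cannot fit <__main__.Spec object at 0x...> spec.allocated_memory=4096 in any memory hierarchy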
File 2 of 2: executorch/backends/cadence/aot/utils.py

@@ -25,6 +25,10 @@
 from torch.utils._pytree import tree_flatten
 
 
+class MemoryPlanningAlgoFailure(Exception):
+    pass
+
+
 # Get the output size of a 1D convolution given the input size and parameters
 def get_conv1d_output_size(
     in_size: torch.Size,
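Both raise sites now throw MemoryPlanningAlgoFailure, the exception defined above in utils, instead of the built-in MemoryError, so callers can distinguish a planner placement failure from a genuine host out-of-memory condition. A minimal, self-contained sketch of how a caller might handle it; the local class and plan_or_fail function are stand-ins so the snippet runs outside the ExecuTorch tree (in practice the exception would be imported from executorch.backends.cadence.aot.utils):

import logging

# Local stand-in for the exception added in this change.
class MemoryPlanningAlgoFailure(Exception):
    pass

def plan_or_fail() -> None:
    # Stand-in for a planner that cannot place a tensor spec anywhere.
    raise MemoryPlanningAlgoFailure("Cannot fit spec in any memory hierarchy")

try:
    plan_or_fail()
except MemoryPlanningAlgoFailure as err:
    # Planner-specific failure, distinguishable from a host-side MemoryError.
    logging.error("Cadence memory planning failed: %s", err)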