2 files changed (+14 -3 lines).
First changed file:

@@ -19,7 +19,10 @@
     MemoryPlanningAlgo,
     MemoryPlanningState,
 )
-from executorch.backends.cadence.aot.utils import MemoryConfig
+from executorch.backends.cadence.aot.utils import (
+    MemoryConfig,
+    MemoryPlanningAlgoFailure,
+)
 
 from executorch.exir import ExecutorchProgramManager
 from executorch.exir.memory_planning import collect_specs_from_nodes, Verifier
@@ -95,7 +98,9 @@ def plan(
         ):
             self.plan_spec(spec, state, placement_constraints)
             if not state.is_placed(spec):
-                raise MemoryError(f"Cannot fit {spec} in any memory hierarchy")
+                raise MemoryPlanningAlgoFailure(
+                    f"Cannot fit {spec} {spec.allocated_memory=} in any memory hierarchy for {self.memory_config}"
+                )
 
 
 class GreedyWithHeuristic(MemoryPlanningAlgo):
@@ -169,7 +174,9 @@ def plan(
         ):
            self.plan_spec(spec, state, placement_constraints)
             if not state.is_placed(spec):
-                raise MemoryError(f"Cannot fit {spec} in any memory hierarchy")
+                raise MemoryPlanningAlgoFailure(
+                    f"Cannot fit {spec} in any memory hierarchy for {self.memory_config}"
+                )
 
         logging.debug(
             f"greedy by size for offset calculation with hierarchy returns bufsizes: {state.bufsizes}"
Second changed file:

@@ -25,6 +25,10 @@
 from torch.utils._pytree import tree_flatten
 
 
+class MemoryPlanningAlgoFailure(Exception):
+    pass
+
+
 # Get the output size of a 1D convolution given the input size and parameters
 def get_conv1d_output_size(
     in_size: torch.Size,
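
MemoryError is Python's built-in exception for an actual allocation failure, so switching to a dedicated MemoryPlanningAlgoFailure lets callers tell a planner that could not fit a spec into the configured memory hierarchy apart from the host running out of memory. A minimal sketch of how a caller might handle it, assuming only the import path shown in the diff; the wrapper function and the plan() arguments below are hypothetical:

    import logging

    from executorch.backends.cadence.aot.utils import MemoryPlanningAlgoFailure

    def plan_or_report(algo, state, placement_constraints) -> None:
        # Hypothetical wrapper around one of the planners touched in this
        # diff; the plan() call mirrors the method name from the hunks, not
        # its exact signature.
        try:
            algo.plan(state, placement_constraints)
        except MemoryPlanningAlgoFailure as e:
            # A planning failure, not a host out-of-memory condition.
            logging.error("Memory planning failed: %s", e)
            raise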