backends/cadence/aot/TARGETS (3 changes: 3 additions & 0 deletions)

@@ -433,6 +433,7 @@ python_unittest(
     srcs = [
         "tests/test_memory_passes.py",
     ],
+    supports_static_listing = False,
     typing = True,
     deps = [
         ":compiler",
@@ -441,7 +442,9 @@
         ":pass_utils",
         "//caffe2:torch",
         "//executorch/exir:memory",
+        "fbsource//third-party/pypi/parameterized:parameterized",
         "//executorch/exir/dialects:lib",
+        "//executorch/backends/cadence/aot:graph_builder",
         "//executorch/exir/tests:models",
     ],
 )
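
Reviewer note: the new parameterized dependency and supports_static_listing = False belong together, since Buck cannot statically enumerate tests that @parameterized.expand generates at import time. A minimal sketch of the pattern this enables, assuming a hypothetical test case (the class name and cases below are illustrative, not part of this diff):

import unittest

from parameterized import parameterized


class MemoryPassesParamTest(unittest.TestCase):
    # Each tuple expands into its own test method at import time, which is
    # why Buck needs `supports_static_listing = False` for this target.
    @parameterized.expand(
        [
            ("cat_dim0_is_outermost", 0, True),
            ("cat_dim1_not_outermost", 1, False),
        ]
    )
    def test_cat_dim(self, _name: str, cat_dim: int, expected: bool) -> None:
        self.assertEqual(cat_dim == 0, expected)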
backends/cadence/aot/memory_constraints.py (24 changes: 19 additions & 5 deletions)

@@ -350,14 +350,28 @@ def is_slice_view(self, node: torch.fx.Node) -> bool:
     def is_cat_along_outermost_dim(
         self, graph_module: torch.fx.GraphModule, cat_node: torch.fx.Node
     ) -> bool:
-        assert len(cat_node.args) > 0
+        cat_tensors = cat_node.args[0]
+        if not isinstance(cat_tensors, Sequence) or not all(
+            isinstance(t, torch.fx.Node) for t in cat_tensors
+        ):
+            raise ValueError("cat_tensors must be a sequence of torch.fx.Node objects.")
+
+        if len(cat_node.args) > 1:
+            cat_dim = cat_node.args[1]
+        else:
+            cat_dim = cat_node.kwargs.get("dim", None)
+        if not isinstance(cat_dim, int):
+            raise ValueError("cat_dim must be an integer.")
+
         # If the cat op has default dim, then the concat dim is 0
-        if len(cat_node.args) == 1 or cat_node.args[1] == 0:
+        if len(cat_tensors) == 1 or cat_dim == 0:
             return True
-        # Get the concatenation dimension and concatenated tensors
-        (cat_tensors, cat_dim) = cast(
-            tuple[Sequence[torch.fx.Node], int], cat_node.args
-        )
 
         # Make sure all dims before cat_dim are 1.
         for tensor in cat_tensors:
+            if not isinstance(tensor, torch.fx.Node):
+                continue
             shape = get_shape(graph_module, tensor)
             if shape is None or not all(dim == 1 for dim in shape[0:cat_dim]):
                 return False
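
Reviewer note: the rewrite replaces the bare assert with explicit validation but keeps the underlying rule: a cat is along the outermost dimension when cat_dim is 0 (or there is a single input), or when every dimension before cat_dim is 1 for all inputs, so the concatenated tensors are laid out contiguously end to end in memory. A standalone sketch of that rule on plain shape tuples, using a hypothetical helper (the real method reads shapes off the graph via get_shape):

from typing import Sequence, Tuple


def cat_is_along_outermost_dim(
    shapes: Sequence[Tuple[int, ...]], cat_dim: int
) -> bool:
    # A single input, or concatenation along dim 0, is trivially outermost.
    if len(shapes) == 1 or cat_dim == 0:
        return True
    # Otherwise every dim before cat_dim must be 1 for every input tensor.
    return all(all(d == 1 for d in shape[:cat_dim]) for shape in shapes)


# (1, 4, 8) cat (1, 2, 8) along dim 1 behaves like an outermost concat ...
assert cat_is_along_outermost_dim([(1, 4, 8), (1, 2, 8)], cat_dim=1)
# ... but a leading dim of 2 breaks contiguity.
assert not cat_is_along_outermost_dim([(2, 4, 8), (2, 2, 8)], cat_dim=1)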