exir/tensor.py (25 changes: 23 additions & 2 deletions)
@@ -13,7 +13,7 @@
 
 import math
 import typing
-from typing import Dict, List, Optional, Tuple, Union
+from typing import Dict, List, NamedTuple, Optional, Tuple, Union
 
 import executorch.exir.schema as schema
 import torch
@@ -70,8 +70,29 @@ def dim_order_from_stride(stride: Tuple[int]) -> Tuple[bytes]:
     for _, s in enumerate(stride):
         if s == 0:
             raise ValueError("0 in strides is not supported for ExecuTorch.")
+
+    from torch.fx.experimental.symbolic_shapes import guard_size_oblivious
+
+    class K(NamedTuple):
+        stride: int
+
+        def __lt__(self, other):
+            return guard_size_oblivious(self.stride < other.stride)
+
+        def __gt__(self, other):
+            return guard_size_oblivious(self.stride > other.stride)
+
+        def __le__(self, other):
+            return guard_size_oblivious(self.stride <= other.stride)
+
+        def __ge__(self, other):
+            return guard_size_oblivious(self.stride >= other.stride)
+
+        def __eq__(self, other):
+            return guard_size_oblivious(self.stride == other.stride)
+
     sorted_dims = [
-        i[0] for i in sorted(enumerate(stride), key=lambda x: x[1], reverse=True)
+        i[0] for i in sorted(enumerate(stride), key=lambda x: K(x[1]), reverse=True)
     ]
     return tuple(typing.cast(Tuple[bytes], sorted_dims))
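For context: dim_order_from_stride orders dimensions from outermost to innermost by sorting them on descending stride. The K wrapper added in this diff routes every comparison made by sorted() through guard_size_oblivious, which appears intended to let symbolic (SymInt) strides be compared during tracing/export without installing data-dependent guards; with plain Python ints, guard_size_oblivious simply returns the boolean. Below is a minimal sketch of the sort itself using concrete strides only; the function name and example shapes are illustrative and not part of the patch, and the guard_size_oblivious wrapper is omitted because it is a pass-through for concrete ints.

# Illustrative sketch (not ExecuTorch code): dim order = dims sorted by descending stride.
from typing import Tuple


def dim_order_sketch(stride: Tuple[int, ...]) -> Tuple[int, ...]:
    # The dimension with the largest stride varies slowest in memory,
    # so sorting strides in descending order yields the physical layout order.
    # sorted() is stable, so dims with equal strides keep their original order.
    return tuple(i for i, _ in sorted(enumerate(stride), key=lambda x: x[1], reverse=True))


# Contiguous NCHW tensor of shape (2, 3, 4, 5): strides (60, 20, 5, 1) -> (0, 1, 2, 3)
print(dim_order_sketch((60, 20, 5, 1)))
# Channels-last tensor of the same shape: strides (60, 1, 15, 3) -> (0, 2, 3, 1)
print(dim_order_sketch((60, 1, 15, 3)))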
