Merged
Changes from all commits
66 commits
fd73fb9  Update (GregoryComer, Jul 25, 2025)
2de680e  Update (GregoryComer, Jul 25, 2025)
8b11366  Update (GregoryComer, Jul 25, 2025)
ca1b887  Update (GregoryComer, Jul 25, 2025)
aacddf5  Update (GregoryComer, Jul 25, 2025)
2adbf61  Update (GregoryComer, Jul 25, 2025)
709f39b  Update (GregoryComer, Jul 25, 2025)
109961f  Update (GregoryComer, Jul 25, 2025)
ca69f38  Update (GregoryComer, Jul 25, 2025)
29122b6  Update (GregoryComer, Jul 25, 2025)
6acef0f  Update (GregoryComer, Jul 26, 2025)
cc3332e  Update (GregoryComer, Jul 26, 2025)
4f4d58d  Update (GregoryComer, Jul 26, 2025)
346ab2e  Update (GregoryComer, Jul 26, 2025)
de4b52e  Update (GregoryComer, Jul 26, 2025)
f18a643  Update (GregoryComer, Jul 26, 2025)
2e3b840  Update (GregoryComer, Jul 26, 2025)
c40156b  Update (GregoryComer, Jul 26, 2025)
0edc858  Update (GregoryComer, Jul 26, 2025)
442c14e  Update (GregoryComer, Jul 26, 2025)
cfc76a3  Update (GregoryComer, Jul 29, 2025)
e2ccfec  Update (GregoryComer, Jul 29, 2025)
3d398bc  Update (GregoryComer, Jul 29, 2025)
c50e393  Update (GregoryComer, Jul 29, 2025)
92787a4  Update (GregoryComer, Jul 29, 2025)
8cfac49  Update (GregoryComer, Jul 29, 2025)
dc7724b  Update (GregoryComer, Jul 29, 2025)
90f7a7e  Update (GregoryComer, Jul 29, 2025)
52302df  Update (GregoryComer, Jul 29, 2025)
261d4f6  Update (GregoryComer, Jul 29, 2025)
e99cd87  Update (GregoryComer, Jul 29, 2025)
f41e9a7  Update (GregoryComer, Jul 29, 2025)
4193235  Update (GregoryComer, Jul 29, 2025)
c4005bd  Update (GregoryComer, Jul 29, 2025)
8f6131b  Update (GregoryComer, Jul 29, 2025)
af3c956  Update (GregoryComer, Jul 29, 2025)
a21281b  Update (GregoryComer, Jul 29, 2025)
7c52981  Update (GregoryComer, Jul 29, 2025)
ebba33d  Update (GregoryComer, Jul 29, 2025)
a265cca  Update (GregoryComer, Jul 29, 2025)
69742a8  Update (GregoryComer, Jul 29, 2025)
ad74b75  Update (GregoryComer, Jul 29, 2025)
d3b0aa8  Update (GregoryComer, Jul 29, 2025)
f30b6e0  Update (GregoryComer, Jul 29, 2025)
019f23f  Update (GregoryComer, Jul 29, 2025)
5221b22  Update (GregoryComer, Jul 29, 2025)
8e5da0c  Update (GregoryComer, Jul 29, 2025)
831f814  Update (GregoryComer, Jul 29, 2025)
ee24fcb  Update (GregoryComer, Jul 29, 2025)
910825c  Update (GregoryComer, Jul 29, 2025)
72e2bfc  Update (GregoryComer, Jul 29, 2025)
d0861b6  Update (GregoryComer, Jul 29, 2025)
959ec61  Update (GregoryComer, Jul 29, 2025)
2f0cc4f  Update (GregoryComer, Jul 29, 2025)
15504dc  [Backend Tester] Add SNR validation (#12924) (GregoryComer, Jul 29, 2025)
0c873ab  Update (GregoryComer, Jul 29, 2025)
20faa2e  Update (GregoryComer, Jul 29, 2025)
aef38ab  Update (GregoryComer, Jul 29, 2025)
6e07637  Update (GregoryComer, Jul 29, 2025)
f653a89  Update (GregoryComer, Jul 29, 2025)
b96e6d1  Update (GregoryComer, Jul 29, 2025)
10b2820  Update (GregoryComer, Jul 29, 2025)
a03b8bc  Update (GregoryComer, Jul 29, 2025)
e346d17  Update (GregoryComer, Jul 29, 2025)
9f034c5  Update (GregoryComer, Jul 29, 2025)
8e1c364  Update (GregoryComer, Jul 29, 2025)
5 changes: 4 additions & 1 deletion backends/test/harness/tester.py
@@ -311,7 +311,10 @@ def run_method_and_compare_outputs(
print(f"Comparing Stage {stage} with Stage {reference_stage}")
for run_iteration in range(number_of_runs):
inputs_to_run = inputs if inputs else next(self.generate_random_inputs())
input_shapes = [generated_input.shape for generated_input in inputs_to_run]
input_shapes = [
generated_input.shape if hasattr(generated_input, "shape") else None
for generated_input in inputs_to_run
]
print(f"Run {run_iteration} with input shapes: {input_shapes}")

# Reference output (and quantization scale)
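For context on the harness change above, here is a minimal standalone sketch (illustrative only, not part of the diff) of how the hasattr guard behaves when one of the inputs is not a tensor:

import torch

# Hypothetical mixed inputs: one tensor and one plain Python int.
inputs_to_run = (torch.randn(2, 3), 4)

# Same guard as the added lines above: non-tensor inputs have no .shape,
# so they are reported as None instead of raising AttributeError.
input_shapes = [
    generated_input.shape if hasattr(generated_input, "shape") else None
    for generated_input in inputs_to_run
]
print(input_shapes)  # [torch.Size([2, 3]), None]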
57 changes: 57 additions & 0 deletions backends/test/suite/operators/test_abs.py
@@ -0,0 +1,57 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

# pyre-unsafe


import torch
from executorch.backends.test.suite.flow import TestFlow

from executorch.backends.test.suite.operators import (
    dtype_test,
    operator_test,
    OperatorTest,
)


class AbsModel(torch.nn.Module):
    def __init__(self):
        super().__init__()

    def forward(self, x):
        return torch.abs(x)


@operator_test
class TestAbs(OperatorTest):
    @dtype_test
    def test_abs_dtype(self, flow: TestFlow, dtype) -> None:
        # Test with different dtypes
        model = AbsModel().to(dtype)
        self._test_op(model, (torch.rand(10, 10).to(dtype) * 2 - 1,), flow)

    def test_abs_shapes(self, flow: TestFlow) -> None:
        # Test with different tensor shapes

        # 1D tensor
        self._test_op(AbsModel(), (torch.randn(20),), flow)

        # 2D tensor
        self._test_op(AbsModel(), (torch.randn(5, 10),), flow)

        # 3D tensor
        self._test_op(AbsModel(), (torch.randn(3, 4, 5),), flow)

    def test_abs_edge_cases(self, flow: TestFlow) -> None:
        # Test edge cases

        # Tensor with infinity
        x = torch.tensor([float("inf"), float("-inf"), 1.0, -1.0])
        self._test_op(AbsModel(), (x,), flow, generate_random_test_inputs=False)

        # Tensor with NaN
        x = torch.tensor([float("nan"), 1.0, -1.0])
        self._test_op(AbsModel(), (x,), flow, generate_random_test_inputs=False)
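As a quick standalone reference (illustrative only, not part of the diff), this is the torch.abs behavior the edge-case tests above rely on:

import torch

# abs maps +/-inf to inf and leaves NaN as NaN, which is why fixed inputs
# are passed above instead of randomly generated ones.
x = torch.tensor([float("inf"), float("-inf"), float("nan"), -1.0])
print(torch.abs(x))  # tensor([inf, inf, nan, 1.])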
255 changes: 255 additions & 0 deletions backends/test/suite/operators/test_amax.py
@@ -0,0 +1,255 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

# pyre-unsafe

from typing import List, Optional, Tuple, Union

import torch
from executorch.backends.test.suite.flow import TestFlow

from executorch.backends.test.suite.operators import (
    dtype_test,
    operator_test,
    OperatorTest,
)


class AmaxModel(torch.nn.Module):
    def __init__(
        self,
        dim: Optional[Union[int, Tuple[int, ...], List[int]]] = None,
        keepdim: bool = False,
    ):
        super().__init__()
        self.dim = dim
        self.keepdim = keepdim

    def forward(self, x):
        return torch.amax(x, dim=self.dim, keepdim=self.keepdim)


@operator_test
class Amax(OperatorTest):
    @dtype_test
    def test_amax_dtype(self, flow: TestFlow, dtype) -> None:
        self._test_op(
            AmaxModel().to(dtype),
            (torch.rand(10, 10).to(dtype),),
            flow,
        )

    def test_amax_dim(self, flow: TestFlow) -> None:
        self._test_op(
            AmaxModel(dim=0),
            (torch.randn(5, 10),),
            flow,
        )

        self._test_op(
            AmaxModel(dim=1),
            (torch.randn(5, 10),),
            flow,
        )

        self._test_op(
            AmaxModel(dim=0),
            (torch.randn(3, 4, 5),),
            flow,
        )

        self._test_op(
            AmaxModel(dim=1),
            (torch.randn(3, 4, 5),),
            flow,
        )

        self._test_op(
            AmaxModel(dim=2),
            (torch.randn(3, 4, 5),),
            flow,
        )

        self._test_op(
            AmaxModel(dim=1),
            (torch.randn(2, 3, 4, 5),),
            flow,
        )

        self._test_op(
            AmaxModel(dim=-1),
            (torch.randn(3, 4, 5),),
            flow,
        )

        self._test_op(
            AmaxModel(dim=-2),
            (torch.randn(3, 4, 5),),
            flow,
        )

    def test_amax_multi_dim(self, flow: TestFlow) -> None:
        self._test_op(
            AmaxModel(dim=(0, 1)),
            (torch.randn(3, 4, 5),),
            flow,
        )

        self._test_op(
            AmaxModel(dim=(0, 2)),
            (torch.randn(3, 4, 5),),
            flow,
        )

        self._test_op(
            AmaxModel(dim=(1, 2)),
            (torch.randn(3, 4, 5),),
            flow,
        )

        self._test_op(
            AmaxModel(dim=(1, 3)),
            (torch.randn(2, 3, 4, 5),),
            flow,
        )

        self._test_op(
            AmaxModel(dim=(0, 2)),
            (torch.randn(2, 3, 4, 5),),
            flow,
        )

        self._test_op(
            AmaxModel(dim=(-1, -3)),
            (torch.randn(2, 3, 4, 5),),
            flow,
        )

        self._test_op(
            AmaxModel(dim=(0, 1, 2, 3)),
            (torch.randn(2, 3, 4, 5),),
            flow,
        )

    def test_amax_keepdim(self, flow: TestFlow) -> None:
        self._test_op(
            AmaxModel(dim=0, keepdim=True),
            (torch.randn(5, 10),),
            flow,
        )

        self._test_op(
            AmaxModel(dim=1, keepdim=True),
            (torch.randn(5, 10),),
            flow,
        )

        self._test_op(
            AmaxModel(dim=1, keepdim=True),
            (torch.randn(3, 4, 5),),
            flow,
        )

        self._test_op(
            AmaxModel(dim=2, keepdim=True),
            (torch.randn(2, 3, 4, 5),),
            flow,
        )

        self._test_op(
            AmaxModel(dim=(1, 2), keepdim=True),
            (torch.randn(3, 4, 5),),
            flow,
        )

    def test_amax_shapes(self, flow: TestFlow) -> None:
        self._test_op(
            AmaxModel(),
            (torch.randn(20),),
            flow,
        )
        self._test_op(
            AmaxModel(dim=0),
            (torch.randn(20),),
            flow,
        )

        self._test_op(
            AmaxModel(),
            (torch.randn(5, 10),),
            flow,
        )

        self._test_op(
            AmaxModel(),
            (torch.randn(3, 4, 5),),
            flow,
        )

        self._test_op(
            AmaxModel(),
            (torch.randn(2, 3, 4, 5),),
            flow,
        )

        self._test_op(
            AmaxModel(),
            (torch.randn(2, 2, 3, 4, 5),),
            flow,
        )

    def test_amax_edge_cases(self, flow: TestFlow) -> None:
        x = torch.tensor([[1.0, float("inf"), 3.0], [4.0, 5.0, float("inf")]])
        self._test_op(
            AmaxModel(),
            (x,),
            flow,
            use_random_test_inputs=False,
        )
        self._test_op(
            AmaxModel(dim=0),
            (x,),
            flow,
            use_random_test_inputs=False,
        )
        self._test_op(
            AmaxModel(dim=1),
            (x,),
            flow,
            use_random_test_inputs=False,
        )

        x = torch.tensor([[1.0, float("nan"), 3.0], [4.0, 5.0, float("nan")]])
        self._test_op(
            AmaxModel(),
            (x,),
            flow,
            use_random_test_inputs=False,
        )
        self._test_op(
            AmaxModel(dim=0),
            (x,),
            flow,
            use_random_test_inputs=False,
        )
        self._test_op(
            AmaxModel(dim=1),
            (x,),
            flow,
            use_random_test_inputs=False,
        )

    def test_amax_scalar(self, flow: TestFlow) -> None:
        self._test_op(
            AmaxModel(),
            (torch.tensor([5.0]),),
            flow,
        )
        self._test_op(
            AmaxModel(dim=0),
            (torch.tensor([5.0]),),
            flow,
        )
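For reference, a small standalone sketch (illustrative only, not part of the diff) of the torch.amax semantics these tests exercise: reduction over multiple dims, keepdim, and NaN propagation.

import torch

# Reducing over several dims at once, as in test_amax_multi_dim.
print(torch.amax(torch.randn(3, 4, 5), dim=(0, 2)).shape)  # torch.Size([4])

# keepdim=True keeps the reduced dims with size 1, as in test_amax_keepdim.
print(torch.amax(torch.randn(3, 4, 5), dim=1, keepdim=True).shape)  # torch.Size([3, 1, 5])

# amax propagates NaN, which is why the edge-case tests pass fixed inputs
# instead of relying on random input generation.
x = torch.tensor([[1.0, float("nan"), 3.0], [4.0, 5.0, 6.0]])
print(torch.amax(x, dim=1))  # tensor([nan, 6.])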