diff --git a/backends/test/compliance_suite/operators/test_log_softmax.py b/backends/test/compliance_suite/operators/test_log_softmax.py
new file mode 100644
index 00000000000..9f54a84ba86
--- /dev/null
+++ b/backends/test/compliance_suite/operators/test_log_softmax.py
@@ -0,0 +1,48 @@
+# (c) Meta Platforms, Inc. and affiliates. Confidential and proprietary.
+
+# pyre-strict
+
+from typing import Callable
+
+import torch
+
+from executorch.backends.test.compliance_suite import (
+    dtype_test,
+    operator_test,
+    OperatorTest,
+)
+
+class Model(torch.nn.Module):
+    def __init__(self, dim: int = -1) -> None:
+        super().__init__()
+        self.dim = dim
+
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        return torch.nn.functional.log_softmax(x, dim=self.dim)
+
+@operator_test
+class TestLogSoftmax(OperatorTest):
+    @dtype_test
+    def test_log_softmax_dtype(self, dtype: torch.dtype, tester_factory: Callable) -> None:
+        self._test_op(Model(), ((torch.rand(2, 10) * 100).to(dtype),), tester_factory)
+
+    def test_log_softmax_f32_dim_last(self, tester_factory: Callable) -> None:
+        # Default dim is -1 (last dimension)
+        self._test_op(Model(), (torch.randn(3, 4, 5),), tester_factory)
+
+    def test_log_softmax_f32_dim_first(self, tester_factory: Callable) -> None:
+        # Test with dim=0 (first dimension)
+        self._test_op(Model(dim=0), (torch.randn(3, 4, 5),), tester_factory)
+
+    def test_log_softmax_f32_dim_middle(self, tester_factory: Callable) -> None:
+        # Test with dim=1 (middle dimension)
+        self._test_op(Model(dim=1), (torch.randn(3, 4, 5),), tester_factory)
+
+    def test_log_softmax_f32_1d_tensor(self, tester_factory: Callable) -> None:
+        # Test with 1D tensor
+        self._test_op(Model(), (torch.randn(10),), tester_factory)
+
+    def test_log_softmax_f32_large_values(self, tester_factory: Callable) -> None:
+        # Test with large values to check numerical stability
+        x = torch.tensor([[1000.0, 0.0, -1000.0]])
+        self._test_op(Model(), (x,), tester_factory)
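Reviewer note (not part of the diff): a minimal self-contained sketch of the numerical-stability property the large-values case above targets. Composing log() with softmax() underflows the small probabilities to 0.0 in float32, and log(0.0) is -inf; F.log_softmax folds the log-sum-exp shift into a single pass and stays finite. Plain PyTorch only, no suite dependencies assumed.

import torch
import torch.nn.functional as F

x = torch.tensor([[1000.0, 0.0, -1000.0]])

# Round-tripping through softmax loses the small entries: after the
# internal max shift, exp(-1000) and exp(-2000) underflow to 0.0 in
# float32, and log(0.0) is -inf.
naive = torch.log(F.softmax(x, dim=-1))  # tensor([[0., -inf, -inf]])

# log_softmax computes x - max - log(sum(exp(x - max))) directly,
# so every entry stays finite.
stable = F.log_softmax(x, dim=-1)  # tensor([[0., -1000., -2000.]])

print(naive, stable, sep="\n")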
diff --git a/backends/test/compliance_suite/operators/test_softmax.py b/backends/test/compliance_suite/operators/test_softmax.py
new file mode 100644
index 00000000000..139b942edbc
--- /dev/null
+++ b/backends/test/compliance_suite/operators/test_softmax.py
@@ -0,0 +1,48 @@
+# (c) Meta Platforms, Inc. and affiliates. Confidential and proprietary.
+
+# pyre-strict
+
+from typing import Callable
+
+import torch
+
+from executorch.backends.test.compliance_suite import (
+    dtype_test,
+    operator_test,
+    OperatorTest,
+)
+
+class Model(torch.nn.Module):
+    def __init__(self, dim: int = -1) -> None:
+        super().__init__()
+        self.dim = dim
+
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        return torch.nn.functional.softmax(x, dim=self.dim)
+
+@operator_test
+class TestSoftmax(OperatorTest):
+    @dtype_test
+    def test_softmax_dtype(self, dtype: torch.dtype, tester_factory: Callable) -> None:
+        self._test_op(Model(), ((torch.rand(2, 10) * 100).to(dtype),), tester_factory)
+
+    def test_softmax_f32_dim_last(self, tester_factory: Callable) -> None:
+        # Default dim is -1 (last dimension)
+        self._test_op(Model(), (torch.randn(3, 4, 5),), tester_factory)
+
+    def test_softmax_f32_dim_first(self, tester_factory: Callable) -> None:
+        # Test with dim=0 (first dimension)
+        self._test_op(Model(dim=0), (torch.randn(3, 4, 5),), tester_factory)
+
+    def test_softmax_f32_dim_middle(self, tester_factory: Callable) -> None:
+        # Test with dim=1 (middle dimension)
+        self._test_op(Model(dim=1), (torch.randn(3, 4, 5),), tester_factory)
+
+    def test_softmax_f32_1d_tensor(self, tester_factory: Callable) -> None:
+        # Test with 1D tensor
+        self._test_op(Model(), (torch.randn(10),), tester_factory)
+
+    def test_softmax_f32_large_values(self, tester_factory: Callable) -> None:
+        # Test with large values to check numerical stability
+        x = torch.tensor([[1000.0, 0.0, -1000.0]])
+        self._test_op(Model(), (x,), tester_factory)
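Reviewer note (not part of the diff): a quick sketch of the dim semantics that the dim_last/dim_first/dim_middle cases above exercise. softmax normalizes only the selected axis, so summing the output over that axis yields ones while all other axes are untouched.

import torch
import torch.nn.functional as F

x = torch.randn(3, 4, 5)

# Whichever axis 'dim' selects is the one that gets normalized;
# the result summed over that axis is all-ones.
assert torch.allclose(F.softmax(x, dim=-1).sum(dim=-1), torch.ones(3, 4))  # last
assert torch.allclose(F.softmax(x, dim=0).sum(dim=0), torch.ones(4, 5))    # first
assert torch.allclose(F.softmax(x, dim=1).sum(dim=1), torch.ones(3, 5))    # middle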
diff --git a/backends/test/compliance_suite/operators/test_softmax2d.py b/backends/test/compliance_suite/operators/test_softmax2d.py
new file mode 100644
index 00000000000..002553aec0f
--- /dev/null
+++ b/backends/test/compliance_suite/operators/test_softmax2d.py
@@ -0,0 +1,49 @@
+# (c) Meta Platforms, Inc. and affiliates. Confidential and proprietary.
+
+# pyre-strict
+
+from typing import Callable
+
+import torch
+
+from executorch.backends.test.compliance_suite import (
+    dtype_test,
+    operator_test,
+    OperatorTest,
+)
+
+class Model(torch.nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        # softmax2d is equivalent to softmax with dim=1 for 4D inputs
+        return torch.nn.functional.softmax(x, dim=1)
+
+@operator_test
+class TestSoftmax2d(OperatorTest):
+    @dtype_test
+    def test_softmax2d_dtype(self, dtype: torch.dtype, tester_factory: Callable) -> None:
+        # Input must be 4D (N, C, H, W)
+        self._test_op(Model(), ((torch.rand(2, 3, 4, 5) * 100).to(dtype),), tester_factory)
+
+    def test_softmax2d_f32_various_shapes(self, tester_factory: Callable) -> None:
+        # Test with a different (larger spatial) shape
+        self._test_op(Model(), (torch.randn(1, 3, 8, 8),), tester_factory)
+
+    def test_softmax2d_f32_single_channel(self, tester_factory: Callable) -> None:
+        # Test with single channel (C=1)
+        self._test_op(Model(), (torch.randn(2, 1, 4, 4),), tester_factory)
+
+    def test_softmax2d_f32_many_channels(self, tester_factory: Callable) -> None:
+        # Test with many channels
+        self._test_op(Model(), (torch.randn(2, 16, 4, 4),), tester_factory)
+
+    def test_softmax2d_f32_single_batch(self, tester_factory: Callable) -> None:
+        # Test with single batch (N=1)
+        self._test_op(Model(), (torch.randn(1, 3, 4, 4),), tester_factory)
+
+    def test_softmax2d_f32_large_values(self, tester_factory: Callable) -> None:
+        # Test with large values to check numerical stability
+        x = torch.zeros(2, 3, 2, 2)
+        x[:, 0] = 1000.0  # First channel has large positive values
+        x[:, 1] = 0.0  # Second channel has zeros
+        x[:, 2] = -1000.0  # Third channel has large negative values
+        self._test_op(Model(), (x,), tester_factory)
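Reviewer note (not part of the diff): a sketch checking the equivalence the Model comment asserts. nn.Softmax2d normalizes over the channel axis, which for 4D NCHW input is dim=1 (dim=-3 in general), so the reference module and the dim=1 functional call should agree.

import torch
import torch.nn.functional as F

x = torch.randn(2, 3, 4, 5)  # (N, C, H, W)

# nn.Softmax2d applies softmax across channels at each spatial
# location, which is exactly softmax over dim=1 for 4D input.
ref = torch.nn.Softmax2d()(x)
assert torch.allclose(ref, F.softmax(x, dim=1))

# Every (n, h, w) location's channel vector now sums to 1.
assert torch.allclose(ref.sum(dim=1), torch.ones(2, 4, 5))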