48 changes: 48 additions & 0 deletions backends/test/compliance_suite/operators/test_log_softmax.py
@@ -0,0 +1,48 @@
# (c) Meta Platforms, Inc. and affiliates. Confidential and proprietary.

# pyre-strict

from typing import Callable

import torch

from executorch.backends.test.compliance_suite import (
    dtype_test,
    operator_test,
    OperatorTest,
)

class Model(torch.nn.Module):
    def __init__(self, dim: int = -1) -> None:
        super().__init__()
        self.dim = dim

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return torch.nn.functional.log_softmax(x, dim=self.dim)

@operator_test
class TestLogSoftmax(OperatorTest):
    @dtype_test
    def test_log_softmax_dtype(self, dtype: torch.dtype, tester_factory: Callable) -> None:
        self._test_op(Model(), ((torch.rand(2, 10) * 100).to(dtype),), tester_factory)

    def test_log_softmax_f32_dim_last(self, tester_factory: Callable) -> None:
        # Default dim is -1 (last dimension)
        self._test_op(Model(), (torch.randn(3, 4, 5),), tester_factory)

    def test_log_softmax_f32_dim_first(self, tester_factory: Callable) -> None:
        # Test with dim=0 (first dimension)
        self._test_op(Model(dim=0), (torch.randn(3, 4, 5),), tester_factory)

    def test_log_softmax_f32_dim_middle(self, tester_factory: Callable) -> None:
        # Test with dim=1 (middle dimension)
        self._test_op(Model(dim=1), (torch.randn(3, 4, 5),), tester_factory)

    def test_log_softmax_f32_1d_tensor(self, tester_factory: Callable) -> None:
        # Test with a 1D tensor
        self._test_op(Model(), (torch.randn(10),), tester_factory)

    def test_log_softmax_f32_large_values(self, tester_factory: Callable) -> None:
        # Test with large values to check numerical stability
        x = torch.tensor([[1000.0, 0.0, -1000.0]])
        self._test_op(Model(), (x,), tester_factory)
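
A note on the large-values case above: a numerically stable log_softmax is conventionally computed with the log-sum-exp trick, shifting by the row max before exponentiating. The sketch below illustrates the behavior the test probes; the stable_log_softmax helper is illustrative only and not part of the suite.

import torch

def stable_log_softmax(x: torch.Tensor, dim: int = -1) -> torch.Tensor:
    # Shifting by the max makes every exp() argument <= 0, so nothing overflows.
    shifted = x - x.amax(dim=dim, keepdim=True)
    return shifted - shifted.exp().sum(dim=dim, keepdim=True).log()

x = torch.tensor([[1000.0, 0.0, -1000.0]])
assert torch.allclose(stable_log_softmax(x), torch.nn.functional.log_softmax(x, dim=-1))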
48 changes: 48 additions & 0 deletions backends/test/compliance_suite/operators/test_softmax.py
@@ -0,0 +1,48 @@
# (c) Meta Platforms, Inc. and affiliates. Confidential and proprietary.

# pyre-strict

from typing import Callable

import torch

from executorch.backends.test.compliance_suite import (
    dtype_test,
    operator_test,
    OperatorTest,
)

class Model(torch.nn.Module):
    def __init__(self, dim: int = -1) -> None:
        super().__init__()
        self.dim = dim

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return torch.nn.functional.softmax(x, dim=self.dim)

@operator_test
class TestSoftmax(OperatorTest):
    @dtype_test
    def test_softmax_dtype(self, dtype: torch.dtype, tester_factory: Callable) -> None:
        self._test_op(Model(), ((torch.rand(2, 10) * 100).to(dtype),), tester_factory)

    def test_softmax_f32_dim_last(self, tester_factory: Callable) -> None:
        # Default dim is -1 (last dimension)
        self._test_op(Model(), (torch.randn(3, 4, 5),), tester_factory)

    def test_softmax_f32_dim_first(self, tester_factory: Callable) -> None:
        # Test with dim=0 (first dimension)
        self._test_op(Model(dim=0), (torch.randn(3, 4, 5),), tester_factory)

    def test_softmax_f32_dim_middle(self, tester_factory: Callable) -> None:
        # Test with dim=1 (middle dimension)
        self._test_op(Model(dim=1), (torch.randn(3, 4, 5),), tester_factory)

    def test_softmax_f32_1d_tensor(self, tester_factory: Callable) -> None:
        # Test with a 1D tensor
        self._test_op(Model(), (torch.randn(10),), tester_factory)

    def test_softmax_f32_large_values(self, tester_factory: Callable) -> None:
        # Test with large values to check numerical stability
        x = torch.tensor([[1000.0, 0.0, -1000.0]])
        self._test_op(Model(), (x,), tester_factory)
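
The softmax large-values test relies on the same idea: softmax is shift-invariant, softmax(x) == softmax(x - c), so stable implementations pick c as the row max. A minimal self-check of that invariant (illustrative, not part of the suite):

import torch

x = torch.tensor([[1000.0, 0.0, -1000.0]])
shifted = (x - x.amax(dim=-1, keepdim=True)).exp()  # exp() sees only values <= 0
stable = shifted / shifted.sum(dim=-1, keepdim=True)
assert torch.allclose(stable, torch.nn.functional.softmax(x, dim=-1))
assert torch.isfinite(stable).all()  # a naive exp(1000.0) would overflow to inf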
49 changes: 49 additions & 0 deletions backends/test/compliance_suite/operators/test_softmax2d.py
@@ -0,0 +1,49 @@
# (c) Meta Platforms, Inc. and affiliates. Confidential and proprietary.

# pyre-strict

from typing import Callable

import torch

from executorch.backends.test.compliance_suite import (
    dtype_test,
    operator_test,
    OperatorTest,
)

class Model(torch.nn.Module):
    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Softmax2d is equivalent to softmax over dim=1 (channels) for 4D inputs
        return torch.nn.functional.softmax(x, dim=1)

@operator_test
class TestSoftmax2d(OperatorTest):
    @dtype_test
    def test_softmax2d_dtype(self, dtype: torch.dtype, tester_factory: Callable) -> None:
        # Input must be 4D (N, C, H, W)
        self._test_op(Model(), ((torch.rand(2, 3, 4, 5) * 100).to(dtype),), tester_factory)

    def test_softmax2d_f32_various_shapes(self, tester_factory: Callable) -> None:
        # Test with a larger spatial extent
        self._test_op(Model(), (torch.randn(1, 3, 8, 8),), tester_factory)

    def test_softmax2d_f32_single_channel(self, tester_factory: Callable) -> None:
        # Test with a single channel (C=1)
        self._test_op(Model(), (torch.randn(2, 1, 4, 4),), tester_factory)

    def test_softmax2d_f32_many_channels(self, tester_factory: Callable) -> None:
        # Test with many channels
        self._test_op(Model(), (torch.randn(2, 16, 4, 4),), tester_factory)

    def test_softmax2d_f32_single_batch(self, tester_factory: Callable) -> None:
        # Test with a single batch (N=1)
        self._test_op(Model(), (torch.randn(1, 3, 4, 4),), tester_factory)

    def test_softmax2d_f32_large_values(self, tester_factory: Callable) -> None:
        # Test with large values to check numerical stability
        x = torch.zeros(2, 3, 2, 2)
        x[:, 0] = 1000.0  # First channel: large positive values
        x[:, 1] = 0.0  # Second channel: zeros
        x[:, 2] = -1000.0  # Third channel: large negative values
        self._test_op(Model(), (x,), tester_factory)
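
As a sanity check on the Model comment above: torch.nn.Softmax2d applied to a 4D (N, C, H, W) input should match softmax over dim=1, and each spatial location's channel vector should then sum to 1. A quick standalone check (illustrative, not part of the suite):

import torch

x = torch.randn(2, 3, 4, 4)
out = torch.nn.Softmax2d()(x)
assert torch.allclose(out, torch.nn.functional.softmax(x, dim=1))
assert torch.allclose(out.sum(dim=1), torch.ones(2, 4, 4))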