Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion backends/test/suite/operators/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -133,7 +133,9 @@ def _create_test_for_backend(


class OperatorTest(unittest.TestCase):
def _test_op(self, model, inputs, flow: TestFlow):
def _test_op(
self, model, inputs, flow: TestFlow, generate_random_test_inputs: bool = True
):
context = get_active_test_context()

# This should be set in the wrapped test. See _make_wrapped_test above.
Expand All @@ -145,6 +147,7 @@ def _test_op(self, model, inputs, flow: TestFlow):
flow,
context.test_name,
context.params,
generate_random_test_inputs=generate_random_test_inputs,
)

log_test_summary(run_summary)
Expand Down
89 changes: 89 additions & 0 deletions backends/test/suite/operators/test_embedding.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

# pyre-unsafe

import torch
from executorch.backends.test.suite.flow import TestFlow

from executorch.backends.test.suite.operators import (
dtype_test,
operator_test,
OperatorTest,
)


class Model(torch.nn.Module):
    """Thin wrapper around ``torch.nn.Embedding`` used by the operator tests.

    Maps a LongTensor of indices in ``[0, num_embeddings)`` to dense vectors
    of size ``embedding_dim``.
    """

    def __init__(self, num_embeddings=100, embedding_dim=50):
        super().__init__()
        # Lookup table with randomly initialized weights.
        self.embedding = torch.nn.Embedding(num_embeddings, embedding_dim)

    def forward(self, x):
        # Output shape is x.shape + (embedding_dim,).
        return self.embedding(x)


@operator_test
class Embedding(OperatorTest):
    # generate_random_test_inputs=False is passed on every call because the
    # tester's random input generation cannot constrain index tensors to the
    # valid [0, num_embeddings) range and would produce out-of-range lookups.

    @dtype_test
    def test_embedding_dtype(self, flow: TestFlow, dtype) -> None:
        indices = torch.randint(0, 10, (2, 8), dtype=torch.long)
        self._test_op(
            Model().to(dtype),
            (indices,),
            flow,
            generate_random_test_inputs=False,
        )

    def test_embedding_sizes(self, flow: TestFlow) -> None:
        # (num_embeddings, embedding_dim, index tensor shape) combinations.
        for num_embeddings, embedding_dim, shape in (
            (5, 3, (2, 8)),
            (100, 10, (2, 8)),
            (1000, 50, (2, 4)),
        ):
            self._test_op(
                Model(num_embeddings=num_embeddings, embedding_dim=embedding_dim),
                (torch.randint(0, num_embeddings, shape, dtype=torch.long),),
                flow,
                generate_random_test_inputs=False,
            )

    def test_embedding_batch_dim(self, flow: TestFlow) -> None:
        # 1-D, 2-D, and 3-D index tensors against the default-size table.
        for shape in ((5,), (2, 8), (2, 3, 4)):
            self._test_op(
                Model(),
                (torch.randint(0, 100, shape, dtype=torch.long),),
                flow,
                generate_random_test_inputs=False,
            )
118 changes: 118 additions & 0 deletions backends/test/suite/operators/test_embedding_bag.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,118 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

# pyre-unsafe

import torch
from executorch.backends.test.suite.flow import TestFlow

from executorch.backends.test.suite.operators import (
dtype_test,
operator_test,
OperatorTest,
)


class Model(torch.nn.Module):
    """Thin wrapper around ``torch.nn.EmbeddingBag`` used by the operator tests.

    Looks up a flat LongTensor of indices and reduces each bag (delimited by
    ``offsets``) with the given ``mode`` ("sum", "mean", or "max").
    """

    def __init__(
        self,
        num_embeddings=10,
        embedding_dim=5,
        mode="mean",
        include_last_offset: bool = False,
    ):
        super().__init__()
        self.embedding_bag = torch.nn.EmbeddingBag(
            num_embeddings,
            embedding_dim,
            mode=mode,
            include_last_offset=include_last_offset,
        )

    def forward(self, x, offsets=None):
        # One output row per bag; offsets mark each bag's start position in x.
        return self.embedding_bag(x, offsets)


@operator_test
class EmbeddingBag(OperatorTest):
    # generate_random_test_inputs=False is passed on every call because the
    # tester's random input generation cannot constrain index tensors to the
    # valid [0, num_embeddings) range and would produce out-of-range lookups.

    @dtype_test
    def test_embedding_bag_dtype(self, flow: TestFlow, dtype) -> None:
        indices = torch.tensor([1, 2, 4, 5, 4, 3, 2, 9], dtype=torch.long)
        offsets = torch.tensor([0, 4], dtype=torch.long)
        self._test_op(
            Model().to(dtype),
            (indices, offsets),
            flow,
            generate_random_test_inputs=False,
        )

    def test_embedding_bag_sizes(self, flow: TestFlow) -> None:
        # (num_embeddings, embedding_dim, indices, offsets) combinations.
        cases = (
            (5, 3, [1, 2, 3, 1], [0, 2]),
            (50, 10, [5, 20, 10, 43, 7], [0, 2, 4]),
            (500, 20, [100, 200, 300, 400], [0, 2]),
        )
        for num_embeddings, embedding_dim, indices, offsets in cases:
            self._test_op(
                Model(num_embeddings=num_embeddings, embedding_dim=embedding_dim),
                (
                    torch.tensor(indices, dtype=torch.long),
                    torch.tensor(offsets, dtype=torch.long),
                ),
                flow,
                generate_random_test_inputs=False,
            )

    def test_embedding_bag_modes(self, flow: TestFlow) -> None:
        indices = torch.tensor([1, 2, 4, 5, 4, 3, 2, 9], dtype=torch.long)
        offsets = torch.tensor([0, 4], dtype=torch.long)

        # Exercise each supported reduction mode on the same input.
        for mode in ("sum", "mean", "max"):
            self._test_op(
                Model(mode=mode),
                (indices, offsets),
                flow,
                generate_random_test_inputs=False,
            )

    def test_embedding_bag_include_last_offset(self, flow: TestFlow) -> None:
        indices = torch.tensor([1, 2, 4, 5, 4, 3, 2, 9], dtype=torch.long)
        offsets = torch.tensor([0, 4], dtype=torch.long)

        self._test_op(
            Model(include_last_offset=True),
            (indices, offsets),
            flow,
            generate_random_test_inputs=False,
        )
5 changes: 4 additions & 1 deletion backends/test/suite/runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ def run_test( # noqa: C901
test_name: str,
params: dict | None,
dynamic_shapes: Any | None = None,
generate_random_test_inputs: bool = True,
) -> TestCaseSummary:
"""
Top-level test run function for a model, input set, and tester. Handles test execution
Expand Down Expand Up @@ -102,7 +103,9 @@ def build_result(
# the cause of a failure in run_method_and_compare_outputs. We can look for
# AssertionErrors to catch output mismatches, but this might catch more than that.
try:
tester.run_method_and_compare_outputs()
tester.run_method_and_compare_outputs(
inputs=None if generate_random_test_inputs else inputs
)
except AssertionError as e:
return build_result(TestResult.OUTPUT_MISMATCH_FAIL, e)
except Exception as e:
Expand Down
Loading