
Commit bd4af6c

gmagogsfm authored and facebook-github-bot committed
executorch/extension/llm/modules/test
Reviewed By: avikchaudhuri
Differential Revision: D67383699
1 parent b0bf9aa commit bd4af6c

2 files changed (+7, −2 lines):

extension/llm/modules/test/test_attention.py
extension/llm/modules/test/test_position_embeddings.py
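The change itself is mechanical: every torch.export.export call in these two test files now passes strict=True explicitly instead of relying on the parameter's default. As a minimal sketch only (the toy module, input shapes, and the Dim name below are illustrative assumptions, not code from this commit), an explicit strict export looks like this:

    import torch
    from torch.export import Dim, export

    class Toy(torch.nn.Module):
        def forward(self, x: torch.Tensor) -> torch.Tensor:
            return x * 2

    x = torch.randn(4, 8)
    # Mark dim 0 as dynamic, mirroring how these tests pass dynamic_shapes.
    batch = Dim("batch", min=1, max=16)
    ep = export(Toy(), (x,), dynamic_shapes=({0: batch},), strict=True)

    # The ExportedProgram is callable through .module(), which is how the
    # tests compare exported against eager results (e.g. et_mha_ep.module()(...)).
    y = ep.module()(x)
    assert torch.allclose(y, x * 2)

One plausible reading (the commit message does not say) is that pinning strict explicitly keeps the tests' behavior stable even if the default value of strict changes across PyTorch releases.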

extension/llm/modules/test/test_attention.py (2 additions, 0 deletions)

@@ -150,6 +150,7 @@ def test_attention_export(self):
             (self.x, self.x),
             kwargs={"input_pos": self.input_pos},
             dynamic_shapes=self.dynamic_shapes,
+            strict=True,
         )
         et_res = et_mha_ep.module()(self.x, self.x, input_pos=self.input_pos)
         tt_res = self.tt_mha(self.x, self.x, input_pos=self.input_pos)
@@ -196,6 +197,7 @@ def test_attention_executorch(self):
             (self.x, self.x),
             kwargs={"input_pos": self.input_pos},
             dynamic_shapes=self.dynamic_shapes,
+            strict=True,
         )
         et_program = to_edge(
             et_mha_ep,

extension/llm/modules/test/test_position_embeddings.py (5 additions, 2 deletions)

@@ -49,14 +49,14 @@ def test_tile_positional_embedding_smoke(self):
         self.assertTrue(torch.allclose(y, ref_y))
 
     def test_tile_positional_embedding_export(self):
-
         tpe_ep = torch.export.export(
             self.tpe,
             (self.x, self.aspect_ratio),
             dynamic_shapes=(
                 self.dynamic_shape,
                 None,
             ),  # assuming aspect ratio is static
+            strict=True,
         )
 
         y = tpe_ep.module()(self.x, self.aspect_ratio)
@@ -91,6 +91,7 @@ def test_tile_positional_embedding_et(self):
                 self.dynamic_shape,
                 None,
             ),  # assuming aspect ratio is static
+            strict=True,
         )
         et_program = to_edge(
             tpe_ep,
@@ -148,14 +149,14 @@ def test_tiled_token_positional_embedding_smoke(self):
         assert_close(y, ref_y)
 
     def test_tiled_token_positional_embedding_export(self):
-
         tpe_ep = torch.export.export(
             self.tpe,
             (self.x, self.aspect_ratio),
             dynamic_shapes=(
                 self.dynamic_shape,
                 None,
             ),  # assuming aspect ratio is static
+            strict=True,
         )
 
         y = tpe_ep.module()(self.x, self.aspect_ratio)
@@ -172,6 +173,7 @@ def test_tiled_token_positional_embedding_aoti(self):
                 self.dynamic_shape,
                 None,
             ),  # assuming aspect ratio is static
+            strict=True,
         )
 
         with tempfile.TemporaryDirectory() as tmpdir:
@@ -195,6 +197,7 @@ def test_tiled_token_positional_embedding_et(self):
                 self.dynamic_shape,
                 None,
             ),  # assuming aspect ratio is static
+            strict=True,
         )
         et_program = to_edge(
             tpe_ep,
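Several of the touched tests feed the exported program straight into to_edge, so the pinned strict=True sits at the front of the usual ExecuTorch lowering pipeline. A rough sketch of that flow, with a stand-in module (the module and input shapes are assumptions, not code from these tests):

    import torch
    from executorch.exir import to_edge

    class Toy(torch.nn.Module):
        def forward(self, x: torch.Tensor) -> torch.Tensor:
            return x + 1

    # Export with the same explicit strict=True this commit adds.
    ep = torch.export.export(Toy(), (torch.randn(2, 3),), strict=True)
    edge = to_edge(ep)                 # lower to the Edge dialect
    et_program = edge.to_executorch()  # ExecuTorch program, ready to serialize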
