Skip to content

Commit 719065a

Browse files
committed
Fine tune atol and rtol in test_fp16_exp in backends/xnnpack/test/ops/test_exp.py
1 parent db8ef6b commit 719065a

File tree

1 file changed

+4
-5
lines changed

1 file changed

+4
-5
lines changed

backends/xnnpack/test/ops/test_exp.py

Lines changed: 4 additions & 5 deletions
Original file line number · Diff line number · Diff line change
@@ -21,8 +21,8 @@ def __init__(self):
     def forward(self, x):
         return torch.exp(x)

-    def run_exp_test(self, inputs):
-        (
+    def get_exp_test(self, inputs):
+        return (
             Tester(self.Exp(), inputs)
             .export()
             .check_count({"torch.ops.aten.exp.default": 1})
@@ -31,13 +31,12 @@ def run_exp_test(self, inputs):
             .check_not(["executorch_exir_dialects_edge__ops_aten_exp_default"])
             .to_executorch()
             .serialize()
-            .run_method_and_compare_outputs()
         )

     def test_fp16_exp(self):
         inputs = (torch.randn(20).to(torch.float16),)
-        self.run_exp_test(inputs)
+        self.get_exp_test(inputs).run_method_and_compare_outputs(atol=0.01, rtol=0.001)

     def test_fp32_exp(self):
         inputs = (torch.randn(20),)
-        self.run_exp_test(inputs)
+        self.get_exp_test(inputs).run_method_and_compare_outputs()

0 commit comments

Comments
 (0)