Skip to content

Commit 70023a1

Browse files
committed
Only test for bf16-mixed
1 parent 064caf7 commit 70023a1

File tree

1 file changed

+2
-3
lines changed

1 file changed

+2
-3
lines changed

tests/tests_pytorch/plugins/precision/test_amp.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -55,13 +55,12 @@ def test_optimizer_amp_scaling_support_in_step_method():
 55  55          precision.clip_gradients(optimizer, clip_val=1.0)
 56  56
 57  57
 58     -    @pytest.mark.parametrize("precision", ["16-mixed", "bf16-mixed"])
 59     -    def test_amp_with_no_grad(precision: str):
     58 +    def test_amp_with_no_grad():
 60  59      """Test that asserts using `no_grad` context wrapper with a persistent AMP context wrapper does not break gradient
 61  60      tracking."""
 62  61          layer = nn.Linear(2, 1)
 63  62          x = torch.randn(1, 2)
 64     -        amp = MixedPrecision(precision=precision, device="cpu")
     63 +        amp = MixedPrecision(precision="bf16-mixed", device="cpu")
 65  64
 66  65          with amp.autocast_context_manager():
 67  66              with torch.no_grad():

0 commit comments

Comments
 (0)