Commit ed2fe05

Wrap AMP test module in FSDP

1 parent a42b974

1 file changed: +2 -1 lines changed

tests/tests_pytorch/plugins/precision/test_amp.py

Lines changed: 2 additions & 1 deletion
@@ -14,6 +14,7 @@
 from unittest.mock import Mock

 import pytest
+from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
 from torch.nn import Module
 from torch.optim import Optimizer

@@ -23,7 +24,7 @@

 def test_clip_gradients():
     """Test that `.clip_gradients()` is a no-op when clipping is disabled."""
-    module = Mock(spec=Module)
+    module = FSDP(Mock(spec=Module))
     optimizer = Mock(spec=Optimizer)
     precision = MixedPrecision(precision="16-mixed", device="cuda:0", scaler=Mock())
     precision.clip_grad_by_value = Mock()
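
The change swaps the bare Mock(spec=Module) for one wrapped in FullyShardedDataParallel, so any isinstance checks inside the precision plugin now see a real FSDP instance rather than a plain nn.Module. A minimal, self-contained sketch of why the wrap matters is below; it is not part of the commit, and it assumes a single-rank "gloo" process group and a real nn.Linear standing in for the test's Mock, since FSDP cannot be constructed without an initialized process group and this sketch relies on a PyTorch build where FSDP supports CPU with the gloo backend.

import os

import torch.distributed as dist
from torch import nn
from torch.distributed.fsdp import FullyShardedDataParallel as FSDP

# FSDP requires an initialized process group; a single-rank "gloo" group
# is enough for a CPU-only illustration (assumed setup, not from the commit).
os.environ.setdefault("MASTER_ADDR", "127.0.0.1")
os.environ.setdefault("MASTER_PORT", "29501")
dist.init_process_group("gloo", rank=0, world_size=1)

module = nn.Linear(4, 4)
# A plain module (or a Mock(spec=Module)) is not an FSDP instance...
assert isinstance(module, nn.Module)
assert not isinstance(module, FSDP)

# ...but the wrapped module is, so FSDP-specific branches in code that
# inspects the module type (such as a precision plugin) become reachable.
wrapped = FSDP(module)
assert isinstance(wrapped, FSDP)

dist.destroy_process_group()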
