1 parent a42b974 commit ed2fe05
tests/tests_pytorch/plugins/precision/test_amp.py
@@ -14,6 +14,7 @@
 from unittest.mock import Mock

 import pytest
+from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
 from torch.nn import Module
 from torch.optim import Optimizer

@@ -23,7 +24,7 @@

 def test_clip_gradients():
     """Test that `.clip_gradients()` is a no-op when clipping is disabled."""
-    module = Mock(spec=Module)
+    module = FSDP(Mock(spec=Module))
     optimizer = Mock(spec=Optimizer)
     precision = MixedPrecision(precision="16-mixed", device="cuda:0", scaler=Mock())
     precision.clip_grad_by_value = Mock()
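
For context, the second hunk swaps the plain `Mock(spec=Module)` for a module wrapped in `FullyShardedDataParallel`, so the no-op clipping check now exercises an FSDP-wrapped module. The sketch below is not part of this commit; it only illustrates the wrapping pattern in isolation, assuming a single-process gloo process group and a PyTorch build where FSDP can be constructed on CPU.

# Illustrative sketch only (not from this commit): wrap a small real module in
# FSDP inside a throwaway single-process group. Assumes the gloo backend and a
# PyTorch version where FSDP initializes on CPU.
import os

import torch.distributed as dist
from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
from torch.nn import Linear

os.environ.setdefault("MASTER_ADDR", "127.0.0.1")
os.environ.setdefault("MASTER_PORT", "29501")
dist.init_process_group("gloo", rank=0, world_size=1)

# FSDP flattens the wrapped parameters; with world_size=1 the "sharding" is trivial.
wrapped = FSDP(Linear(4, 4))
print(type(wrapped).__name__, sum(p.numel() for p in wrapped.parameters()))

dist.destroy_process_group()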