1 parent e21dcd3 commit be6110b
tests/test_gradients.py
@@ -59,10 +59,6 @@ def test_sparse_supported(sparse_optimizer):
     optimizer.zero_grad()
     optimizer.step()
 
-    optimizer = opt([param], momentum=0.9, weight_decay=1e-3)
-    optimizer.reset()
-    optimizer.zero_grad()
-
     if sparse_optimizer == 'madgrad':
         optimizer = opt([param], weight_decay=1e-3, decouple_decay=True)
         optimizer.reset()
@@ -71,6 +67,10 @@ def test_sparse_supported(sparse_optimizer):
     with pytest.raises(NoSparseGradientError):
         optimizer.step()
 
+    optimizer = opt([param], momentum=0.9, weight_decay=1e-3)
+    optimizer.reset()
+    optimizer.zero_grad()
+
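For context, this commit moves the momentum/weight-decay optimizer setup from before the madgrad branch to after the NoSparseGradientError assertion, so the check that stepping over a sparse gradient raises runs against the optimizer configured earlier in the test. Below is a minimal, hedged sketch of how a test like test_sparse_supported can attach a sparse gradient to a parameter; the shapes, values, and variable names are assumptions for illustration, not taken from the test file.

```python
import torch

# Sketch only: prepare a parameter that carries a sparse gradient, mimicking
# what layers such as nn.Embedding(sparse=True) produce during backward.
weight = torch.randn(5, 4, requires_grad=True)

# Build a sparse COO gradient of the same shape and attach it manually.
indices = torch.tensor([[0, 2], [1, 3]])  # (ndim, nnz) coordinate pairs
values = torch.tensor([1.0, -1.0])
weight.grad = torch.sparse_coo_tensor(indices, values, size=(5, 4))

# An optimizer without sparse-gradient support is then expected to raise
# (NoSparseGradientError in this library, per the diff above) when step()
# is called on a parameter whose .grad has a sparse layout.
```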