Skip to content

Commit 4c9f1cc

Browse files
committed
refactor: test_optimizers
1 parent fa6f473 commit 4c9f1cc

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

tests/test_optimizers.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -92,8 +92,8 @@ def build_lookahead(*parameters, **kwargs):
9292
(AdamP, {'lr': 5e-1, 'weight_decay': 1e-3}, 200),
9393
(DiffGrad, {'lr': 5e-1, 'weight_decay': 1e-3}, 200),
9494
(DiffRGrad, {'lr': 5e-1, 'weight_decay': 1e-3}, 200),
95-
(Lamb, {'lr': 1e-1, 'weight_decay': 1e-3}, 200),
96-
(Lamb, {'lr': 2e-1, 'weight_decay': 1e-3, 'pre_norm': True, 'eps': 1e-8}, 500),
95+
(Lamb, {'lr': 1e-1, 'weight_decay': 1e-3}, 500),
96+
(Lamb, {'lr': 1e-1, 'weight_decay': 1e-3, 'pre_norm': True, 'eps': 1e-8}, 500),
9797
(RaLamb, {'lr': 1e-1, 'weight_decay': 1e-3}, 200),
9898
(MADGRAD, {'lr': 1e-2, 'weight_decay': 1e-3}, 500),
9999
(RAdam, {'lr': 1e-1, 'weight_decay': 1e-3}, 200),
@@ -260,7 +260,7 @@ def test_pc_grad_optimizers(optimizer_pc_grad_config):
260260
optimizer.pc_backward([loss1, loss2])
261261
optimizer.step()
262262

263-
assert tensor_to_numpy(init_loss) > 2.0 * tensor_to_numpy(loss)
263+
assert tensor_to_numpy(init_loss) > 1.5 * tensor_to_numpy(loss)
264264

265265

266266
@pytest.mark.parametrize('optimizer_config', OPTIMIZERS, ids=ids)

0 commit comments

Comments
 (0)