
Commit 30b7a25

update: recipe

1 parent a87fc32

2 files changed (+3, −5)


tests/constants.py (1 addition, 0 deletions)

@@ -408,4 +408,5 @@
     (AdaMod, {'lr': 1e2, 'weight_decay': 1e-3, 'adam_debias': True}, 20),
     (AdaMax, {'lr': 1e0, 'weight_decay': 1e-3, 'adam_debias': True}, 5),
     (AvaGrad, {'lr': 1e1, 'weight_decay': 1e-3, 'adam_debias': True}, 5),
+    (AdaHessian, {'lr': 5e0, 'weight_decay': 1e-3, 'adam_debias': True}, 5),
 ]
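
Each row of this table pairs an optimizer class with its constructor kwargs and an iteration budget. As a rough illustration of how such a table is typically consumed by a parametrized test (a minimal, self-contained sketch; the table entries and test body below are assumptions, not this repository's code):

import pytest
import torch

# Illustrative table in the same (optimizer, kwargs, iterations) shape as
# tests/constants.py; these entries are placeholders, not the real constants.
OPTIMIZERS = [
    (torch.optim.Adam, {'lr': 1e-2, 'weight_decay': 1e-3}, 20),
    (torch.optim.SGD, {'lr': 1e-1, 'weight_decay': 1e-3}, 20),
]

@pytest.mark.parametrize('optimizer_class,config,num_iterations', OPTIMIZERS)
def test_optimizer_reduces_loss(optimizer_class, config, num_iterations):
    weight = torch.randn(2, requires_grad=True)
    optimizer = optimizer_class([weight], **config)
    init_loss = (weight ** 2).sum().item()
    for _ in range(num_iterations):
        optimizer.zero_grad()
        loss = (weight ** 2).sum()
        loss.backward()
        optimizer.step()
    assert loss.item() < init_loss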

tests/test_optimizers.py (2 additions, 5 deletions)

@@ -66,10 +66,7 @@ def _closure() -> float:
         if init_loss == np.inf:
             init_loss = loss
 
-        if optimizer_name in ('AdaHessian', 'SophiaH'):
-            loss.backward(create_graph=True)
-        else:
-            loss.backward()
+        loss.backward(create_graph=optimizer_name in ('AdaHessian', 'SophiaH'))
 
         optimizer.step(closure(loss) if optimizer_name == 'AliG' else None)
 

@@ -236,7 +233,7 @@ def test_adamd_optimizers(optimizer_config, environment):
         if init_loss == np.inf:
             init_loss = loss
 
-        loss.backward()
+        loss.backward(create_graph=optimizer_class.__name__ in ('AdaHessian',))
 
         optimizer.step()
 
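
The consolidated loss.backward(create_graph=...) call matters because AdaHessian and SophiaH are second-order methods: the first backward pass must keep its own autograd graph so a Hessian-vector product can be taken afterwards. A minimal sketch of that mechanism using a Hutchinson-style diagonal estimate (illustrative only, not the library's implementation):

import torch

# Quadratic loss: gradient is 2x, Hessian is 2I, so the estimate below is exact.
x = torch.randn(3, requires_grad=True)
loss = (x ** 2).sum()

# create_graph=True keeps the graph of the gradient computation itself,
# which is what lets AdaHessian-style optimizers differentiate through it.
loss.backward(create_graph=True)

# Hutchinson estimator: diag(H) ~= E[z * (H @ z)] with Rademacher z in {-1, +1}.
z = torch.randint_like(x, 0, 2) * 2.0 - 1.0
hvp = torch.autograd.grad(x.grad, x, grad_outputs=z)[0]  # computes H @ z
hessian_diag = z * hvp  # equals 2.0 in every coordinate for this loss

For every other optimizer in the suite the expression evaluates to create_graph=False, so the refactored single-line backward is behavior-preserving for the first-order methods.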
