Skip to content

Commit 9c2650e

Browse files
committed
update: Ranger21
1 parent 18a5fda commit 9c2650e

File tree

1 file changed: +3 −3 lines changed

1 file changed: +3 −3 lines changed

tests/test_optimizers.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,16 +11,15 @@
1111
SGDP,
1212
AdaBelief,
1313
AdaBound,
14-
AdaHessian,
1514
AdamP,
1615
DiffGrad,
1716
DiffRGrad,
1817
Lamb,
1918
Lookahead,
2019
RAdam,
2120
Ranger,
21+
Ranger21,
2222
)
23-
from pytorch_optimizer.types import BETAS
2423

2524
__REFERENCE__ = 'https://github.com/jettify/pytorch-optimizer/blob/master/tests/test_optimizer_with_nn.py'
2625

@@ -67,7 +66,7 @@ def build_lookahead(*parameters, **kwargs):
6766
return Lookahead(AdamP(*parameters, **kwargs))
6867

6968

70-
OPTIMIZERS: List[Tuple[Any, Dict[str, Union[float, bool, int, BETAS]], int]] = [
69+
OPTIMIZERS: List[Tuple[Any, Dict[str, Union[float, bool, int]], int]] = [
7170
(build_lookahead, {'lr': 1e-2, 'weight_decay': 1e-3}, 200),
7271
(AdaBelief, {'lr': 1e-2, 'weight_decay': 1e-3}, 200),
7372
(AdaBound, {'lr': 1e-2, 'gamma': 0.1, 'weight_decay': 1e-3}, 200),
@@ -79,6 +78,7 @@ def build_lookahead(*parameters, **kwargs):
7978
(RAdam, {'lr': 1e-1, 'weight_decay': 1e-3}, 200),
8079
(SGDP, {'lr': 1e-1, 'weight_decay': 1e-3}, 200),
8180
(Ranger, {'lr': 1e-1, 'weight_decay': 1e-3}, 200),
81+
(Ranger21, {'lr': 5e-1, 'weight_decay': 1e-3, 'num_iterations': 1000}, 500),
8282
# (AdaHessian, {'lr': 1e-2, 'weight_decay': 1e-3}, 200),
8383
]
8484

0 commit comments

Comments (0)