Skip to content

Commit d728f9e

Browse files
committed
update: AdamG optimizer
1 parent 35dcf77 commit d728f9e

File tree

3 files changed

+6
-1
lines changed

3 files changed

+6
-1
lines changed

pytorch_optimizer/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@
 from pytorch_optimizer.optimizer.adalite import Adalite
 from pytorch_optimizer.optimizer.adam_mini import AdamMini
 from pytorch_optimizer.optimizer.adamax import AdaMax
+from pytorch_optimizer.optimizer.adamg import AdamG
 from pytorch_optimizer.optimizer.adamod import AdaMod
 from pytorch_optimizer.optimizer.adamp import AdamP
 from pytorch_optimizer.optimizer.adams import AdamS
@@ -206,6 +207,7 @@
     StableAdamW,
     AdamMini,
     AdaLOMO,
+    AdamG,
 ]
 OPTIMIZERS: Dict[str, OPTIMIZER] = {str(optimizer.__name__).lower(): optimizer for optimizer in OPTIMIZER_LIST}

tests/constants.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
     Adai,
     Adalite,
     AdaMax,
+    AdamG,
     AdaMod,
     AdamP,
     AdamS,
@@ -136,6 +137,7 @@
     'grokfastadamw',
     'stableadamw',
     'adammini',
+    'adamg',
 ]

 VALID_LR_SCHEDULER_NAMES: List[str] = [
@@ -468,6 +470,7 @@
     (GrokFastAdamW, {'lr': 1e0, 'weight_decay': 1e-3}, 10),
     (Kate, {'lr': 5e-2}, 10),
     (StableAdamW, {'lr': 1e0}, 5),
+    (AdamG, {'lr': 1e0}, 20),
 ]
 ADANORM_SUPPORTED_OPTIMIZERS: List[Tuple[Any, Dict[str, Union[float, bool, int]], int]] = [
     (AdaBelief, {'lr': 5e-1, 'weight_decay': 1e-3, 'adanorm': True}, 10),

tests/test_load_modules.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ def test_load_lr_scheduler_invalid(invalid_lr_scheduler_names):


 def test_get_supported_optimizers():
-    assert len(get_supported_optimizers()) == 73
+    assert len(get_supported_optimizers()) == 74


 def test_get_supported_lr_schedulers():

0 commit comments

Comments (0)