We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent f1381d7 commit c7c3cfb — Copy full SHA for c7c3cfb
pytorch_optimizer/__init__.py
@@ -18,6 +18,7 @@
18
from pytorch_optimizer.optimizer.adabound import AdaBound
19
from pytorch_optimizer.optimizer.adai import Adai
20
from pytorch_optimizer.optimizer.adamp import AdamP
21
+from pytorch_optimizer.optimizer.adams import AdamS
22
from pytorch_optimizer.optimizer.adan import Adan
23
from pytorch_optimizer.optimizer.adapnm import AdaPNM
24
from pytorch_optimizer.optimizer.agc import agc
@@ -88,6 +89,7 @@
88
89
DAdaptAdaGrad,
90
DAdaptAdam,
91
DAdaptSGD,
92
+ AdamS,
93
]
94
OPTIMIZERS: Dict[str, OPTIMIZER] = {str(optimizer.__name__).lower(): optimizer for optimizer in OPTIMIZER_LIST}
95
0 commit comments