1 parent 0511190 commit d392a01
pytorch_optimizer/__init__.py
@@ -9,6 +9,7 @@
 from pytorch_optimizer.diffrgrad import DiffRGrad
 from pytorch_optimizer.fp16 import DynamicLossScaler, SafeFP16Optimizer
 from pytorch_optimizer.gc import centralize_gradient
+from pytorch_optimizer.lamb import Lamb
 from pytorch_optimizer.lookahead import Lookahead
 from pytorch_optimizer.madgrad import MADGRAD
 from pytorch_optimizer.optimizers import load_optimizers
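
This commit re-exports the Lamb optimizer from the package's top level, so it can be imported directly from pytorch_optimizer. A minimal usage sketch, assuming Lamb follows the usual torch.optim.Optimizer constructor convention of accepting model parameters and a learning rate (other keyword arguments may differ in this version of the library):

# Minimal sketch: construct Lamb via the new top-level export and run one step.
import torch
from pytorch_optimizer import Lamb

model = torch.nn.Linear(10, 2)
# lr is assumed to be a supported keyword; defaults may vary by version.
optimizer = Lamb(model.parameters(), lr=1e-3)

loss = model(torch.randn(4, 10)).sum()
loss.backward()
optimizer.step()
optimizer.zero_grad()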