We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 2d6fe2a commit 7b59b46 — Copy full SHA for 7b59b46
pytorch_optimizer/optimizer/sm3.py
@@ -8,10 +8,13 @@
8
class SM3(Optimizer, BaseOptimizer):
9
r"""Memory-Efficient Adaptive Optimization.
10
11
+ Reference : https://github.com/Enealor/PyTorch-SM3/blob/master/src/SM3/SM3.py
12
+
13
:param params: PARAMETERS. iterable of parameters to optimize or dicts defining parameter groups.
14
:param lr: float. learning rate.
- :param momentum: float.
- :param beta: float.
15
+ :param momentum: float. coefficient used to scale prior updates before adding. This drastically increases
16
+ memory usage if `momentum > 0.0`. This is ignored if the parameter's gradient is sparse.
17
+ :param beta: float. coefficient used for exponential moving averages.
18
"""
19
20
def __init__(
0 commit comments