
Commit 48342a5

[skip ci] docs: docstring
1 parent ed25206 commit 48342a5

File tree: 1 file changed (+10 -4 lines)


pytorch_optimizer/ranger.py

Lines changed: 10 additions & 4 deletions
@@ -16,11 +16,7 @@
 
 class Ranger(Optimizer):
     """
-    Ranger optimizer (RAdam + Lookahead + Gradient Centralization, combined into one optimizer)
     Reference : https://github.com/lessw2020/Ranger-Deep-Learning-Optimizer/blob/master/ranger/ranger.py
-
-    :param use_gc: bool. use Gradient Centralization (both convolution & fc layers)
-    :param gc_conv_only: bool. use Gradient Centralization (only convolution layer)
     """
 
     def __init__(
@@ -36,6 +32,16 @@ def __init__(
         use_gc: bool = True,
         gc_conv_only: bool = False,
     ):
+        """Ranger optimizer (RAdam + Lookahead + Gradient Centralization, combined into one optimizer)
+        :param params: PARAMS. iterable of parameters to optimize or dicts defining parameter groups
+        :param lr: float. learning rate.
+        :param n_sma_threshold: int. (recommended is 5)
+        :param betas: BETAS. coefficients used for computing running averages of gradient and the squared hessian trace
+        :param eps: float. term added to the denominator to improve numerical stability
+        :param weight_decay: float. weight decay (L2 penalty)
+        :param use_gc: bool. use Gradient Centralization (both convolution & fc layers)
+        :param gc_conv_only: bool. use Gradient Centralization (only convolution layer)
+        """
         self.lr = lr
         self.alpha = alpha
         self.k = k
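For context, here is a minimal usage sketch of the constructor documented above. The import path is inferred from this commit's file tree, and the keyword values are illustrative assumptions drawn from the new :param: entries, not defaults confirmed by the diff:

import torch
from torch import nn

from pytorch_optimizer.ranger import Ranger  # module path per this commit's file tree

# Toy model; any nn.Module works.
model = nn.Linear(10, 2)

# Keyword arguments mirror the :param: entries added in this commit;
# the values are illustrative assumptions, not verified defaults.
optimizer = Ranger(
    model.parameters(),   # params: iterable of parameters or dicts of parameter groups
    lr=1e-3,              # learning rate
    n_sma_threshold=5,    # the docstring recommends 5
    betas=(0.95, 0.999),  # running-average coefficients
    eps=1e-5,             # term added to the denominator for numerical stability
    weight_decay=0.0,     # L2 penalty
    use_gc=True,          # Gradient Centralization on both conv & fc layers
    gc_conv_only=False,   # True would centralize convolution layers only
)

# One standard training step.
loss = model(torch.randn(4, 10)).sum()
loss.backward()
optimizer.step()
optimizer.zero_grad()

In the upstream Ranger reference linked in the docstring, Gradient Centralization amounts to subtracting from each multi-dimensional gradient its mean over all dimensions except the first, e.g. grad.add_(-grad.mean(dim=tuple(range(1, grad.dim())), keepdim=True)); the use_gc and gc_conv_only flags control which layer types receive this treatment.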
