Skip to content

Commit 0915adb

Browse files
committed
fix: do not assign a default value to the num_iterations parameter
1 parent 4a7c8d4 commit 0915adb

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

pytorch_optimizer/ranger21.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -41,12 +41,12 @@ class Ranger21(Optimizer, BaseOptimizer):
4141
def __init__(
4242
self,
4343
params: PARAMETERS,
44+
num_iterations: int,
4445
lr: float = 1e-3,
4546
beta0: float = 0.9,
4647
betas: BETAS = (0.9, 0.999),
4748
use_softplus: bool = True,
4849
beta_softplus: float = 50.0,
49-
num_iterations: Optional[int] = None,
5050
num_warm_up_iterations: Optional[int] = None,
5151
num_warm_down_iterations: Optional[int] = None,
5252
warm_down_min_lr: float = 3e-5,
@@ -68,8 +68,8 @@ def __init__(
6868
:param betas: BETAS. coefficients used for computing running averages of gradient and the squared hessian trace
6969
:param use_softplus: bool. use softplus to smooth
7070
:param beta_softplus: float. beta
71-
:param agc_clipping_value: float
72-
:param agc_eps: float
71+
:param agc_clipping_value: float.
72+
:param agc_eps: float. eps for AGC
7373
:param centralize_gradients: bool. use GC both convolution & fc layers
7474
:param normalize_gradients: bool. use gradient normalization
7575
:param lookahead_merge_time: int. merge time

0 commit comments

Comments
 (0)