1 parent 476df4e commit b8d314f
pytorch_optimizer/optimizer/ranger.py
@@ -30,12 +30,11 @@ def __init__(
         self,
         params: PARAMETERS,
         lr: float = 1e-3,
+        betas: BETAS = (0.95, 0.999),
         alpha: float = 0.5,
         k: int = 6,
         n_sma_threshold: int = 5,
         degenerated_to_sgd: bool = False,
-        betas: BETAS = (0.95, 0.999),
-        eps: float = 1e-5,
         weight_decay: float = 0.0,
         weight_decouple: bool = True,
         fixed_decay: bool = False,
@@ -44,11 +43,13 @@ def __init__(
         r: float = 0.95,
         adanorm: bool = False,
         adam_debias: bool = False,
+        eps: float = 1e-5,
     ):
         self.validate_learning_rate(lr)
         self.validate_betas(betas)
-        self.validate_non_negative(weight_decay, 'weight_decay')
+        self.validate_range(alpha, 'alpha', 0.0, 1.0, range_type='[]')
         self.validate_positive(k, 'k')
+        self.validate_non_negative(weight_decay, 'weight_decay')
         self.validate_non_negative(eps, 'eps')
 
         self.n_sma_threshold = n_sma_threshold
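For context, a minimal usage sketch of the reordered constructor (an illustration, not part of the commit), assuming the Ranger class is importable from the pytorch_optimizer package; the keyword names and defaults below are taken from the diff above. Passing everything after params by keyword keeps callers independent of the parameter reordering.

# Minimal usage sketch (assumption: Ranger is exported by pytorch_optimizer
# and accepts the keyword arguments shown in the diff above).
import torch
from pytorch_optimizer import Ranger

model = torch.nn.Linear(10, 2)

# The new validate_range() call requires alpha to lie in [0.0, 1.0].
optimizer = Ranger(
    model.parameters(),
    lr=1e-3,
    betas=(0.95, 0.999),
    alpha=0.5,
    k=6,
    weight_decay=0.0,
    eps=1e-5,
)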