We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 26fca7b · commit 27d6b99 — Copy full SHA for 27d6b99
pytorch_optimizer/optimizer/adabelief.py
@@ -95,7 +95,9 @@ def step(self, closure: CLOSURE = None) -> LOSS:
95
96
for group in self.param_groups:
97
beta1, beta2 = group['betas']
98
- n_sma_max: float = 2 / (1 - beta2) - 1
+ if self.rectify:
99
+ n_sma_max: float = 2.0 / (1.0 - beta2) - 1.0
100
+
101
for p in group['params']:
102
if p.grad is None:
103
continue
0 commit comments