1 parent d657060 commit 51fdbce
pytorch_optimizer/adabelief.py
@@ -150,7 +150,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
             if group['amsgrad']:
                 exp_avg_var = torch.max(state['max_exp_avg_var'], exp_avg_var)

-            de_nom = (exp_avg_var.add_(group['eps']).sqrt() / math.sqrt(bias_correction2)).add_(group['eps'])
+            de_nom = (exp_avg_var.sqrt() / math.sqrt(bias_correction2)).add_(group['eps'])

             if not self.rectify:
                 step_size = group['lr']
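Why the fix matters: in the old line, exp_avg_var.add_(group['eps']) is an in-place op, so on the non-AMSGrad path it would have written eps back into the persistent variance state on every step, and eps was also applied twice (once before the sqrt and again after the division). Below is a minimal sketch of the corrected denominator computation outside the optimizer; the variable names mirror the diff, but the values are illustrative stand-ins, not taken from the repository.

import math
import torch

# Stand-ins for optimizer state and hyper-parameters (illustrative values).
exp_avg_var = torch.rand(10)   # plays the role of state['exp_avg_var']
eps = 1e-16
beta2 = 0.999
step = 100
bias_correction2 = 1 - beta2 ** step

# Old form (buggy): add_() mutates exp_avg_var in place, leaking eps into
# the running variance on every step, and eps is added both before the
# sqrt and after the division.
# de_nom = (exp_avg_var.add_(eps).sqrt() / math.sqrt(bias_correction2)).add_(eps)

# Fixed form: the state tensor is left untouched, and eps is applied
# exactly once, after the bias-corrected sqrt.
de_nom = (exp_avg_var.sqrt() / math.sqrt(bias_correction2)).add_(eps)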