1 parent b82f7c4 commit a3f84ff
pytorch_optimizer/optimizer/adopt.py
@@ -115,8 +115,6 @@ def step(self, closure: CLOSURE = None) -> LOSS:
                     exp_avg_sq.addcmul_(grad, grad.conj())
                     continue
 
-                exp_avg_sq.mul_(beta2).addcmul_(grad, grad.conj(), value=1.0 - beta2)
-
                 de_nom = exp_avg_sq.sqrt().clamp_(min=group['eps'])
 
                 normed_grad = grad.div(de_nom)
@@ -137,4 +135,6 @@ def step(self, closure: CLOSURE = None) -> LOSS:
 
                 p.add_(update, alpha=-lr)
 
+                exp_avg_sq.mul_(beta2).addcmul_(grad, grad.conj(), value=1.0 - beta2)
+
         return loss
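
For context, this commit moves the second-moment update in ADOPT's step() below the parameter update: de_nom is now built only from gradients of previous steps, and exp_avg_sq absorbs the current gradient afterwards. Below is a minimal sketch of that ordering, not the optimizer itself: the standalone tensors, the placeholder hyper-parameter values (beta2, eps, lr), and the use of the normalized gradient as `update` are all assumptions made for brevity.

# Minimal sketch of the post-change ordering (hypothetical standalone tensors;
# the real logic lives inside the optimizer's step() shown in the diff above).
import torch

p = torch.zeros(4)
grad = torch.randn(4)
exp_avg_sq = torch.ones_like(p)          # second-moment estimate from earlier steps
beta2, eps, lr = 0.9999, 1e-6, 1e-3      # placeholder hyper-parameter values

# 1) normalize the current gradient with the existing second-moment estimate
de_nom = exp_avg_sq.sqrt().clamp_(min=eps)
normed_grad = grad.div(de_nom)

# 2) apply the parameter update (here `update` is simply the normalized gradient)
update = normed_grad
p.add_(update, alpha=-lr)

# 3) only afterwards fold the current gradient into exp_avg_sq, mirroring the
#    line this commit moves below p.add_()
exp_avg_sq.mul_(beta2).addcmul_(grad, grad.conj(), value=1.0 - beta2)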