We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 0464b21 · commit fbc09cc (Copy full SHA for fbc09cc)
pytorch_optimizer/optimizer/adams.py
@@ -118,7 +118,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
118
if param_size == 0:
119
raise ZeroParameterSizeError()
120
121
- exp_avg_sq_hat_mean = math.sqrt(exp_avg_sq_hat_sum / param_size)
+ exp_avg_sq_hat_mean = math.sqrt(exp_avg_sq_hat_sum / param_size) + self.eps
122
123
for group in self.param_groups:
124
beta1, beta2 = group['betas']
0 commit comments