We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 1445e85 · commit 9f55ec6 (copy full SHA for 9f55ec6)
pytorch_optimizer/ranger21.py
@@ -241,11 +241,11 @@ def step(self, closure: CLOSURE = None) -> LOSS:
241
variance_ma.mul_(beta2).addcmul_(grad, grad, value=1.0 - beta2)
242
variance_ma_sum += (variance_ma / bias_correction2).sum()
243
244
- if not self.param_size:
+ if self.param_size == 0:
245
self.param_size = param_size
246
247
# stable weight decay
248
- variance_normalized = math.sqrt(variance_ma_sum / param_size)
+ variance_normalized = math.sqrt(variance_ma_sum / self.param_size)
249
if math.isnan(variance_normalized):
250
raise RuntimeError('hit nan for variance_normalized')
251
0 commit comments