We read every piece of feedback and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 9aea11d commit e874336Copy full SHA for e874336
pytorch_optimizer/ranger21.py
@@ -185,7 +185,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
185
param_size: int = 0
186
variance_ma_sum: float = 1.0
187
188
- # Phase 1 - Accumulate all of the variance_ma_sum to use in stable weight decay
+ # Phase 1 - Accumulate all the variance_ma_sum to use in stable weight decay
189
for group in self.param_groups:
190
for p in group['params']:
191
if p.grad is None:
0 commit comments