We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent b58a01e commit 42edb7b — Copy full SHA for 42edb7b
pytorch_optimizer/optimizer/dadapt.py
@@ -327,7 +327,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
327
328
# it's not Adam Debias
329
d_lr: float = self.apply_adam_debias(
330
- group['bias_correction'], step_size=d * lr, bias_correction1=bias_correction
+ not group['bias_correction'], step_size=d * lr, bias_correction1=bias_correction
331
)
332
333
sk_l1 = torch.tensor([0.0], device=device)
0 commit comments