We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 71da75a · commit 0b86b5b — Copy full SHA for 0b86b5b
pytorch_optimizer/optimizer/adashift.py
@@ -69,7 +69,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
69
70
beta1, beta2 = group['betas']
71
72
- exp_weight_sum: int = sum(beta1**i for i in range(group['keep_num']))
+ exp_weight_sum: int = sum(beta1 ** i for i in range(group['keep_num'])) # fmt: skip
73
first_grad_weight: float = beta1 ** (group['keep_num'] - 1) / exp_weight_sum
74
last_grad_weight: float = 1.0 / exp_weight_sum
75
0 commit comments