1 parent 2d0fbd5 commit 6f7451e
pytorch_optimizer/optimizer/adashift.py
@@ -86,7 +86,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
     state = self.state[p]

     if len(state) == 0:
-        state['grad_queue'] = deque([p.grad.clone()], maxlen=group['keep_num'])
+        state['grad_queue'] = deque([grad.clone()], maxlen=group['keep_num'])
         state['exp_avg'] = torch.zeros_like(p)
         state['exp_avg_sq'] = torch.zeros_like(p)
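For context, here is a minimal, self-contained sketch of the kind of state initialization this diff touches. It is not the library's actual class; the helper name and the `keep_num` parameter passed directly (rather than via `group['keep_num']`) are assumptions for illustration. The point of the fix is that the queue is seeded from the local `grad` variable that the step loop already obtained from `p.grad`, rather than re-reading `p.grad`, so the queued gradient matches whatever the loop has already validated.

# Hypothetical sketch of AdaShift-style state initialization, assuming `grad`
# was taken from p.grad (and checked) earlier in the parameter loop.
from collections import deque

import torch


def init_adashift_state(p: torch.Tensor, state: dict, keep_num: int) -> None:
    # In the real optimizer this runs inside step(); `grad` is already set here.
    grad = p.grad
    if len(state) == 0:
        # Queue of the most recent `keep_num` gradients, seeded with the current one.
        state['grad_queue'] = deque([grad.clone()], maxlen=keep_num)
        # First and second moment buffers, shaped like the parameter.
        state['exp_avg'] = torch.zeros_like(p)
        state['exp_avg_sq'] = torch.zeros_like(p)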