1 parent 11286cd · commit f7fa2c8
pytorch_optimizer/optimizer/novograd.py
@@ -99,7 +99,7 @@ def step(self, closure: CLOSURE = None) -> LOSS:
                 raise NoSparseGradientError(self.__str__)
 
             state = self.state[p]
-            g_2 = grad**2
+            g_2 = grad ** 2  # fmt: skip
 
             if len(state) == 0:
                 state['moments'] = grad.div(g_2.sqrt() + group['eps']) + weight_decay * p
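For context, the change only adds spaces around `**` plus a formatter directive: `# fmt: skip` is honored by Black (and Black-compatible formatters such as Ruff's), and tells the tool to leave that single line exactly as written. Without it, Black collapses spacing around `**` when both operands are simple names or literals, so the line would be rewritten back to `grad**2` on the next format pass. A minimal sketch of the effect, assuming the repository is formatted with Black (the directive strongly suggests it); the `torch.randn` tensor below is a stand-in for the real parameter gradient:

import torch

grad = torch.randn(4)  # stand-in; in the optimizer this is a parameter gradient

# Black would normally rewrite `grad ** 2` as `grad**2` because both
# operands are simple. The trailing `# fmt: skip` exempts this one line
# from formatting, preserving the spaced form.
g_2 = grad ** 2  # fmt: skip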