We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent fa05118 · commit 808ee73 — Copy full SHA for 808ee73
pytorch_optimizer/lars.py
@@ -71,6 +71,9 @@ def step(self, closure: CLOSURE = None) -> LOSS:
71
if p.grad is None:
72
continue
73
74
+ if p.grad.data.is_sparse:
75
+ raise RuntimeError('LARS does not support sparse gradients')
76
+
77
dp = p.grad
78
79
if p.ndim > 1: # if not normalization gamma/beta or bias
0 commit comments