We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent e74a8d8 commit 9aea11d — Copy full SHA for 9aea11d
pytorch_optimizer/agc.py
@@ -4,10 +4,10 @@
4
5
6
def agc(p: torch.Tensor, agc_eps: float, agc_clip_val: float, eps: float = 1e-6):
7
- """Clip gradient values in excess of the unit-wise norm.
8
- :param p: parameter.
9
- :param agc_eps: float.
10
- :param agc_clip_val: float.
+ """Clip gradient values in excess of the unit-wise norm
+ :param p: parameter. parameter
+ :param agc_eps: float. epsilon
+ :param agc_clip_val: float. norm clip
11
:param eps: float. simple stop from div by zero and no relation to standard optimizer eps
12
"""
13
p_norm = unit_norm(p).clamp_(agc_eps)
0 commit comments