File tree Expand file tree Collapse file tree 6 files changed +66
-0
lines changed Expand file tree Collapse file tree 6 files changed +66
-0
lines changed Original file line number Diff line number Diff line change 1717class AdamP (Optimizer ):
1818 """
1919 Reference : https://github.com/clovaai/AdamP/blob/master/adamp/adamp.py
20+ Example :
21+ from pytorch_optimizer import AdamP
22+ ...
23+ model = YourModel()
24+ optimizer = AdamP(model.parameters())
25+ ...
26+ for input, output in data:
27+ optimizer.zero_grad()
28+ loss = loss_function(output, model(input))
29+ loss.backward()
30+ optimizer.step()
2031 """
2132
2233 def __init__ (
Original file line number Diff line number Diff line change 1414class MADGRAD (Optimizer ):
1515 """
1616 Reference : https://github.com/facebookresearch/madgrad/blob/main/madgrad/madgrad.py
17+ Example :
18+ from pytorch_optimizer import MADGRAD
19+ ...
20+ model = YourModel()
21+ optimizer = MADGRAD(model.parameters())
22+ ...
23+ for input, output in data:
24+ optimizer.zero_grad()
25+ loss = loss_function(output, model(input))
26+ loss.backward()
27+ optimizer.step()
1728 """
1829
1930 def __init__ (
Original file line number Diff line number Diff line change 1616class RAdam (Optimizer ):
1717 """
1818 Reference : https://github.com/LiyuanLucasLiu/RAdam/blob/master/radam/radam.py#L5
19+ Example :
20+ from pytorch_optimizer import RAdam
21+ ...
22+ model = YourModel()
23+ optimizer = RAdam(model.parameters())
24+ ...
25+ for input, output in data:
26+ optimizer.zero_grad()
27+ loss = loss_function(output, model(input))
28+ loss.backward()
29+ optimizer.step()
1930 """
2031
2132 def __init__ (
Original file line number Diff line number Diff line change 1717class Ranger (Optimizer ):
1818 """
1919 Reference : https://github.com/lessw2020/Ranger-Deep-Learning-Optimizer/blob/master/ranger/ranger.py
20+ Example :
21+ from pytorch_optimizer import Ranger
22+ ...
23+ model = YourModel()
24+ optimizer = Ranger(model.parameters())
25+ ...
26+ for input, output in data:
27+ optimizer.zero_grad()
28+ loss = loss_function(output, model(input))
29+ loss.backward()
30+ optimizer.step()
2031 """
2132
2233 def __init__ (
Original file line number Diff line number Diff line change 3232class Ranger21 (Optimizer ):
3333 """
3434 Reference : https://github.com/lessw2020/Ranger21/blob/main/ranger21/ranger21.py
35+ Example :
36+ from pytorch_optimizer import Ranger21
37+ ...
38+ model = YourModel()
39+ optimizer = Ranger21(model.parameters())
40+ ...
41+ for input, output in data:
42+ optimizer.zero_grad()
43+ loss = loss_function(output, model(input))
44+ loss.backward()
45+ optimizer.step()
3546 """
3647
3748 def __init__ (
Original file line number Diff line number Diff line change 1111class SGDP (Optimizer ):
1212 """
1313 Reference : https://github.com/clovaai/AdamP/blob/master/adamp/sgdp.py
14+ Example :
15+ from pytorch_optimizer import SGDP
16+ ...
17+ model = YourModel()
18+ optimizer = SGDP(model.parameters())
19+ ...
20+ for input, output in data:
21+ optimizer.zero_grad()
22+ loss = loss_function(output, model(input))
23+ loss.backward()
24+ optimizer.step()
1425 """
1526
1627 def __init__ (
You can’t perform that action at this time.
0 commit comments