Skip to content

Commit 00236fe

Browse files
committed
docs: Examples
1 parent c28ea08 commit 00236fe

File tree

6 files changed

+66
-0
lines changed

6 files changed

+66
-0
lines changed

pytorch_optimizer/adamp.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,17 @@
1717
class AdamP(Optimizer):
1818
"""
1919
Reference : https://github.com/clovaai/AdamP/blob/master/adamp/adamp.py
20+
Example :
21+
from pytorch_optimizer import AdamP
22+
...
23+
model = YourModel()
24+
optimizer = AdamP(model.parameters())
25+
...
26+
for input, output in data:
27+
optimizer.zero_grad()
28+
loss = loss_function(output, model(input))
29+
loss.backward()
30+
optimizer.step()
2031
"""
2132

2233
def __init__(

pytorch_optimizer/madgrad.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,17 @@
1414
class MADGRAD(Optimizer):
1515
"""
1616
Reference : https://github.com/facebookresearch/madgrad/blob/main/madgrad/madgrad.py
17+
Example :
18+
from pytorch_optimizer import MADGRAD
19+
...
20+
model = YourModel()
21+
optimizer = MADGRAD(model.parameters())
22+
...
23+
for input, output in data:
24+
optimizer.zero_grad()
25+
loss = loss_function(output, model(input))
26+
loss.backward()
27+
optimizer.step()
1728
"""
1829

1930
def __init__(

pytorch_optimizer/radam.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,17 @@
1616
class RAdam(Optimizer):
1717
"""
1818
Reference : https://github.com/LiyuanLucasLiu/RAdam/blob/master/radam/radam.py#L5
19+
Example :
20+
from pytorch_optimizer import RAdam
21+
...
22+
model = YourModel()
23+
optimizer = RAdam(model.parameters())
24+
...
25+
for input, output in data:
26+
optimizer.zero_grad()
27+
loss = loss_function(output, model(input))
28+
loss.backward()
29+
optimizer.step()
1930
"""
2031

2132
def __init__(

pytorch_optimizer/ranger.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,17 @@
1717
class Ranger(Optimizer):
1818
"""
1919
Reference : https://github.com/lessw2020/Ranger-Deep-Learning-Optimizer/blob/master/ranger/ranger.py
20+
Example :
21+
from pytorch_optimizer import Ranger
22+
...
23+
model = YourModel()
24+
optimizer = Ranger(model.parameters())
25+
...
26+
for input, output in data:
27+
optimizer.zero_grad()
28+
loss = loss_function(output, model(input))
29+
loss.backward()
30+
optimizer.step()
2031
"""
2132

2233
def __init__(

pytorch_optimizer/ranger21.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,17 @@
3232
class Ranger21(Optimizer):
3333
"""
3434
Reference : https://github.com/lessw2020/Ranger21/blob/main/ranger21/ranger21.py
35+
Example :
36+
from pytorch_optimizer import Ranger21
37+
...
38+
model = YourModel()
39+
optimizer = Ranger21(model.parameters())
40+
...
41+
for input, output in data:
42+
optimizer.zero_grad()
43+
loss = loss_function(output, model(input))
44+
loss.backward()
45+
optimizer.step()
3546
"""
3647

3748
def __init__(

pytorch_optimizer/sgdp.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,17 @@
1111
class SGDP(Optimizer):
1212
"""
1313
Reference : https://github.com/clovaai/AdamP/blob/master/adamp/sgdp.py
14+
Example :
15+
from pytorch_optimizer import SGDP
16+
...
17+
model = YourModel()
18+
optimizer = SGDP(model.parameters())
19+
...
20+
for input, output in data:
21+
optimizer.zero_grad()
22+
loss = loss_function(output, model(input))
23+
loss.backward()
24+
optimizer.step()
1425
"""
1526

1627
def __init__(

0 commit comments

Comments
 (0)