Commit 34fd10b

Merge pull request #76 from kozistr/refactor/hubconf
[Docs, Refactor] torch.hub usage
2 parents: 1949e2f + ac5daac

File tree

3 files changed: +29 -19 lines changed

Makefile

Lines changed: 4 additions & 4 deletions
@@ -5,15 +5,15 @@ init:
 	python -m poetry install

 format:
-	isort --profile black -l 119 pytorch_optimizer tests lint.py
-	black -S -l 119 pytorch_optimizer tests lint.py
+	isort --profile black -l 119 pytorch_optimizer tests lint.py hubconf.py
+	black -S -l 119 pytorch_optimizer tests lint.py hubconf.py

 test:
 	python -m pytest -sv -vv --cov=pytorch_optimizer --cov-report=xml ./tests

 check:
-	isort --check-only --profile black -l 119 pytorch_optimizer tests lint.py
-	black -S -l 119 --check pytorch_optimizer tests lint.py
+	isort --check-only --profile black -l 119 pytorch_optimizer tests lint.py hubconf.py
+	black -S -l 119 --check pytorch_optimizer tests lint.py hubconf.py
 	python lint.py

 requirements:

README.rst

Lines changed: 12 additions & 7 deletions
@@ -38,22 +38,27 @@ Simple Usage

     from pytorch_optimizer import AdamP

-    ...
     model = YourModel()
     optimizer = AdamP(model.parameters())
-    ...

-or you can use optimizer loader, simply passing a name of the optimizer.
-
-::
+    # or you can use optimizer loader, simply passing a name of the optimizer.

     from pytorch_optimizer import load_optimizer

-    ...
     model = YourModel()
     opt = load_optimizer(optimizer='adamp')
     optimizer = opt(model.parameters())
-    ...
+
+Also, you can load the optimizer via `torch.hub`
+
+::
+
+    import torch
+
+    model = YourModel()
+    opt = torch.hub.load('kozistr/pytorch_optimizer', 'adamp')
+    optimizer = opt(model.parameters())
+

 Supported Optimizers
 --------------------
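
For context, a minimal end-to-end sketch of the `torch.hub` path the README now documents; the tiny model and random batch below are hypothetical stand-ins for YourModel and real data, and `torch.hub.load` needs network access to fetch the repo:

    import torch
    import torch.nn as nn

    # hypothetical stand-in for YourModel
    model = nn.Sequential(nn.Linear(8, 16), nn.ReLU(), nn.Linear(16, 1))

    # fetches hubconf.py from the repo's default branch and calls the
    # 'adamp' entrypoint, which returns the AdamP optimizer class
    opt = torch.hub.load('kozistr/pytorch_optimizer', 'adamp')
    optimizer = opt(model.parameters())

    x, y = torch.randn(4, 8), torch.randn(4, 1)  # hypothetical batch
    loss = nn.functional.mse_loss(model(x), y)

    optimizer.zero_grad()
    loss.backward()
    optimizer.step()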

hubconf.py

Lines changed: 13 additions & 8 deletions
@@ -1,15 +1,20 @@
-dependencies = ['torch']
+"""
+PyTorch Hub models
+Usage:
+    import torch
+    optimizer = torch.hub.load('kozistr/pytorch_optimizer', 'adamp')
+"""
+from functools import partial as _partial
+from functools import update_wrapper as _update_wrapper

-from functools import partial as _partial, update_wrapper as _update_wrapper
+from pytorch_optimizer import get_supported_optimizers as _get_supported_optimizers
+from pytorch_optimizer import load_optimizer as _load_optimizer

-from pytorch_optimizer import (
-    get_supported_optimizers as _get_supported_optimizers,
-    load_optimizer as _load_optimizer,
-)
+dependencies = ['torch']

 for optimizer in _get_supported_optimizers():
-    name = optimizer.__name__
-    for n in (name, name.lower()):
+    name: str = optimizer.__name__
+    for n in (name, name.lower(), name.upper()):
         func = _partial(_load_optimizer, optimizer=n)
         _update_wrapper(func, optimizer)
         globals()[n] = func
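
The loop above is what makes the hub entrypoints exist: each supported optimizer is registered under its original name, its lower-case name, and (after this commit) its upper-case name. A minimal local sketch of the same registration, assuming `pytorch_optimizer` is installed; the `entrypoints` dict stands in for hubconf.py's module globals:

    from functools import partial, update_wrapper

    from pytorch_optimizer import get_supported_optimizers, load_optimizer

    entrypoints = {}  # stands in for globals() in hubconf.py
    for optimizer in get_supported_optimizers():
        name = optimizer.__name__
        # e.g. 'AdamP', 'adamp', and 'ADAMP' all map to the same loader
        for n in (name, name.lower(), name.upper()):
            func = partial(load_optimizer, optimizer=n)
            update_wrapper(func, optimizer)  # copy __name__/__doc__ for help()
            entrypoints[n] = func

    opt = entrypoints['adamp']()  # same class torch.hub.load(..., 'adamp') returns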
