
Commit 524cb00

refactor: types to type
1 parent dc72798 commit 524cb00

Some content is hidden: large commits have part of their diff collapsed by default, so only a subset of the 92 changed files is shown below.

92 files changed (+91 / -91 lines)

pyproject.toml

Lines changed: 1 addition & 1 deletion

@@ -97,7 +97,7 @@ select = [
     "TID", "ARG", "ERA", "RUF", "YTT", "PL", "Q"
 ]
 ignore = [
-    "A005", "B905",
+    "B905",
     "D100", "D102", "D104", "D105", "D107", "D203", "D213", "D413",
     "PLR0912", "PLR0913", "PLR0915", "PLR2004",
     "Q003", "ARG002",

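The dropped ignore ties into the rename: A005 is ruff's builtin-module-shadowing rule (from flake8-builtins), which flags first-party modules whose names collide with standard-library modules. base/types.py shadowed the stdlib types module and needed the ignore; type is a builtin but not a stdlib module, so the renamed base/type.py should no longer trip the rule. A quick check of the collision (Python 3.10+, illustrative only, not part of this commit):

import sys

# A005 compares first-party module names against the stdlib module list
print('types' in sys.stdlib_module_names)  # True  -> types.py shadowed the stdlib
print('type' in sys.stdlib_module_names)   # False -> type.py does not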
pytorch_optimizer/base/optimizer.py

Lines changed: 1 addition & 1 deletion

@@ -6,7 +6,7 @@
 from torch.optim import Optimizer
 
 from pytorch_optimizer.base.exception import NegativeLRError, NegativeStepError
-from pytorch_optimizer.base.types import (
+from pytorch_optimizer.base.type import (
     BETAS,
     CLOSURE,
     DEFAULTS,
pytorch_optimizer/base/{types.py → type.py}

File renamed without changes.
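Judging from the names imported across these diffs, the renamed module holds the project's shared type aliases. A plausible reconstruction is sketched below; the actual definitions in base/type.py may differ:

from typing import Callable, Dict, Iterable, Literal, Optional, Tuple, Type, Union

import torch
from torch.optim import Optimizer
from torch.optim.lr_scheduler import _LRScheduler

# reconstructed from the names imported in this commit; definitions are assumptions
CLOSURE = Optional[Callable[[], float]]
LOSS = Optional[float]
BETAS = Union[Tuple[float, float], Tuple[float, float, float]]
DEFAULTS = Dict
PARAMETERS = Optional[Union[Iterable[Dict], Iterable[torch.Tensor]]]
OPTIMIZER = Type[Optimizer]
SCHEDULER = Type[_LRScheduler]
CLASS_MODE = Literal['binary', 'multiclass', 'multilabel']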

pytorch_optimizer/loss/dice.py

Lines changed: 1 addition & 1 deletion

@@ -4,7 +4,7 @@
 from torch.nn.functional import logsigmoid, one_hot
 from torch.nn.modules.loss import _Loss
 
-from pytorch_optimizer.base.types import CLASS_MODE
+from pytorch_optimizer.base.type import CLASS_MODE
 
 
 def soft_dice_score(
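Only the import changes in this file. For context, a soft Dice score in its usual smoothed form looks roughly like the sketch below; the real soft_dice_score signature and reduction handling may differ:

import torch


def soft_dice(output: torch.Tensor, target: torch.Tensor, smooth: float = 0.0, eps: float = 1e-6) -> torch.Tensor:
    # Dice = 2|A∩B| / (|A| + |B|), smoothed so empty masks do not divide by zero
    intersection = torch.sum(output * target)
    cardinality = torch.sum(output + target)
    return (2.0 * intersection + smooth) / (cardinality + smooth).clamp_min(eps)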

pytorch_optimizer/loss/jaccard.py

Lines changed: 1 addition & 1 deletion

@@ -4,7 +4,7 @@
 from torch.nn.functional import logsigmoid, one_hot
 from torch.nn.modules.loss import _Loss
 
-from pytorch_optimizer.base.types import CLASS_MODE
+from pytorch_optimizer.base.type import CLASS_MODE
 
 
 def soft_jaccard_score(
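Same one-line change here. The soft Jaccard (IoU) score differs from Dice only in its denominator; a companion sketch under the same assumptions, not necessarily this file's exact code:

import torch


def soft_jaccard(output: torch.Tensor, target: torch.Tensor, smooth: float = 0.0, eps: float = 1e-6) -> torch.Tensor:
    # IoU = |A∩B| / |A∪B|, with |A∪B| = |A| + |B| - |A∩B|
    intersection = torch.sum(output * target)
    union = torch.sum(output + target) - intersection
    return (intersection + smooth) / (union + smooth).clamp_min(eps)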

pytorch_optimizer/lr_scheduler/__init__.py

Lines changed: 1 addition & 1 deletion

@@ -14,7 +14,7 @@
     StepLR,
 )
 
-from pytorch_optimizer.base.types import SCHEDULER
+from pytorch_optimizer.base.type import SCHEDULER
 from pytorch_optimizer.lr_scheduler.chebyshev import get_chebyshev_perm_steps, get_chebyshev_schedule
 from pytorch_optimizer.lr_scheduler.cosine_anealing import CosineAnnealingWarmupRestarts
 from pytorch_optimizer.lr_scheduler.experimental.deberta_v3_lr_scheduler import deberta_v3_large_lr_scheduler
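Because this commit removes the old module path outright (a rename, with no alias left behind), third-party code that imported pytorch_optimizer.base.types directly will break. A consumer that must work across versions could hedge with a fallback import, sketched here:

try:
    from pytorch_optimizer.base.type import SCHEDULER  # new path, this commit onward
except ImportError:
    from pytorch_optimizer.base.types import SCHEDULER  # old path, pre-rename releases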

pytorch_optimizer/lr_scheduler/experimental/deberta_v3_lr_scheduler.py

Lines changed: 1 addition & 1 deletion

@@ -1,6 +1,6 @@
 from torch import nn
 
-from pytorch_optimizer.base.types import PARAMETERS
+from pytorch_optimizer.base.type import PARAMETERS
 
 
 def deberta_v3_large_lr_scheduler(
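deberta_v3_large_lr_scheduler uses the PARAMETERS alias and builds parameter groups with layer-wise learning-rate decay for a DeBERTa-v3-large model. The sketch below shows the general technique only; the function name, group layout, and defaults here are illustrative, not the library's actual signature:

from torch import nn


def layerwise_lr_groups(encoder: nn.Module, head: nn.Module,
                        head_lr: float = 1e-4, base_lr: float = 1e-5, decay: float = 0.9) -> list:
    # illustrative helper: later encoder layers keep a larger lr,
    # earlier layers decay geometrically toward zero
    groups = [{'params': head.parameters(), 'lr': head_lr}]
    lr = base_lr
    for layer in reversed(list(encoder.children())):
        groups.append({'params': layer.parameters(), 'lr': lr})
        lr *= decay
    return groups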

pytorch_optimizer/optimizer/__init__.py

Lines changed: 1 addition & 1 deletion

@@ -7,7 +7,7 @@
 from torch import nn
 from torch.optim import SGD, Adam, AdamW, Optimizer
 
-from pytorch_optimizer.base.types import OPTIMIZER, PARAMETERS
+from pytorch_optimizer.base.type import OPTIMIZER, PARAMETERS
 from pytorch_optimizer.optimizer.a2grad import A2Grad
 from pytorch_optimizer.optimizer.adabelief import AdaBelief
 from pytorch_optimizer.optimizer.adabound import AdaBound
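This registry module re-exports each optimizer class, so downstream code typically imports optimizers rather than the base.type aliases. A minimal usage sketch with AdaBelief (model and hyperparameters illustrative):

import torch
from torch import nn

from pytorch_optimizer.optimizer.adabelief import AdaBelief

model = nn.Linear(4, 2)
optimizer = AdaBelief(model.parameters(), lr=1e-3)

loss = model(torch.randn(8, 4)).sum()
loss.backward()
optimizer.step()
optimizer.zero_grad()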

pytorch_optimizer/optimizer/a2grad.py

Lines changed: 1 addition & 1 deletion

@@ -5,7 +5,7 @@
 
 from pytorch_optimizer.base.exception import NoSparseGradientError
 from pytorch_optimizer.base.optimizer import BaseOptimizer
-from pytorch_optimizer.base.types import CLOSURE, DEFAULTS, LOSS, PARAMETERS
+from pytorch_optimizer.base.type import CLOSURE, DEFAULTS, LOSS, PARAMETERS
 
 VARIANTS = Literal['uni', 'inc', 'exp']
 
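VARIANTS is a Literal alias naming A2Grad's three step-size schedules: uniform ('uni'), incremental ('inc'), and exponential ('exp'). Type checkers enforce a Literal statically; a runtime guard in the same spirit might look like this (illustrative, not necessarily the file's actual validation):

from typing import Literal, get_args

VARIANTS = Literal['uni', 'inc', 'exp']


def validate_variant(variant: str) -> str:
    # reject values a type checker would also flag against the Literal
    if variant not in get_args(VARIANTS):
        raise ValueError(f'invalid variant: {variant}')
    return variant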
pytorch_optimizer/optimizer/adabelief.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44

55
from pytorch_optimizer.base.exception import NoSparseGradientError
66
from pytorch_optimizer.base.optimizer import BaseOptimizer
7-
from pytorch_optimizer.base.types import BETAS, CLOSURE, DEFAULTS, LOSS, PARAMETERS
7+
from pytorch_optimizer.base.type import BETAS, CLOSURE, DEFAULTS, LOSS, PARAMETERS
88

99

1010
class AdaBelief(BaseOptimizer):
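AdaBelief adapts the step size by the variance of the gradient around its running mean rather than Adam's raw second moment, stepping further when the gradient matches its "belief". A dense per-tensor sketch of the core update, following the published algorithm without weight decay or rectification (names and defaults illustrative, not this class's actual code):

import torch


def adabelief_step(param: torch.Tensor, grad: torch.Tensor,
                   exp_avg: torch.Tensor, exp_avg_var: torch.Tensor,
                   step: int, lr: float = 1e-3, betas: tuple = (0.9, 0.999), eps: float = 1e-16) -> None:
    beta1, beta2 = betas
    exp_avg.mul_(beta1).add_(grad, alpha=1.0 - beta1)  # m_t: EMA of the gradient
    grad_residual = grad - exp_avg                     # g_t - m_t: deviation from belief
    exp_avg_var.mul_(beta2).addcmul_(grad_residual, grad_residual, value=1.0 - beta2).add_(eps)  # s_t
    bias_correction1 = 1.0 - beta1 ** step
    bias_correction2 = 1.0 - beta2 ** step
    denom = (exp_avg_var / bias_correction2).sqrt_().add_(eps)
    param.addcdiv_(exp_avg, denom, value=-lr / bias_correction1)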
