
Commit 5a65b51

Merge pull request #271 from kozistr/fix/kwargs

[Fix] Add missing kwargs

2 parents a8eb19c + 480bd4c

Note: this is a large commit, so some file diffs are collapsed by default and not shown below.

70 files changed: +126 −15 lines
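The recurring change across the optimizer files below is a trailing `**kwargs` parameter added to each `__init__`, so that unrecognized keyword arguments are absorbed instead of raising a `TypeError`. Here is a minimal sketch of why that matters when many optimizers are constructed from one shared settings dict (the classes and config below are hypothetical illustrations, not code from this commit):

```python
# Hypothetical illustration of the pattern this commit standardizes.
# One settings dict is passed to every optimizer; keys an optimizer does
# not define are absorbed by its trailing **kwargs instead of raising
# "TypeError: __init__() got an unexpected keyword argument".

class OptimizerA:
    def __init__(self, params, lr: float = 1e-3, **kwargs):
        self.params, self.lr = params, lr

class OptimizerB:
    def __init__(self, params, lr: float = 1e-3, rho: float = 0.5, **kwargs):
        self.params, self.lr, self.rho = params, lr, rho

config = {'lr': 1e-2, 'rho': 0.9}  # 'rho' is only meaningful to OptimizerB

opt_a = OptimizerA([], **config)  # succeeds: 'rho' lands in **kwargs
opt_b = OptimizerB([], **config)  # succeeds: 'rho' is a named parameter
```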

docs/changelogs/v3.1.2.md

Lines changed: 5 additions & 0 deletions

pyproject.toml

Lines changed: 1 addition & 1 deletion
```diff
@@ -71,7 +71,7 @@ lint.select = [
 ]
 lint.ignore = [
     "B905", "D100", "D102", "D104", "D105", "D107", "D203", "D213", "D413", "PIE790", "PLR0912", "PLR0913", "PLR0915",
-    "PLR2004", "RUF013", "Q003",
+    "PLR2004", "RUF013", "Q003", "ARG002",
 ]
 lint.fixable = ["ALL"]
 lint.unfixable = ["F401"]
```
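`ARG002` is Ruff's "unused method argument" rule. The newly added `**kwargs` parameters are intentionally unused, so the rule is ignored project-wide here, which in turn lets the per-line suppression in `pytorch_optimizer/lr_scheduler/rex.py` below be dropped. A small illustrative example of what the rule flags (hypothetical code, not from this commit):

```python
class ExampleScheduler:
    # Ruff's ARG002 would flag `epoch`: it is a method argument that the
    # body never reads. With "ARG002" in lint.ignore, no per-line
    # `# noqa: ARG002` comment is needed to silence the warning.
    def step(self, epoch: int = 0) -> float:
        return 0.001
```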

pytorch_optimizer/lr_scheduler/rex.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -52,7 +52,7 @@ def get_linear_lr(self) -> float:
 
         return self.min_lr + (self.max_lr - self.min_lr) * ((1.0 - progress) / (1.0 - progress / 2.0))
 
-    def step(self, epoch: Optional[int] = None) -> float:  # noqa: ARG002
+    def step(self, epoch: Optional[int] = None) -> float:
        value: float = self.get_linear_lr()
 
        self.step_t += 1
```
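For context, a self-contained sketch of the `step()` contract shown in this hunk: the optional `epoch` argument exists for API compatibility with other schedulers but is never read, which is exactly the pattern ARG002 flags. The class below is a simplified stand-in built around the formula visible in the diff, not the real implementation in `rex.py`:

```python
from typing import Optional

class LinearScheduler:  # hypothetical stand-in, not the actual REX class
    def __init__(self, min_lr: float, max_lr: float, total_steps: int):
        self.min_lr, self.max_lr, self.total_steps = min_lr, max_lr, total_steps
        self.step_t = 0

    def get_linear_lr(self) -> float:
        progress = self.step_t / self.total_steps
        return self.min_lr + (self.max_lr - self.min_lr) * ((1.0 - progress) / (1.0 - progress / 2.0))

    def step(self, epoch: Optional[int] = None) -> float:
        # `epoch` is accepted for interface compatibility but never used.
        value: float = self.get_linear_lr()
        self.step_t += 1
        return value

sched = LinearScheduler(min_lr=1e-6, max_lr=1e-3, total_steps=100)
print(sched.step())         # works with no epoch
print(sched.step(epoch=1))  # epoch accepted but ignored
```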

pytorch_optimizer/optimizer/a2grad.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -27,6 +27,7 @@ def __init__(
         lips: float = 10.0,
         rho: float = 0.5,
         variant: str = 'uni',
+        **kwargs,
     ):
         self.validate_learning_rate(lr)
         self.validate_non_negative(lips, 'lips')
```
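The same one-line `**kwargs` addition repeats across the remaining optimizer files below. A hedged before/after illustration of the behavioral difference (toy classes, not the actual A2Grad signature):

```python
class WithoutKwargs:
    def __init__(self, params, lr: float = 1.0, variant: str = 'uni'):
        self.params, self.lr, self.variant = params, lr, variant

class WithKwargs:
    def __init__(self, params, lr: float = 1.0, variant: str = 'uni', **kwargs):
        self.params, self.lr, self.variant = params, lr, variant

try:
    WithoutKwargs([], weight_decay=0.0)  # unknown kwarg -> TypeError
except TypeError as err:
    print(err)

WithKwargs([], weight_decay=0.0)  # unknown kwarg is silently absorbed
```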

pytorch_optimizer/optimizer/adabelief.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -42,6 +42,7 @@ def __init__(
         adanorm: bool = False,
         adam_debias: bool = False,
         eps: float = 1e-16,
+        **kwargs,
     ):
         self.validate_learning_rate(lr)
         self.validate_betas(betas)
```

pytorch_optimizer/optimizer/adabound.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -37,6 +37,7 @@ def __init__(
         ams_bound: bool = False,
         adam_debias: bool = False,
         eps: float = 1e-8,
+        **kwargs,
     ):
         self.validate_learning_rate(lr)
         self.validate_betas(betas)
```

pytorch_optimizer/optimizer/adadelta.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -26,6 +26,7 @@ def __init__(
         weight_decouple: bool = False,
         fixed_decay: bool = False,
         eps: float = 1e-6,
+        **kwargs,
     ):
         self.validate_learning_rate(lr)
         self.validate_range(rho, 'rho', 0.0, 1.0)
```

pytorch_optimizer/optimizer/adafactor.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -49,6 +49,7 @@ def __init__(
         eps1: float = 1e-30,
         eps2: float = 1e-3,
         momentum_dtype: torch.dtype = torch.bfloat16,
+        **kwargs,
     ):
         self.validate_learning_rate(lr)
         self.validate_betas(betas)
```

pytorch_optimizer/optimizer/adahessian.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -40,6 +40,7 @@ def __init__(
         hessian_distribution: HUTCHINSON_G = 'rademacher',
         adam_debias: bool = False,
         eps: float = 1e-16,
+        **kwargs,
     ):
         self.validate_learning_rate(lr)
         self.validate_betas(betas)
```

pytorch_optimizer/optimizer/adai.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -35,6 +35,7 @@ def __init__(
         dampening: float = 1.0,
         use_gc: bool = False,
         eps: float = 1e-3,
+        **kwargs,
     ):
         self.validate_learning_rate(lr)
         self.validate_betas(betas)
```
