Skip to content

Commit 6610d2f

Browse files
authored
Add optimizer parameter to PolyScheduler constructor. (#295)
1 parent 1524b89 commit 6610d2f

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

pytorch_optimizer/lr_scheduler/linear_warmup.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -28,13 +28,13 @@ class PolyScheduler(BaseLinearWarmupScheduler):
2828
:param poly_order: float. lr scheduler decreases with steps.
2929
"""
3030

31-
def __init__(self, optimizer, poly_order: float = 0.5, **kwargs):
    """Validate the polynomial order and initialize the warmup scheduler.

    :param optimizer: wrapped optimizer, forwarded to the base scheduler.
    :param poly_order: float. exponent of the polynomial decay; must be > 0.
    :param kwargs: remaining keyword arguments for the base scheduler.
    :raises ValueError: if ``poly_order`` is not positive.
    """
    self.poly_order = poly_order

    # reject non-positive exponents before touching the base class
    if poly_order <= 0:
        raise ValueError(f'[-] poly_order must be positive. {poly_order}')

    super().__init__(optimizer, **kwargs)
3838

3939
def _step(self) -> float:
4040
return self.min_lr + (self.max_lr - self.min_lr) * (self.step_t - self.warmup_steps) ** self.poly_order

0 commit comments

Comments
 (0)