Skip to content

Commit 896a16a

Browse files
committed
update: min_lr
1 parent fbca29b commit 896a16a

File tree

1 file changed

+2
-5
lines changed

1 file changed

+2
-5
lines changed

pytorch_optimizer/optimizer/ranger21.py

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
from torch.nn import functional as f
66
from torch.optim import Optimizer
77

8-
from pytorch_optimizer.base.exception import NegativeLRError, NoSparseGradientError, ZeroParameterSizeError
8+
from pytorch_optimizer.base.exception import NoSparseGradientError, ZeroParameterSizeError
99
from pytorch_optimizer.base.optimizer import BaseOptimizer
1010
from pytorch_optimizer.base.types import BETAS, CLOSURE, DEFAULTS, LOSS, PARAMETERS
1111
from pytorch_optimizer.optimizer.agc import agc
@@ -73,6 +73,7 @@ def __init__( # pylint: disable=R0913
7373
eps: float = 1e-8,
7474
):
7575
self.lr = lr
76+
self.min_lr = warm_down_min_lr
7677
self.beta0 = beta0
7778
self.betas = betas
7879
self.use_softplus = use_softplus
@@ -96,7 +97,6 @@ def __init__( # pylint: disable=R0913
9697
# learning rate
9798
self.starting_lr = lr
9899
self.current_lr = lr
99-
self.min_lr = warm_down_min_lr
100100

101101
defaults: DEFAULTS = {
102102
'lr': lr,
@@ -184,9 +184,6 @@ def warm_down(self, lr: float, iteration: int) -> float:
184184
new_lr: float = self.starting_lr - self.warm_down_lr_delta * warm_down_pct
185185
new_lr = max(new_lr, self.min_lr)
186186

187-
if new_lr < 0.0:
188-
raise NegativeLRError(new_lr)
189-
190187
self.current_lr = new_lr
191188

192189
return new_lr

0 commit comments

Comments (0)