
Commit 18d760a

Fix LR schedulers when using optimizers with frequencies
1 parent 93822de

2 files changed (+2, -2 lines)

src/pytorch_lightning/loops/epoch/training_epoch_loop.py

1 addition, 1 deletion:

@@ -390,7 +390,7 @@ def update_lr_schedulers(self, interval: str, update_plateau_schedulers: bool) -
         if interval == "step" and self._should_accumulate():
             return
         active_optimizers = _get_active_optimizers(
-            self.trainer.optimizers, self.trainer.optimizer_frequencies, self.total_batch_idx
+            self.trainer.optimizers, self.trainer.optimizer_frequencies, self.batch_idx
         )
         self._update_learning_rates(
             interval=interval,
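
The one-line fix swaps the run-wide counter total_batch_idx for the per-epoch batch_idx, which is the counter the optimization loop itself uses to cycle through optimizers, so the scheduler update resolves the optimizer that is actually active. A minimal sketch of frequency-based selection, assuming a cumulative-frequency cycle like the one _get_active_optimizers implements (the standalone function below is illustrative, not Lightning's exact source):

from typing import List, Tuple

import numpy as np
from torch.optim import Optimizer


def get_active_optimizers(
    optimizers: List[Optimizer], frequencies: List[int], batch_idx: int
) -> List[Tuple[int, Optimizer]]:
    """Pick the optimizer whose turn it is at this batch (illustrative sketch)."""
    if not frequencies:
        # No frequencies configured: every optimizer is active on every batch.
        return list(enumerate(optimizers))
    freq_cumsum = np.cumsum(frequencies)        # e.g. frequencies (4, 1) -> [4, 5]
    position_in_cycle = batch_idx % freq_cumsum[-1]
    # First optimizer whose cumulative frequency exceeds the cycle position:
    # with (4, 1), batches 0-3 pick optimizer 0 and batch 4 picks optimizer 1.
    opt_idx = int(np.searchsorted(freq_cumsum, position_in_cycle, side="right"))
    return [(opt_idx, optimizers[opt_idx])]

Because batch_idx resets to zero each epoch while total_batch_idx keeps growing across the whole run, the two counters land at different positions in this cycle whenever the epoch length is not a multiple of the cycle length; passing batch_idx keeps scheduler stepping aligned with optimizer stepping.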

tests/tests_pytorch/trainer/optimization/test_optimizers.py

1 addition, 1 deletion:

@@ -286,7 +286,7 @@ def configure_optimizers(self):
             (dict(step_size=5), dict(T_max=2)),
             ("epoch", "epoch"),
             (5, 10),
-            (2, 3),
+            (4, 1),
             3,
         ),
     ],
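
For context on the parametrization, the visible entries map onto a model with two optimizers, per-optimizer schedulers stepped at "epoch" interval, and optimizer frequencies now set to (4, 1). A minimal sketch of a configure_optimizers with that shape, assuming Lightning's multi-optimizer dictionary format (the module and its layer are hypothetical, not the test's actual fixture):

import torch
from torch.optim.lr_scheduler import CosineAnnealingLR, StepLR
from pytorch_lightning import LightningModule


class TwoOptimizerModel(LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(32, 2)

    def training_step(self, batch, batch_idx, optimizer_idx):
        # With frequencies (4, 1), optimizer_idx is 0 for four batches,
        # then 1 for one batch, and the cycle repeats.
        return self.layer(batch).sum()

    def configure_optimizers(self):
        opt1 = torch.optim.SGD(self.layer.parameters(), lr=0.1)
        opt2 = torch.optim.Adam(self.layer.parameters(), lr=0.01)
        return (
            {
                "optimizer": opt1,
                "lr_scheduler": {"scheduler": StepLR(opt1, step_size=5), "interval": "epoch"},
                "frequency": 4,
            },
            {
                "optimizer": opt2,
                "lr_scheduler": {"scheduler": CosineAnnealingLR(opt2, T_max=2), "interval": "epoch"},
                "frequency": 1,
            },
        )

The remaining tuple entries, (5, 10) and 3, are left uninterpreted here, since the surrounding test code is not shown in the diff.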
