@@ -204,7 +204,6 @@ Here is an example training a simple GAN with multiple optimizers using manual o
204204 d_opt = torch.optim.Adam(self.D.parameters(), lr=1e-5)
205205 return g_opt, d_opt
206206
207-
208207Learning Rate Scheduling
209208========================
210209
@@ -230,6 +229,10 @@ Here is an example calling ``lr_scheduler.step()`` every step.
230229 super().__init__()
231230 self.automatic_optimization = False
232231
232+ def configure_optimizers(self):
233+ optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
234+ scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.1)
235+ return [optimizer], [scheduler]
233236
234237 def training_step(self, batch, batch_idx):
235238 # do forward, backward, and optimization
@@ -252,6 +255,11 @@ If you want to call ``lr_scheduler.step()`` every ``N`` steps/epochs, do the fol
252255 super().__init__()
253256 self.automatic_optimization = False
254257
258+ def configure_optimizers(self):
259+ optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
260+ scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.1)
261+ return [optimizer], [scheduler]
262+
255263
256264 def training_step(self, batch, batch_idx):
257265 # do forward, backward, and optimization
@@ -275,13 +283,31 @@ If you want to call schedulers that require a metric value after each epoch, con
275283 super().__init__()
276284 self.automatic_optimization = False
277285
286+ def configure_optimizers(self):
287+ optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
288+ scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.1, patience=10)
289+ return [optimizer], [scheduler]
278290
279291 def on_train_epoch_end(self):
280292 sch = self.lr_schedulers()
281293
282- # If the selected scheduler is a ReduceLROnPlateau scheduler.
283- if isinstance(sch, torch.optim.lr_scheduler.ReduceLROnPlateau):
284- sch.step(self.trainer.callback_metrics["loss"])
294+ sch.step(self.trainer.callback_metrics["loss"])
295+
296+.. note::
297+    :meth:`~lightning.pytorch.core.LightningModule.configure_optimizers` supports six different ways of defining
298+    and returning optimizers and learning rate schedulers. Regardless of how you define them, ``self.optimizers()``
299+    returns a single optimizer if you configured one, or a list of optimizers if you configured several. The same
300+    applies to ``self.lr_schedulers()``, which returns a single scheduler if you configured one, or a list of
301+    schedulers if you configured several.
285302
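303+For example, the GAN module above configures two optimizers, so ``self.optimizers()`` returns them as a list that
304+can be unpacked directly in ``training_step`` (a minimal sketch; the body of the step is omitted):
305+
306+.. code-block:: python
307+
308+    def training_step(self, batch, batch_idx):
309+        # two optimizers were configured, so a list of two optimizers comes back
310+        g_opt, d_opt = self.optimizers()
311+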
286312
287313Optimizer Steps at Different Frequencies