@@ -204,7 +204,6 @@ Here is an example training a simple GAN with multiple optimizers using manual o
            d_opt = torch.optim.Adam(self.D.parameters(), lr=1e-5)
            return g_opt, d_opt

-
Learning Rate Scheduling
========================

@@ -230,6 +229,10 @@ Here is an example calling ``lr_scheduler.step()`` every step.
            super().__init__()
            self.automatic_optimization = False

+        def configure_optimizers(self):
+            optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
+            scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.1)
+            return [optimizer], [scheduler]

        def training_step(self, batch, batch_idx):
            # do forward, backward, and optimization
@@ -252,6 +255,11 @@ If you want to call ``lr_scheduler.step()`` every ``N`` steps/epochs, do the fol
            super().__init__()
            self.automatic_optimization = False

+        def configure_optimizers(self):
+            optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
+            scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.1)
+            return [optimizer], [scheduler]
+

        def training_step(self, batch, batch_idx):
            # do forward, backward, and optimization
@@ -275,13 +283,22 @@ If you want to call schedulers that require a metric value after each epoch, con
            super().__init__()
            self.automatic_optimization = False

+        def configure_optimizers(self):
+            optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
+            scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.1, patience=10)
+            return [optimizer], [scheduler]

        def on_train_epoch_end(self):
            sch = self.lr_schedulers()

-            # If the selected scheduler is a ReduceLROnPlateau scheduler.
-            if isinstance(sch, torch.optim.lr_scheduler.ReduceLROnPlateau):
-                sch.step(self.trainer.callback_metrics["loss"])
+            sch.step(self.trainer.callback_metrics["loss"])
+
+.. note::
+    :meth:`~lightning.pytorch.core.LightningModule.configure_optimizers` supports 6 different ways to define and
+    return optimizers and learning rate schedulers. Regardless of how you define them, ``self.optimizers()`` always
+    returns either a single optimizer if you defined a single optimizer, or a list of optimizers if you defined
+    multiple optimizers. The same applies to ``self.lr_schedulers()``, which returns a single scheduler if you
+    defined a single scheduler, or a list of schedulers if you defined multiple schedulers.


Optimizer Steps at Different Frequencies
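The note in the diff above describes how ``self.optimizers()`` and ``self.lr_schedulers()`` behave once multiple optimizers are configured. As a minimal sketch of that behavior under manual optimization (not part of this diff; the class name, the ``net_a``/``net_b`` submodules, and all hyperparameters are illustrative assumptions):

.. code-block:: python

    import torch
    from lightning.pytorch import LightningModule


    class TwoOptimizerSketch(LightningModule):
        def __init__(self):
            super().__init__()
            self.automatic_optimization = False
            # Illustrative submodules, one per optimizer.
            self.net_a = torch.nn.Linear(4, 4)
            self.net_b = torch.nn.Linear(4, 4)

        def configure_optimizers(self):
            # Two optimizers and two schedulers, returned as lists.
            opt_a = torch.optim.Adam(self.net_a.parameters(), lr=1e-3)
            opt_b = torch.optim.Adam(self.net_b.parameters(), lr=1e-3)
            sch_a = torch.optim.lr_scheduler.StepLR(opt_a, step_size=10, gamma=0.1)
            sch_b = torch.optim.lr_scheduler.StepLR(opt_b, step_size=10, gamma=0.1)
            return [opt_a, opt_b], [sch_a, sch_b]

        def training_step(self, batch, batch_idx):
            # With two optimizers configured, self.optimizers() returns a list
            # (it would return a single optimizer if only one were configured);
            # self.lr_schedulers() behaves the same way.
            opt_a, opt_b = self.optimizers()
            sch_a, sch_b = self.lr_schedulers()

            loss_a = self.net_a(batch).sum()
            opt_a.zero_grad()
            self.manual_backward(loss_a)
            opt_a.step()
            sch_a.step()

            loss_b = self.net_b(batch).sum()
            opt_b.zero_grad()
            self.manual_backward(loss_b)
            opt_b.step()
            sch_b.step()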