@@ -295,28 +295,28 @@ def on_after_backward(self) -> None:
 
         """
 
-    # def on_before_optimizer_setup(self) -> None:
-    #     """Called after :meth:`~lightning.pytorch.core.hooks.ModelHooks.configure_model` but before
-    #     :meth:`~lightning.pytorch.core.hooks.ModelHooks.configure_optimizers`.
+    def on_before_optimizer_setup(self) -> None:
+        """Called after :meth:`~lightning.pytorch.core.hooks.ModelHooks.configure_model` but before
+        :meth:`~lightning.pytorch.core.hooks.ModelHooks.configure_optimizers`.
 
-    #     This hook provides a safe point to modify, freeze, or inspect model parameters before optimizers are created.
-    #     It’s particularly useful for callbacks such as
-    #     :class:`~lightning.pytorch.callbacks.finetuning.BaseFinetuning`, where parameters must be frozen
-    #     prior to optimizer setup.
+        This hook provides a safe point to modify, freeze, or inspect model parameters before optimizers are created.
+        It’s particularly useful for callbacks such as
+        :class:`~lightning.pytorch.callbacks.finetuning.BaseFinetuning`, where parameters must be frozen
+        prior to optimizer setup.
 
-    #     This hook runs once in fit stage, after the model
-    #     has been fully instantiated by ``configure_model``, but before optimizers are created by
-    #     ``configure_optimizers``.
+        This hook runs once during the fit stage, after the model
+        has been fully instantiated by ``configure_model``, but before optimizers are created by
+        ``configure_optimizers``.
 
-    #     Example::
+        Example::
 
-    #         class MyFinetuneCallback(Callback):
-    #             def on_before_optimizer_setup(self, trainer, pl_module):
-    #                 # freeze the backbone before optimizers are created
-    #                 for param in pl_module.backbone.parameters():
-    #                     param.requires_grad = False
+            class MyFinetuneCallback(Callback):
+                def on_before_optimizer_setup(self, trainer, pl_module):
+                    # freeze the backbone before optimizers are created
+                    for param in pl_module.backbone.parameters():
+                        param.requires_grad = False
 
-    #     """
+        """
 
     def on_before_optimizer_step(self, optimizer: Optimizer) -> None:
         """Called before ``optimizer.step()``.
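For context on how the uncommented hook is meant to be used, here is a minimal end-to-end sketch of a fine-tuning setup built on `on_before_optimizer_setup`. Only the hook itself comes from this diff; the `FreezeBackbone` callback, `LitModel`, and its layer names are hypothetical illustrations.

```python
import torch
from torch import nn
from lightning.pytorch import Callback, LightningModule


class FreezeBackbone(Callback):
    """Hypothetical callback: freeze the backbone before optimizers exist."""

    def on_before_optimizer_setup(self, trainer, pl_module):
        # Runs after configure_model() but before configure_optimizers(),
        # so the optimizer created below never receives frozen parameters.
        for param in pl_module.backbone.parameters():
            param.requires_grad = False


class LitModel(LightningModule):
    """Hypothetical module with a backbone to freeze and a head to train."""

    def __init__(self):
        super().__init__()
        self.backbone = nn.Linear(32, 32)
        self.head = nn.Linear(32, 2)

    def training_step(self, batch, batch_idx):
        x, y = batch
        return nn.functional.cross_entropy(self.head(self.backbone(x)), y)

    def configure_optimizers(self):
        # Only parameters still requiring grad are handed to the optimizer.
        return torch.optim.SGD(
            (p for p in self.parameters() if p.requires_grad), lr=0.1
        )


# Usage sketch: Trainer(callbacks=[FreezeBackbone()]).fit(LitModel(), train_loader)
```

Because the callback flips `requires_grad` before `configure_optimizers` runs, the `requires_grad` filter there excludes the backbone up front, with no need to patch the optimizer's param groups afterwards.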