Commit e56ea5c

update
1 parent a19b3cb

2 files changed: 18 additions, 18 deletions

src/lightning/pytorch/core/hooks.py

Lines changed: 17 additions & 17 deletions

@@ -295,28 +295,28 @@ def on_after_backward(self) -> None:
         """

-    # def on_before_optimizer_setup(self) -> None:
-    #     """Called after :meth:`~lightning.pytorch.core.hooks.ModelHooks.configure_model` but before
-    #     :meth:`~lightning.pytorch.core.hooks.ModelHooks.configure_optimizers`.
+    def on_before_optimizer_setup(self) -> None:
+        """Called after :meth:`~lightning.pytorch.core.hooks.ModelHooks.configure_model` but before
+        :meth:`~lightning.pytorch.core.hooks.ModelHooks.configure_optimizers`.

-    #     This hook provides a safe point to modify, freeze, or inspect model parameters before optimizers are created.
-    #     It’s particularly useful for callbacks such as
-    #     :class:`~lightning.pytorch.callbacks.finetuning.BaseFinetuning`, where parameters must be frozen
-    #     prior to optimizer setup.
+        This hook provides a safe point to modify, freeze, or inspect model parameters before optimizers are created.
+        It’s particularly useful for callbacks such as
+        :class:`~lightning.pytorch.callbacks.finetuning.BaseFinetuning`, where parameters must be frozen
+        prior to optimizer setup.

-    #     This hook runs once during the fit stage, after the model
-    #     has been fully instantiated by ``configure_model``, but before optimizers are created by
-    #     ``configure_optimizers``.
+        This hook runs once during the fit stage, after the model
+        has been fully instantiated by ``configure_model``, but before optimizers are created by
+        ``configure_optimizers``.

-    #     Example::
+        Example::

-    #         class MyFinetuneCallback(Callback):
-    #             def on_before_optimizer_setup(self, trainer, pl_module):
-    #                 # freeze the backbone before optimizers are created
-    #                 for param in pl_module.backbone.parameters():
-    #                     param.requires_grad = False
+            class MyFinetuneCallback(Callback):
+                def on_before_optimizer_setup(self, trainer, pl_module):
+                    # freeze the backbone before optimizers are created
+                    for param in pl_module.backbone.parameters():
+                        param.requires_grad = False

-    # """
+        """

     def on_before_optimizer_step(self, optimizer: Optimizer) -> None:
         """Called before ``optimizer.step()``.

src/lightning/pytorch/trainer/trainer.py

Lines changed: 1 addition & 1 deletion

@@ -992,7 +992,7 @@ def _run(
         # run hook `on_before_optimizer_setup` before optimizers are set up & after model is configured
         if self.state.fn == TrainerFn.FITTING:
             call._call_callback_hooks(self, "on_before_optimizer_setup")
-            # call._call_lightning_module_hook(self, "on_before_optimizer_setup")
+            call._call_lightning_module_hook(self, "on_before_optimizer_setup")

         # check if we should delay restoring checkpoint till later
         if not self.strategy.restore_checkpoint_after_setup:
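
With the module-hook call restored, both variants fire during fit, callbacks first, per the call order in the hunk above. A minimal runnable sketch that exercises both, assuming a Lightning build containing this change; the `OrderCheck` and `Demo` class names are hypothetical:

    import torch
    from torch import nn
    from torch.utils.data import DataLoader, TensorDataset
    import lightning.pytorch as pl

    class OrderCheck(pl.Callback):
        def on_before_optimizer_setup(self, trainer, pl_module):
            print("callback hook: fires first")

    class Demo(pl.LightningModule):
        def __init__(self):
            super().__init__()
            self.layer = nn.Linear(4, 1)

        def on_before_optimizer_setup(self):
            print("module hook: fires second, still before configure_optimizers")

        def training_step(self, batch, batch_idx):
            x, y = batch
            return nn.functional.mse_loss(self.layer(x), y)

        def configure_optimizers(self):
            return torch.optim.SGD(self.parameters(), lr=0.1)

    data = DataLoader(TensorDataset(torch.randn(16, 4), torch.randn(16, 1)), batch_size=4)
    trainer = pl.Trainer(max_epochs=1, callbacks=[OrderCheck()], logger=False,
                         enable_checkpointing=False, enable_progress_bar=False)
    trainer.fit(Demo(), data)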
