
Commit 9b45b99

Update fsdp.py
1 parent 1025875 commit 9b45b99

File tree

  • src/lightning/fabric/strategies/fsdp.py

1 file changed: +2 −2 lines

src/lightning/fabric/strategies/fsdp.py

Lines changed: 2 additions & 2 deletions

@@ -269,7 +269,7 @@ def setup_environment(self) -> None:
     @override
     def setup_module_and_optimizers(
         self, module: Module, optimizers: List[Optimizer], scheduler: Optional[_LRScheduler] = None
-    ) -> Tuple[Module, List[Optimizer]]:
+    ) -> Tuple[Module, List[Optimizer], Optional[_LRScheduler]]:
         """Wraps the model into a :class:`~torch.distributed.fsdp.fully_sharded_data_parallel.FullyShardedDataParallel`
         module and sets `use_orig_params=True` to keep the reference to the original parameters in the optimizer."""
         use_orig_params = self._fsdp_kwargs.get("use_orig_params")
@@ -281,7 +281,7 @@ def setup_module_and_optimizers(
             " call `setup_optimizer`."
         )
         module = self.setup_module(module)
-        return module, optimizers
+        return module, optimizers, scheduler

     @override
     def setup_module(self, module: Module) -> Module:
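
For context, a minimal sketch of how a caller consumes the changed return value. The strategy construction and the toy module, optimizer, and scheduler below are illustrative assumptions, not part of this commit, and a distributed process group must already be initialized for the call to run:

# Minimal usage sketch; names below are illustrative, not from this commit.
from torch.nn import Linear
from torch.optim import SGD
from torch.optim.lr_scheduler import StepLR

from lightning.fabric.strategies import FSDPStrategy

strategy = FSDPStrategy()
# ... launcher/environment setup (process group init) omitted ...

module = Linear(8, 8)
optimizer = SGD(module.parameters(), lr=0.1)
scheduler = StepLR(optimizer, step_size=10)

# As of this commit, the scheduler is passed through and returned as a third
# element, so callers unpack a 3-tuple instead of a 2-tuple.
module, optimizers, scheduler = strategy.setup_module_and_optimizers(
    module, [optimizer], scheduler
)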
