Commit 97e2d4d (1 parent: 62153b3)
src/lightning/fabric/strategies/fsdp.py
@@ -353,7 +353,7 @@ def _optimizer_has_flat_params(optimizer: Optimizer) -> bool:
     _FSDP_FLATTENED = "_fsdp_flattened"
     if _TORCH_GREATER_EQUAL_1_13:
         return any(
-            getattr(param, _FSDP_FLATTENED, False) for group in optimizer.param_group for param in group["params"]
+            getattr(param, _FSDP_FLATTENED, False) for group in optimizer.param_groups for param in group["params"]
         )
 
     from torch.distributed.fsdp import FlatParameter
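The fix corrects a typo: torch.optim.Optimizer defines param_groups (a list of parameter-group dicts), not param_group, so the old attribute access raised AttributeError whenever this check ran on torch >= 1.13. Below is a minimal standalone sketch of the corrected check; the optimizer and parameter are illustrative stand-ins, not code from the commit.

import torch
from torch.optim import SGD

# Attribute that FSDP sets on parameters it has flattened (torch >= 1.13).
_FSDP_FLATTENED = "_fsdp_flattened"

# Illustrative optimizer; any torch.optim.Optimizer exposes param_groups
# the same way.
param = torch.nn.Parameter(torch.zeros(1))
optimizer = SGD([param], lr=0.1)

# param_groups is the real attribute: a list of dicts, each with a
# "params" entry. The old optimizer.param_group raised AttributeError here.
has_flat_params = any(
    getattr(p, _FSDP_FLATTENED, False)
    for group in optimizer.param_groups
    for p in group["params"]
)
print(has_flat_params)  # False here; True once FSDP has flattened the params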