We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 6f04f9c · commit dee2225 · Copy full SHA for dee2225
src/lightning/pytorch/plugins/precision/fsdp.py
@@ -86,6 +86,7 @@ def clip_grad_by_norm(self, module: Optional[Module], optimizer: Optimizer, clip
86
# see https://pytorch.org/docs/stable/fsdp.html#torch.distributed.fsdp.FullyShardedDataParallel.clip_grad_norm_
87
if module is None:
88
return
89
+ assert isinstance(module.clip_grad_norm_, Module)
90
module.clip_grad_norm_(clip_val)
91
92
@property
0 commit comments