
Commit 2fc2fb7

Update fsdp.py
1 parent d25014d commit 2fc2fb7

File tree

  • src/lightning/pytorch/plugins/precision/fsdp.py

1 file changed (+0, -3 lines)
src/lightning/pytorch/plugins/precision/fsdp.py

Lines changed: 0 additions & 3 deletions
@@ -76,9 +76,6 @@ def __init__(self, precision: _PRECISION_INPUT, scaler: Optional["ShardedGradScaler"] = None) -> None:
     @override
     def clip_grad_by_norm(self, *_: Any, **__: Any) -> None:
         # see https://pytorch.org/docs/stable/fsdp.html#torch.distributed.fsdp.FullyShardedDataParallel.clip_grad_norm_
-        # section `Gradient Clipping`, using `torch.nn.utils.clip_grad_norm_` is incorrect with FSDP.
-        # To overcome this we need to call root_sharded_module.clip_grad_norm(clip_val), but we don't have a reference
-        # to the root module
         raise MisconfigurationException(
             f"`gradient_clip_algorithm='norm'` is currently not supported for `{self.__class__.__name__}`"
         )
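The comment lines removed by this commit explained why norm-based clipping is rejected in this precision plugin: with FSDP, `torch.nn.utils.clip_grad_norm_` only sees the gradient shards held by the local rank, so the total gradient norm must instead be computed by the root FSDP-wrapped module via `FullyShardedDataParallel.clip_grad_norm_`, and the precision plugin has no reference to that root module. As a minimal sketch (not part of this commit; `backward_and_clip`, `model`, `optimizer`, `loss`, and `max_norm` are illustrative names), the clipping pattern described in the linked PyTorch docs looks roughly like this:

import torch
from torch.distributed.fsdp import FullyShardedDataParallel as FSDP

def backward_and_clip(model: FSDP, optimizer: torch.optim.Optimizer,
                      loss: torch.Tensor, max_norm: float = 1.0) -> None:
    # Illustrative training-step fragment, assuming `model` is the root
    # FSDP-wrapped module and the process group is already initialized.
    loss.backward()

    # Incorrect with FSDP: this only accounts for the gradient shards on the
    # current rank, so the computed norm (and hence the clipping) is wrong.
    # torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm)

    # Per the FSDP docs linked in the diff: the root FSDP module reduces the
    # gradient norm across all ranks before clipping.
    model.clip_grad_norm_(max_norm)

    optimizer.step()
    optimizer.zero_grad()

Because the precision plugin never holds a reference to that root module, `clip_grad_by_norm` raises a `MisconfigurationException` instead of attempting to clip.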

0 commit comments
