
Commit 7c829b6: Update amp.py
Parent: 188ca22

1 file changed, 2 additions(+), 2 deletions(-)

src/lightning/pytorch/plugins/precision/amp.py
@@ -101,18 +101,18 @@ def optimizer_step(  # type: ignore[override]
     @override
     def clip_gradients(
         self,
-        module: Optional[Module],
         optimizer: Optimizer,
         clip_val: Union[int, float] = 0.0,
         gradient_clip_algorithm: GradClipAlgorithmType = GradClipAlgorithmType.NORM,
+        module: Optional[Module] = None,
     ) -> None:
         if clip_val > 0 and _optimizer_handles_unscaling(optimizer):
             raise RuntimeError(
                 f"The current optimizer, {type(optimizer).__qualname__}, does not allow for gradient clipping"
                 " because it performs unscaling of gradients internally. HINT: Are you using a 'fused' optimizer?"
             )
         super().clip_gradients(
-            module=module, optimizer=optimizer, clip_val=clip_val, gradient_clip_algorithm=gradient_clip_algorithm
+            optimizer=optimizer, clip_val=clip_val, gradient_clip_algorithm=gradient_clip_algorithm, module=module
         )
 
     def autocast_context_manager(self) -> torch.autocast:
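
What the change does: `module` moves from the first (required) positional parameter of clip_gradients to a trailing keyword argument that defaults to None, so callers that only have an optimizer no longer need to pass a module. A minimal sketch of the call-site impact, assuming the MixedPrecision plugin defined in this file plus a plain torch model and optimizer (the class name and constructor arguments here may differ across Lightning versions):

import torch
from lightning.pytorch.plugins.precision import MixedPrecision

model = torch.nn.Linear(4, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
plugin = MixedPrecision(precision="16-mixed", device="cuda")

# Produce some gradients to clip.
model(torch.randn(8, 4)).sum().backward()

# Before this commit, `module` came first and was required:
#     plugin.clip_gradients(model, optimizer, clip_val=0.5)

# After this commit, `module` trails and defaults to None:
plugin.clip_gradients(optimizer, clip_val=0.5)
plugin.clip_gradients(optimizer, clip_val=0.5, module=model)  # still accepted

The guard in the hunk is unchanged: when clip_val > 0 and the optimizer unscales gradients internally (as 'fused' optimizers do), clip_gradients raises a RuntimeError instead of clipping.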
