Skip to content

Commit 2cbe3c5

Browse files
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 87ff3fa commit 2cbe3c5

File tree

2 files changed

+2
-2
lines changed
  • src/lightning
    • fabric/plugins/precision
    • pytorch/plugins/precision

2 files changed

+2
-2
lines changed

src/lightning/fabric/plugins/precision/amp.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ def __init__(
 53  53         scaler = (
 54  54             torch.amp.GradScaler(device=device)
 55  55             if _TORCH_GREATER_EQUAL_2_4
 56   -             else getattr(torch, device.split(':')[0] if device.split(':')[0] != "cpu" else "cuda").amp.GradScaler()
     56 +           else getattr(torch, device.split(":")[0] if device.split(":")[0] != "cpu" else "cuda").amp.GradScaler()
 57  57         )
 58  58         if scaler is not None and self.precision == "bf16-mixed":
 59  59             raise ValueError(f"`precision='bf16-mixed'` does not use a scaler, found {scaler}.")

src/lightning/pytorch/plugins/precision/amp.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ def __init__(
 53  53         scaler = (
 54  54             torch.amp.GradScaler(device=device)
 55  55             if _TORCH_GREATER_EQUAL_2_4
 56   -             else getattr(torch, device.split(':')[0] if device.split(':')[0] != "cpu" else "cuda").amp.GradScaler()
     56 +           else getattr(torch, device.split(":")[0] if device.split(":")[0] != "cpu" else "cuda").amp.GradScaler()
 57  57         )
 58  58         if scaler is not None and self.precision == "bf16-mixed":
 59  59             raise MisconfigurationException(f"`precision='bf16-mixed'` does not use a scaler, found {scaler}.")

0 commit comments

Comments
 (0)