
Commit fe825e5 (1 parent: 880870a)

[pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

File tree: 4 files changed, +5 −5 lines

src/lightning/fabric/connector.py

Lines changed: 2 additions & 2 deletions (whitespace-only: the formatter re-indents the `precision=` argument and the closing parenthesis; the rendered page does not preserve the exact pre-fix indentation, so the `-` lines below show it only approximately).

@@ -464,9 +464,9 @@ def _check_and_init_precision(self) -> Precision:
             return DeepSpeedPrecision(self._precision_input)  # type: ignore
         if isinstance(self.strategy, FSDPStrategy):
             return FSDPPrecision(
-               precision=self._precision_input,  # type: ignore[arg-type]
+                precision=self._precision_input,  # type: ignore[arg-type]
                 device=self._accelerator_flag.get_device() if isinstance(self._accelerator_flag, Accelerator) else None,
-           )
+            )
         mp_precision_supported = ("32-true", "bf16-mixed", "bf16-true", "16-true")
         if isinstance(self.strategy, ModelParallelStrategy) and self._precision_input not in mp_precision_supported:
             raise ValueError(
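The substantive part of this hunk is the `device` argument rather than the re-indent: the device is forwarded only when `_accelerator_flag` holds a concrete `Accelerator` instance. A minimal sketch of that dispatch pattern follows; `Accelerator`, `get_device`, and `resolve_device` here are simplified stand-ins, not the real Lightning API.

# Sketch of the dispatch above, with stand-in classes.
from typing import Optional, Union


class Accelerator:
    def get_device(self) -> str:
        return "cuda"


def resolve_device(accelerator_flag: Union[Accelerator, str]) -> Optional[str]:
    # Forward the accelerator's device only when a concrete Accelerator
    # instance was passed; string flags such as "auto" resolve to None,
    # letting the precision plugin fall back to its own default.
    return accelerator_flag.get_device() if isinstance(accelerator_flag, Accelerator) else None


print(resolve_device(Accelerator()))  # -> "cuda"
print(resolve_device("auto"))         # -> None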

src/lightning/fabric/plugins/precision/amp.py

Lines changed: 1 addition & 1 deletion (quote normalization: the single quotes in `device.split(':')` become double quotes).

@@ -53,7 +53,7 @@ def __init__(
         scaler = (
             torch.amp.GradScaler(device=device)
             if _TORCH_GREATER_EQUAL_2_4
-            else getattr(torch, device.split(':')[0] if device.split(':')[0] != "cpu" else "cuda").amp.GradScaler()
+            else getattr(torch, device.split(":")[0] if device.split(":")[0] != "cpu" else "cuda").amp.GradScaler()
         )
         if scaler is not None and self.precision == "bf16-mixed":
             raise ValueError(f"`precision='bf16-mixed'` does not use a scaler, found {scaler}.")
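For context, the line being touched chooses between the unified `torch.amp.GradScaler` API (torch >= 2.4) and the older per-backend scalers such as `torch.cuda.amp.GradScaler`. A hedged sketch of that selection logic; `make_scaler` and the explicit `torch_ge_2_4` flag are illustrative stand-ins for the guarded expression in the diff, and only the torch calls themselves are real APIs.

# Sketch of the version-gated scaler construction.
import torch


def make_scaler(device: str, torch_ge_2_4: bool) -> "torch.amp.GradScaler":
    if torch_ge_2_4:
        # torch >= 2.4: one GradScaler class, parameterized by device.
        return torch.amp.GradScaler(device=device)
    # Older torch: the scaler lives in a per-backend namespace, so strip any
    # device index ("cuda:1" -> "cuda") and remap "cpu" to "cuda", mirroring
    # the fallback in the diff (the legacy path targets the CUDA scaler).
    backend = device.split(":")[0]
    if backend == "cpu":
        backend = "cuda"
    return getattr(torch, backend).amp.GradScaler()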

src/lightning/pytorch/plugins/precision/amp.py

Lines changed: 1 addition & 1 deletion (the same quote normalization as in the fabric plugin above).

@@ -53,7 +53,7 @@ def __init__(
         scaler = (
             torch.amp.GradScaler(device=device)
             if _TORCH_GREATER_EQUAL_2_4
-            else getattr(torch, device.split(':')[0] if device.split(':')[0] != "cpu" else "cuda").amp.GradScaler()
+            else getattr(torch, device.split(":")[0] if device.split(":")[0] != "cpu" else "cuda").amp.GradScaler()
         )
         if scaler is not None and self.precision == "bf16-mixed":
             raise MisconfigurationException(f"`precision='bf16-mixed'` does not use a scaler, found {scaler}.")
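The unchanged guard at the end of both hunks is worth noting: bfloat16 keeps float32's exponent range, so "bf16-mixed" needs no gradient scaling and a user-supplied scaler is rejected. A hypothetical usage, assuming the Lightning 2.x `MixedPrecision(precision, device, scaler=None)` signature (which may differ in other versions):

# Hypothetical usage of the guard above.
import torch
from lightning.pytorch.plugins.precision import MixedPrecision

MixedPrecision("16-mixed", device="cuda")    # ok: a GradScaler is created
MixedPrecision("bf16-mixed", device="cuda")  # ok: no scaler is needed

# Passing a scaler together with "bf16-mixed" trips the raise shown in the
# diff (MisconfigurationException).
MixedPrecision("bf16-mixed", device="cuda", scaler=torch.cuda.amp.GradScaler())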

src/lightning/pytorch/trainer/connectors/accelerator_connector.py

Lines changed: 1 addition & 1 deletion (the same whitespace-only re-indent of the `precision=` argument as in src/lightning/fabric/connector.py above).

@@ -508,7 +508,7 @@ def _check_and_init_precision(self) -> Precision:
             return DeepSpeedPrecision(self._precision_flag)  # type: ignore[arg-type]
         if isinstance(self.strategy, FSDPStrategy):
             return FSDPPrecision(
-               precision=self._precision_flag,  # type: ignore[arg-type]
+                precision=self._precision_flag,  # type: ignore[arg-type]
                 device=self._accelerator_flag.get_device() if isinstance(self._accelerator_flag, Accelerator) else None,
             )
         if self._precision_flag in ("16-true", "bf16-true"):
