Skip to content

Commit 3b87924

Browse files
Update src/lightning/fabric/plugins/precision/fsdp.py
Co-authored-by: Justus Schock <[email protected]>
1 parent 59d93ef commit 3b87924

File tree

1 file changed

+2
-3
lines changed
  • src/lightning/fabric/plugins/precision

1 file changed

+2
-3
lines changed

src/lightning/fabric/plugins/precision/fsdp.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -87,9 +87,8 @@ def mixed_precision_config(self) -> "TorchMixedPrecision":
87 87

88 88
if self.precision in ("16-true", "bf16-true"):
89 89
rank_zero_warn(
90-
f"FSDPPrecision `{self.precision}` enables mixed-precision execution. "
91-
"Model parameters remain in full precision `torch.float32`, while forward and backward passes "
92-
f"run with reduced precision `{self._desired_input_dtype}` for speed and memory efficiency."
90+
f"FSDP with `{self.precision}` enables computation in lower precision. "
91+
"FSDP will always retain a full-precision copy of the model parameters for sharding."
93 92
)
94 93

95 94
if self.precision in ("16-true", "16-mixed"):

0 commit comments

Comments
 (0)