1 parent feede35 · commit 59d93ef
src/lightning/pytorch/plugins/precision/fsdp.py
@@ -63,7 +63,7 @@ def __init__(self, precision: _PRECISION_INPUT, scaler: Optional["ShardedGradSca
         if scaler is not None and self.precision != "16-mixed":
             raise ValueError(f"`precision={precision!r}` does not use a scaler, found {scaler}.")
 
-        self.scaler = ShardedGradScaler() if scaler is None and precision in ("16-mixed", "16-true") else None
+        self.scaler = ShardedGradScaler() if scaler is None and precision == "16-mixed" else None
         self.precision = precision
 
         precision_to_type = {
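
In effect, a ShardedGradScaler is now created implicitly only for "16-mixed" precision; with true half precision ("16-true"), weights and gradients are already fp16 and the plugin no longer attaches a gradient scaler. Below is a minimal sketch of the resulting behavior. It assumes the public class in this module is named FSDPPrecisionPlugin and that it exposes the `scaler` attribute set above; the class name and the assertions are illustrative, not part of this commit.

# Illustrative sketch only; assumes the plugin class exported from this file
# is FSDPPrecisionPlugin (name not confirmed by the diff).
from lightning.pytorch.plugins.precision.fsdp import FSDPPrecisionPlugin

# "16-mixed" still gets an implicit ShardedGradScaler when none is passed in.
mixed = FSDPPrecisionPlugin(precision="16-mixed")
assert mixed.scaler is not None

# After this change, "16-true" no longer creates a scaler: true half precision
# keeps parameters and gradients in fp16 and does not apply gradient scaling here.
true_half = FSDPPrecisionPlugin(precision="16-true")
assert true_half.scaler is None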