We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent b7ca4d3 · commit 92f0103 (copy full SHA for 92f0103)
src/lightning/pytorch/trainer/connectors/accelerator_connector.py
@@ -515,7 +515,7 @@ def _check_and_init_precision(self) -> Precision:
 515             rank_zero_info(
 516                 f"Using {'16bit' if self._precision_flag == '16-mixed' else 'bfloat16'} Automatic Mixed Precision (AMP)"
 517             )
-                device = "cpu" if self._accelerator_flag == "cpu" else "cuda"
+                device = self._accelerator_flag if self._accelerator_flag in ("cpu", "mps") else "cuda"
 518             return MixedPrecision(self._precision_flag, device)  # type: ignore[arg-type]
 519
 520
 521     raise RuntimeError("No precision set")
0 commit comments