We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 76f0c54 · commit 92b13a8 — Copy full SHA for 92b13a8
src/lightning/pytorch/trainer/connectors/accelerator_connector.py
@@ -520,7 +520,7 @@ def _check_and_init_precision(self) -> Precision:
520
rank_zero_info(
521
f"Using {'16bit' if self._precision_flag == '16-mixed' else 'bfloat16'} Automatic Mixed Precision (AMP)"
522
)
523
- device = "cpu" if self._accelerator_flag == "cpu" else "cuda"
+ device = "cpu" if self._accelerator_flag == "cpu" else "mps" if self._accelerator_flag == "mps" else "cuda"
524
return MixedPrecision(self._precision_flag, device) # type: ignore[arg-type]
525
526
raise RuntimeError("No precision set")
0 commit comments