Skip to content

Commit f457f97

Browse files
authored
Raise exception for ddp_cpu not supported for TPUs (#8530)
1 parent c5167e9 commit f457f97

File tree

2 files changed

+12
-0
lines changed

2 files changed

+12
-0
lines changed

pytorch_lightning/trainer/connectors/accelerator_connector.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -741,6 +741,11 @@ def set_distributed_mode(self, distributed_backend: Optional[str] = None):
741741

742742
# special case with DDP on CPUs
743743
if self.distributed_backend == "ddp_cpu":
744+
if _TPU_AVAILABLE:
745+
raise MisconfigurationException(
746+
"`accelerator='ddp_cpu'` is not supported on TPU machines. "
747+
"Learn more: https://github.com/PyTorchLightning/pytorch-lightning/issues/7810"
748+
)
744749
self._distrib_type = DistributedType.DDP_SPAWN
745750
if self.num_gpus > 0:
746751
rank_zero_warn(

tests/accelerators/test_tpu_backend.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -253,3 +253,10 @@ def on_train_end(self, trainer, pl_module):
253253

254254
for param, param_copy in zip(model.parameters(), model_copy.parameters()):
255255
assert not torch.equal(param.cpu().data, param_copy.data)
256+
257+
258+
@RunIf(tpu=True)
def test_ddp_cpu_not_supported_on_tpus():
    """Constructing a Trainer with ``accelerator="ddp_cpu"`` must raise on TPU machines."""
    expected = "`accelerator='ddp_cpu'` is not supported on TPU machines"
    with pytest.raises(MisconfigurationException, match=expected):
        Trainer(accelerator="ddp_cpu")

0 commit comments

Comments
 (0)