Skip to content

Commit 204c5cd

Browse files
Update remaining tests in test_accelerator_connector in preparation for #11040 (#12466)
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent 7c7a4ba commit 204c5cd

File tree

1 file changed

+18
-18
lines changed

1 file changed

+18
-18
lines changed

tests/accelerators/test_accelerator_connector.py

Lines changed: 18 additions & 18 deletions
Original file line number · Diff line number · Diff line change
@@ -56,11 +56,11 @@ def test_accelerator_choice_cpu(tmpdir):
     assert isinstance(trainer.strategy, SingleDeviceStrategy)


-@pytest.mark.parametrize(("num_processes", "num_nodes"), ([(1, 1), (1, 2), (2, 1), (2, 2)]))
-def test_accelerator_choice_ddp_cpu(tmpdir, num_processes: int, num_nodes: int):
-    trainer = Trainer(fast_dev_run=True, accelerator="ddp_cpu", num_processes=num_processes, num_nodes=num_nodes)
+@pytest.mark.parametrize(("devices", "num_nodes"), ([(1, 1), (1, 2), (2, 1), (2, 2)]))
+def test_accelerator_choice_ddp_cpu(tmpdir, devices: int, num_nodes: int):
+    trainer = Trainer(fast_dev_run=True, accelerator="ddp_cpu", devices=devices, num_nodes=num_nodes)
     assert isinstance(trainer.accelerator, CPUAccelerator)
-    no_spawn = num_processes == 1 and num_nodes > 1
+    no_spawn = devices == 1 and num_nodes > 1
     assert isinstance(trainer.strategy, DDPStrategy if no_spawn else DDPSpawnStrategy)
     assert isinstance(trainer.strategy.cluster_environment, LightningEnvironment)

@@ -205,7 +205,7 @@ def test_accelerator_choice_ddp2_te(*_):
 @mock.patch("torch.cuda.device_count", return_value=0)
 @mock.patch("pytorch_lightning.strategies.DDPStrategy.setup_distributed", autospec=True)
 def test_accelerator_choice_ddp_cpu_te(*_):
-    trainer = Trainer(fast_dev_run=True, accelerator="ddp_cpu", num_processes=2)
+    trainer = Trainer(fast_dev_run=True, accelerator="ddp_cpu", devices=2)
     assert isinstance(trainer.accelerator, CPUAccelerator)
     assert isinstance(trainer.strategy, DDPStrategy)
     assert isinstance(trainer.strategy.cluster_environment, TorchElasticEnvironment)
@@ -251,7 +251,7 @@ def test_accelerator_choice_ddp_kubeflow(*_):
 @mock.patch("torch.cuda.device_count", return_value=0)
 @mock.patch("pytorch_lightning.strategies.DDPStrategy.setup_distributed", autospec=True)
 def test_accelerator_choice_ddp_cpu_kubeflow(*_):
-    trainer = Trainer(fast_dev_run=True, accelerator="ddp_cpu", num_processes=1)
+    trainer = Trainer(fast_dev_run=True, accelerator="ddp_cpu", devices=1)
     assert isinstance(trainer.accelerator, CPUAccelerator)
     assert isinstance(trainer.strategy, DDPStrategy)
     assert isinstance(trainer.strategy.cluster_environment, KubeflowEnvironment)
@@ -273,7 +273,7 @@ def test_accelerator_choice_ddp_cpu_kubeflow(*_):
 @mock.patch("torch.cuda.device_count", return_value=0)
 @mock.patch("pytorch_lightning.strategies.DDPStrategy.setup_distributed", autospec=True)
 def test_accelerator_choice_ddp_cpu_slurm(*_):
-    trainer = Trainer(fast_dev_run=True, accelerator="ddp_cpu", num_processes=2)
+    trainer = Trainer(fast_dev_run=True, accelerator="ddp_cpu", devices=2)
     assert trainer._accelerator_connector._is_slurm_managing_tasks()
     assert isinstance(trainer.accelerator, CPUAccelerator)
     assert isinstance(trainer.strategy, DDPStrategy)
@@ -299,7 +299,7 @@ def _test_accelerator_choice_ddp_cpu_and_strategy(tmpdir, ddp_strategy_class):
         strategy=ddp_strategy_class(find_unused_parameters=True),
         fast_dev_run=True,
         accelerator="ddp_cpu",
-        num_processes=2,
+        devices=2,
     )
     assert isinstance(trainer.strategy, ddp_strategy_class)
     assert isinstance(trainer.accelerator, CPUAccelerator)
@@ -332,7 +332,7 @@ def creates_processes_externally(self) -> bool:
         return True

     trainer = Trainer(
-        default_root_dir=tmpdir, plugins=[CustomCluster()], fast_dev_run=True, accelerator="ddp_cpu", num_processes=2
+        default_root_dir=tmpdir, plugins=[CustomCluster()], fast_dev_run=True, accelerator="ddp_cpu", devices=2
     )
     assert isinstance(trainer.accelerator, CPUAccelerator)
     assert isinstance(trainer.strategy, DDPStrategy)
@@ -381,7 +381,7 @@ class Strat(SingleDeviceStrategy):
         pass

     strategy = Strat(device=torch.device("cpu"), accelerator=Accel(), precision_plugin=Prec())
-    trainer = Trainer(strategy=strategy, fast_dev_run=True, num_processes=2)
+    trainer = Trainer(strategy=strategy, fast_dev_run=True, devices=2)
     assert isinstance(trainer.accelerator, Accel)
     assert isinstance(trainer.strategy, Strat)
     assert isinstance(trainer.precision_plugin, Prec)
@@ -391,7 +391,7 @@ class Strat(DDPStrategy):
         pass

     strategy = Strat(accelerator=Accel(), precision_plugin=Prec())
-    trainer = Trainer(strategy=strategy, fast_dev_run=True, num_processes=2)
+    trainer = Trainer(strategy=strategy, fast_dev_run=True, devices=2)
     assert isinstance(trainer.accelerator, Accel)
     assert isinstance(trainer.strategy, Strat)
     assert isinstance(trainer.precision_plugin, Prec)
@@ -422,16 +422,16 @@ def test_dist_backend_accelerator_mapping(*_):
 def test_ipython_incompatible_backend_error(_, monkeypatch):
     monkeypatch.setattr(pytorch_lightning.utilities, "_IS_INTERACTIVE", True)
     with pytest.raises(MisconfigurationException, match=r"strategy='ddp'\)`.*is not compatible"):
-        Trainer(strategy="ddp", gpus=2)
+        Trainer(strategy="ddp", accelerator="gpu", devices=2)

     with pytest.raises(MisconfigurationException, match=r"strategy='ddp2'\)`.*is not compatible"):
-        Trainer(strategy="ddp2", gpus=2)
+        Trainer(strategy="ddp2", accelerator="gpu", devices=2)

     with pytest.raises(MisconfigurationException, match=r"strategy='ddp_spawn'\)`.*is not compatible"):
-        Trainer(strategy="ddp_spawn")
+        Trainer(strategy="ddp_spawn", accelerator="gpu", devices=2)

     with pytest.raises(MisconfigurationException, match=r"strategy='ddp_sharded_spawn'\)`.*is not compatible"):
-        Trainer(strategy="ddp_sharded_spawn")
+        Trainer(strategy="ddp_sharded_spawn", accelerator="gpu", devices=2)

     with pytest.raises(MisconfigurationException, match=r"strategy='ddp'\)`.*is not compatible"):
         # Edge case: AcceleratorConnector maps dp to ddp if accelerator != gpu
@@ -517,13 +517,13 @@ def test_passing_zero_and_empty_list_to_devices_flag(_, devices):

 @RunIf(min_gpus=1)
 def test_accelerator_gpu():
-    trainer = Trainer(accelerator="gpu", gpus=1)
+    trainer = Trainer(accelerator="gpu", devices=1)
     assert isinstance(trainer.accelerator, GPUAccelerator)

     trainer = Trainer(accelerator="gpu")
     assert isinstance(trainer.accelerator, GPUAccelerator)

-    trainer = Trainer(accelerator="auto", gpus=1)
+    trainer = Trainer(accelerator="auto", devices=1)
     assert isinstance(trainer.accelerator, GPUAccelerator)

@@ -750,7 +750,7 @@ def test_strategy_choice_ddp_slurm(setup_distributed_mock, strategy):
 def test_strategy_choice_ddp2_slurm(
     set_device_mock, device_count_mock, setup_distributed_mock, is_available_mock, strategy
 ):
-    trainer = Trainer(fast_dev_run=True, strategy=strategy, gpus=2)
+    trainer = Trainer(fast_dev_run=True, strategy=strategy, accelerator="gpu", devices=2)
     assert trainer._accelerator_connector._is_slurm_managing_tasks()
     assert isinstance(trainer.accelerator, GPUAccelerator)
     assert isinstance(trainer.strategy, DDP2Strategy)

0 commit comments

Comments (0)