Skip to content

Commit c74bdab

Browse files
authored
Run DDP test_reapply_compile on gpu
1 parent d3645c4 commit c74bdab

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

tests/tests_pytorch/strategies/test_ddp_integration.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -451,12 +451,12 @@ def creates_processes_externally(self):
451451
trainer.fit(model)
452452

453453

454-
@RunIf(dynamo=True)
454+
@RunIf(min_cuda_gpus=2, standalone=True, dynamo=True)
455455
@mock.patch("lightning.fabric.wrappers.torch.compile", Mock(wraps=torch.compile))
456456
@mock.patch.dict(os.environ, {})
457457
def test_reapply_compile(tmp_path):
458458
"""Test that Trainer can rewrap a compiled module such that compilation happens over the DDP-wrapper."""
459-
trainer = Trainer(accelerator="cpu", devices=2, strategy="ddp", max_steps=2, logger=False)
459+
trainer = Trainer(accelerator="gpu", devices=2, strategy="ddp", max_steps=2, logger=False)
460460

461461
model = BoringModel()
462462
compile_kwargs = {"mode": "reduce-overhead"}
@@ -475,5 +475,5 @@ def test_reapply_compile(tmp_path):
475475

476476
# Smoke-testing forward to ensure we don't get compilation errors
477477
for _ in range(3):
478-
trainer_model(torch.randn(2, 32, device="cpu")).sum().backward()
478+
trainer_model(torch.randn(2, 32, device="cuda")).sum().backward()
479479
assert True

0 commit comments

Comments (0)