1 parent c8c9523 commit ebbe682
tests/tests_pytorch/trainer/test_ddp_sigterm_handling.py
@@ -11,6 +11,11 @@
 from lightning.pytorch.strategies.ddp import DDPStrategy
 from lightning.pytorch.utilities.exceptions import SIGTERMException
 
+# Skip the test if DDP or multiple devices are not available
+pytestmark = pytest.mark.skipif(
+    not torch.distributed.is_available() or torch.cuda.device_count() < 2,
+    reason="Test requires torch.distributed and at least 2 CUDA devices"
+)
 
 class DummyModel(LightningModule):
     def training_step(self, batch, batch_idx):
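
For context, a minimal, self-contained sketch (hypothetical file, not part of this commit; the test name is invented) of how a module-level `pytestmark` gate like the one added above behaves: pytest evaluates the skipif condition once at collection time and skips every test in the module when it is true.

# Minimal sketch of a module-level pytest.mark.skipif gate (hypothetical
# test file, not from this commit). All tests below are skipped unless
# torch.distributed is available and at least 2 CUDA devices are present.
import pytest
import torch

pytestmark = pytest.mark.skipif(
    not torch.distributed.is_available() or torch.cuda.device_count() < 2,
    reason="Test requires torch.distributed and at least 2 CUDA devices",
)


def test_world_size_is_at_least_two():
    # Only reached on machines that satisfy the skipif condition above.
    assert torch.cuda.device_count() >= 2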