Commit 996b1f9

neggert authored and williamFalcon committed
When running DDP without DistributedSampler, throw warning instead of exception (#91)
1 parent c1434f0 commit 996b1f9

2 files changed: +7, -4 lines changed

pytorch_lightning/models/trainer.py

Lines changed: 6 additions & 3 deletions
@@ -444,8 +444,9 @@ def get_dataloaders(self, model):
 
         if self.use_ddp and not isinstance(self.tng_dataloader.sampler, DistributedSampler):
             msg = """
-            when using multiple gpus and multiple nodes you must pass
-            a DistributedSampler to DataLoader(sampler).
+            You're using multiple gpus and multiple nodes without using a DistributedSampler
+            to assign a subset of your data to each process. To silence this warning, pass a
+            DistributedSampler to your DataLoader.
 
             ie: this:
             dataset = myDataset()
@@ -455,8 +456,10 @@ def get_dataloaders(self, model):
             dataset = myDataset()
             dist_sampler = torch.utils.data.distributed.DistributedSampler(dataset)
             dataloader = Dataloader(dataset, sampler=dist_sampler)
+
+            If you want each process to load the full dataset, ignore this warning.
             """
-            raise MisconfigurationException(msg)
+            warnings.warn(msg)
 
         # -----------------------------
         # MODEL TRAINING
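For context, the setup the new warning text recommends looks roughly like the sketch below. The TensorDataset stand-in and batch size are illustrative only, and DistributedSampler expects torch.distributed to be initialized when actually running under DDP:

import torch
from torch.utils.data import DataLoader, TensorDataset
from torch.utils.data.distributed import DistributedSampler

# Stand-in dataset; substitute your own Dataset subclass here.
dataset = TensorDataset(torch.randn(100, 10), torch.randint(0, 2, (100,)))

# DistributedSampler assigns each DDP process a disjoint subset of the data,
# which is what the warning asks for.
dist_sampler = DistributedSampler(dataset)
dataloader = DataLoader(dataset, batch_size=32, sampler=dist_sampler)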

tests/test_models.py

Lines changed: 1 addition & 1 deletion
@@ -665,7 +665,7 @@ def test_ddp_sampler_error():
         use_amp=True
     )
 
-    with pytest.raises(MisconfigurationException):
+    with pytest.warns(UserWarning):
         trainer.get_dataloaders(model)
 
     clear_save_dir()
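The test now asserts a UserWarning because warnings.warn defaults to that category when given a plain string message. A self-contained sketch of the pattern (function and test names here are illustrative, not from the repo):

import warnings
import pytest

def load_without_dist_sampler():
    # warnings.warn with a bare string message emits a UserWarning by default.
    warnings.warn("DataLoader has no DistributedSampler; each process will see the full dataset")

def test_warns_about_missing_sampler():
    # pytest.warns fails the test if no matching warning is emitted inside the block.
    with pytest.warns(UserWarning):
        load_without_dist_sampler()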
