@@ -639,7 +639,8 @@ def get_dataloaders(self, model):
         # call warnings from proc zero only which triggers dataloaders
         # if those have to download data it will only happen on proc 0
         if self.proc_rank == 0:
-            if self.use_ddp or self.use_ddp2 and not isinstance(self.get_train_dataloader().sampler, DistributedSampler):
+            on_ddp = self.use_ddp or self.use_ddp2
+            if on_ddp and not isinstance(self.get_train_dataloader().sampler, DistributedSampler):
                 msg = """
                 You're using multiple gpus and multiple nodes without using a DistributedSampler
                 to assign a subset of your data to each process. To silence this warning, pass a
@@ -658,14 +659,14 @@ def get_dataloaders(self, model):
658659 """
659660 warnings .warn (msg )
660661
661- if self . use_ddp or self . use_ddp2 and self .get_val_dataloaders is not None :
662+ if on_ddp and self .get_val_dataloaders is not None :
662663 for dataloader in self .get_val_dataloaders ():
663664 if not isinstance (dataloader .sampler , DistributedSampler ):
664665 msg = """
665666 Your val_dataloader(s) don't use DistributedSampler.
666- You're using multiple gpus and multiple nodes without using a DistributedSampler
667- to assign a subset of your data to each process. To silence this warning, pass a
668- DistributedSampler to your DataLoader.
667+ You're using multiple gpus and multiple nodes without using a
668+ DistributedSampler to assign a subset of your data to each process.
669+ To silence this warning, pass a DistributedSampler to your DataLoader.
669670
670671 ie: this:
671672 dataset = myDataset()
@@ -681,14 +682,14 @@ def get_dataloaders(self, model):
                         warnings.warn(msg)
                         break

-            if self.use_ddp or self.use_ddp2 and self.get_test_dataloaders is not None:
+            if on_ddp and self.get_test_dataloaders is not None:
                 for dataloader in self.get_test_dataloaders():
                     if not isinstance(dataloader.sampler, DistributedSampler):
                         msg = """
                         Your test_dataloader(s) don't use DistributedSampler.
-                        You're using multiple gpus and multiple nodes without using a DistributedSampler
-                        to assign a subset of your data to each process. To silence this warning, pass a
-                        DistributedSampler to your DataLoader.
+                        You're using multiple gpus and multiple nodes without using a
+                        DistributedSampler to assign a subset of your data to each process.
+                        To silence this warning, pass a DistributedSampler to your DataLoader.

                         ie: this:
                         dataset = myDataset()
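
For context, a minimal sketch of what the warning asks users to do: wrap the dataset in a DistributedSampler and pass it to the DataLoader via the sampler argument. The dataset and the num_replicas/rank values below are made up for illustration; in a real DDP run those normally come from the initialized process group.

    import torch
    from torch.utils.data import DataLoader, TensorDataset
    from torch.utils.data.distributed import DistributedSampler

    # Hypothetical dataset, purely for illustration.
    dataset = TensorDataset(torch.randn(1000, 32))

    # Instead of: dataloader = DataLoader(dataset)
    # give the DataLoader a DistributedSampler so each process only sees its shard.
    # num_replicas/rank are passed explicitly here so the snippet runs without
    # torch.distributed being initialized; normally DistributedSampler(dataset)
    # reads them from the default process group.
    dist_sampler = DistributedSampler(dataset, num_replicas=2, rank=0)
    dataloader = DataLoader(dataset, batch_size=32, sampler=dist_sampler)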