@@ -388,11 +388,9 @@ def teardown(self, stage: str) -> None:
         """
 
     def train_dataloader(self) -> TRAIN_DATALOADERS:
-        """Implement one or more PyTorch DataLoaders for training.
+        """An iterable or collection of iterables specifying training samples.
 
-        Return:
-            A collection of :class:`torch.utils.data.DataLoader` specifying training samples.
-            In the case of multiple dataloaders, please see this :ref:`section <multiple-dataloaders>`.
+        For more information about multiple dataloaders, see this :ref:`section <multiple-dataloaders>`.
 
         The dataloader you return will not be reloaded unless you set
         :paramref:`~lightning.pytorch.trainer.Trainer.reload_dataloaders_every_n_epochs` to
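
For context, a minimal ``train_dataloader`` under the new wording might look like the sketch below. It is an illustration only; the dataset shape and ``reload_dataloaders_every_n_epochs=1`` are assumptions, not part of this change::

    import torch
    from torch.utils.data import DataLoader, TensorDataset

    import lightning.pytorch as pl


    class BoringModel(pl.LightningModule):
        def train_dataloader(self) -> DataLoader:
            # Any iterable is accepted; a DataLoader remains the common case.
            dataset = TensorDataset(torch.randn(64, 32), torch.randint(0, 2, (64,)))
            return DataLoader(dataset, batch_size=8, shuffle=True)


    # Rebuild the dataloader every epoch instead of caching it once.
    trainer = pl.Trainer(reload_dataloaders_every_n_epochs=1)
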
@@ -412,55 +410,15 @@ def train_dataloader(self) -> TRAIN_DATALOADERS:
         - :meth:`setup`
 
         Note:
-            Lightning adds the correct sampler for distributed and arbitrary hardware.
+            Lightning tries to add the correct sampler for distributed and arbitrary hardware.
             There is no need to set it yourself.
-
-        Example::
-
-            # single dataloader
-            def train_dataloader(self):
-                transform = transforms.Compose([transforms.ToTensor(),
-                                                transforms.Normalize((0.5,), (1.0,))])
-                dataset = MNIST(root='/path/to/mnist/', train=True, transform=transform,
-                                download=True)
-                loader = torch.utils.data.DataLoader(
-                    dataset=dataset,
-                    batch_size=self.batch_size,
-                    shuffle=True
-                )
-                return loader
-
-            # multiple dataloaders, return as list
-            def train_dataloader(self):
-                mnist = MNIST(...)
-                cifar = CIFAR(...)
-                mnist_loader = torch.utils.data.DataLoader(
-                    dataset=mnist, batch_size=self.batch_size, shuffle=True
-                )
-                cifar_loader = torch.utils.data.DataLoader(
-                    dataset=cifar, batch_size=self.batch_size, shuffle=True
-                )
-                # each batch will be a list of tensors: [batch_mnist, batch_cifar]
-                return [mnist_loader, cifar_loader]
-
-            # multiple dataloader, return as dict
-            def train_dataloader(self):
-                mnist = MNIST(...)
-                cifar = CIFAR(...)
-                mnist_loader = torch.utils.data.DataLoader(
-                    dataset=mnist, batch_size=self.batch_size, shuffle=True
-                )
-                cifar_loader = torch.utils.data.DataLoader(
-                    dataset=cifar, batch_size=self.batch_size, shuffle=True
-                )
-                # each batch will be a dict of tensors: {'mnist': batch_mnist, 'cifar': batch_cifar}
-                return {'mnist': mnist_loader, 'cifar': cifar_loader}
         """
         raise MisconfigurationException("`train_dataloader` must be implemented to be used with the Lightning Trainer")
 
     def test_dataloader(self) -> EVAL_DATALOADERS:
-        r"""
-        Implement one or multiple PyTorch DataLoaders for testing.
+        r"""An iterable or collection of iterables specifying test samples.
+
+        For more information about multiple dataloaders, see this :ref:`section <multiple-dataloaders>`.
 
         For data processing use the following pattern:
 
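
The removed list/dict examples still describe the supported return shapes; a condensed sketch follows. ``self.mnist``, ``self.cifar``, and ``self.batch_size`` are assumed attributes used for illustration::

    from torch.utils.data import DataLoader

    def train_dataloader(self):
        mnist_loader = DataLoader(self.mnist, batch_size=self.batch_size, shuffle=True)
        cifar_loader = DataLoader(self.cifar, batch_size=self.batch_size, shuffle=True)
        # dict return: each batch arrives as {"mnist": batch_mnist, "cifar": batch_cifar};
        # a list return ([mnist_loader, cifar_loader]) would yield [batch_mnist, batch_cifar]
        return {"mnist": mnist_loader, "cifar": cifar_loader}
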
@@ -477,44 +435,19 @@ def test_dataloader(self) -> EVAL_DATALOADERS:
         - :meth:`setup`
 
         Note:
-            Lightning adds the correct sampler for distributed and arbitrary hardware.
+            Lightning tries to add the correct sampler for distributed and arbitrary hardware.
             There is no need to set it yourself.
 
-        Return:
-            A :class:`torch.utils.data.DataLoader` or a sequence of them specifying testing samples.
-
-        Example::
-
-            def test_dataloader(self):
-                transform = transforms.Compose([transforms.ToTensor(),
-                                                transforms.Normalize((0.5,), (1.0,))])
-                dataset = MNIST(root='/path/to/mnist/', train=False, transform=transform,
-                                download=True)
-                loader = torch.utils.data.DataLoader(
-                    dataset=dataset,
-                    batch_size=self.batch_size,
-                    shuffle=False
-                )
-
-                return loader
-
-            # can also return multiple dataloaders
-            def test_dataloader(self):
-                return [loader_a, loader_b, ..., loader_n]
-
         Note:
             If you don't need a test dataset and a :meth:`test_step`, you don't need to implement
             this method.
-
-        Note:
-            In the case where you return multiple test dataloaders, the :meth:`test_step`
-            will have an argument ``dataloader_idx`` which matches the order here.
         """
         raise MisconfigurationException("`test_dataloader` must be implemented to be used with the Lightning Trainer")
 
     def val_dataloader(self) -> EVAL_DATALOADERS:
-        r"""
-        Implement one or multiple PyTorch DataLoaders for validation.
+        r"""An iterable or collection of iterables specifying validation samples.
+
+        For more information about multiple dataloaders, see this :ref:`section <multiple-dataloaders>`.
 
         The dataloader you return will not be reloaded unless you set
         :paramref:`~lightning.pytorch.trainer.Trainer.reload_dataloaders_every_n_epochs` to
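
The dropped Note about ``dataloader_idx`` still reflects runtime behavior: with multiple test dataloaders, :meth:`test_step` receives the loader's position. A sketch with placeholder loaders (``self.loader_a`` and ``self.loader_b`` are assumed)::

    def test_dataloader(self):
        # the order here fixes the dataloader_idx passed to test_step
        return [self.loader_a, self.loader_b]

    def test_step(self, batch, batch_idx, dataloader_idx=0):
        ...
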
@@ -528,44 +461,19 @@ def val_dataloader(self) -> EVAL_DATALOADERS:
         - :meth:`setup`
 
         Note:
-            Lightning adds the correct sampler for distributed and arbitrary hardware
+            Lightning tries to add the correct sampler for distributed and arbitrary hardware.
             There is no need to set it yourself.
 
-        Return:
-            A :class:`torch.utils.data.DataLoader` or a sequence of them specifying validation samples.
-
-        Examples::
-
-            def val_dataloader(self):
-                transform = transforms.Compose([transforms.ToTensor(),
-                                                transforms.Normalize((0.5,), (1.0,))])
-                dataset = MNIST(root='/path/to/mnist/', train=False,
-                                transform=transform, download=True)
-                loader = torch.utils.data.DataLoader(
-                    dataset=dataset,
-                    batch_size=self.batch_size,
-                    shuffle=False
-                )
-
-                return loader
-
-            # can also return multiple dataloaders
-            def val_dataloader(self):
-                return [loader_a, loader_b, ..., loader_n]
-
         Note:
             If you don't need a validation dataset and a :meth:`validation_step`, you don't need to
             implement this method.
-
-        Note:
-            In the case where you return multiple validation dataloaders, the :meth:`validation_step`
-            will have an argument ``dataloader_idx`` which matches the order here.
         """
         raise MisconfigurationException("`val_dataloader` must be implemented to be used with the Lightning Trainer")
 
     def predict_dataloader(self) -> EVAL_DATALOADERS:
-        r"""
-        Implement one or multiple PyTorch DataLoaders for prediction.
+        r"""An iterable or collection of iterables specifying prediction samples.
+
+        For more information about multiple dataloaders, see this :ref:`section <multiple-dataloaders>`.
 
         It's recommended that all data downloads and preparation happen in :meth:`prepare_data`.
 
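
A minimal ``val_dataloader`` consistent with the new phrasing; ``self.val_dataset`` and ``self.batch_size`` are assumed attributes::

    from torch.utils.data import DataLoader

    def val_dataloader(self):
        # evaluation loaders are typically not shuffled
        return DataLoader(self.val_dataset, batch_size=self.batch_size, shuffle=False)
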
@@ -574,15 +482,11 @@ def predict_dataloader(self) -> EVAL_DATALOADERS:
         - :meth:`setup`
 
         Note:
-            Lightning adds the correct sampler for distributed and arbitrary hardware
+            Lightning tries to add the correct sampler for distributed and arbitrary hardware.
             There is no need to set it yourself.
 
         Return:
             A :class:`torch.utils.data.DataLoader` or a sequence of them specifying prediction samples.
-
-        Note:
-            In the case where you return multiple prediction dataloaders, the :meth:`predict_step`
-            will have an argument ``dataloader_idx`` which matches the order here.
         """
         raise MisconfigurationException(
             "`predict_dataloader` must be implemented to be used with the Lightning Trainer"
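
A matching ``predict_dataloader`` sketch; ``self.predict_dataset`` and ``self.batch_size`` are illustrative assumptions::

    from torch.utils.data import DataLoader

    def predict_dataloader(self):
        # no shuffling for prediction, so outputs line up with inputs
        return DataLoader(self.predict_dataset, batch_size=self.batch_size)

    # Trainer.predict(model) then returns a list with one entry per batch
    # (or per dataloader, if several are returned).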