@@ -583,7 +583,7 @@ def load(
       all splits in a `Dict[Split, tf.data.Dataset]`
     data_dir: directory to read/write data. Defaults to the value of the
       environment variable TFDS_DATA_DIR, if set, otherwise falls back to
-      datasets are stored.
+      '~/tensorflow_datasets'.
     batch_size: `int`, if set, add a batch dimension to examples. Note that
       variable length features will be 0-padded. If `batch_size=-1`, will return
       the full dataset as `tf.Tensor`s.
@@ -593,7 +593,7 @@ def load(
       `tfds.core.DatasetBuilder.download_and_prepare` before calling
       `tfds.core.DatasetBuilder.as_dataset`. If `False`, data is expected to be
       in `data_dir`. If `True` and the data is already in `data_dir`,
-      when data_dir is a Placer path.
+      `download_and_prepare` is a no-op.
     as_supervised: `bool`, if `True`, the returned `tf.data.Dataset` will have a
       2-tuple structure `(input, label)` according to
       `builder.info.supervised_keys`. If `False`, the default, the returned
@@ -637,7 +637,7 @@ def load(
       (version, features, splits, num_examples,...). Note that the `ds_info`
       object documents the entire dataset, regardless of the `split` requested.
       Split-specific information is available in `ds_info.splits`.
-  """
+  """  # fmt: skip
   dbuilder = _fetch_builder(
       name,
       data_dir,
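
For context on the `load()` docstring touched above, here is a minimal usage sketch of how the documented arguments fit together; the `'mnist'` dataset name is only illustrative and not part of this change:

```python
import tensorflow_datasets as tfds

# Load the train split as (input, label) 2-tuples and also return the
# DatasetInfo object described in the docstring above.
ds, ds_info = tfds.load(
    "mnist",             # illustrative dataset name
    split="train",
    as_supervised=True,  # yield (input, label) tuples per supervised_keys
    with_info=True,
)

# With batch_size=-1 the full split would instead come back as tf.Tensors:
# full_split = tfds.load("mnist", split="train", batch_size=-1)

for image, label in ds.take(1):
  print(image.shape, label)
```
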
@@ -752,12 +752,12 @@ def data_source(
       all splits in a `Dict[Split, Sequence]`
     data_dir: directory to read/write data. Defaults to the value of the
       environment variable TFDS_DATA_DIR, if set, otherwise falls back to
-      datasets are stored.
+      '~/tensorflow_datasets'.
     download: `bool` (optional), whether to call
       `tfds.core.DatasetBuilder.download_and_prepare` before calling
       `tfds.core.DatasetBuilder.as_data_source`. If `False`, data is expected to
       be in `data_dir`. If `True` and the data is already in `data_dir`,
-      when data_dir is a Placer path.
+      `download_and_prepare` is a no-op.
     decoders: Nested dict of `Decoder` objects which allow to customize the
       decoding. The structure should match the feature structure, but only
       customized feature keys need to be present. See [the
@@ -784,7 +784,7 @@ def data_source(
   Returns:
     `Sequence` if `split`,
     `dict<key: tfds.Split, value: Sequence>` otherwise.
-  """
+  """  # fmt:skip
   builder_kwargs = _set_file_format_for_data_source(builder_kwargs)
   dbuilder = _fetch_builder(
       name,
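
A similar sketch for the `data_source()` docstring touched above, assuming the dataset has already been prepared in a random-access file format (e.g. ArrayRecord); again, `'mnist'` is only an illustrative name:

```python
import tensorflow_datasets as tfds

# data_source returns a random-access Sequence rather than a tf.data.Dataset,
# so examples can be indexed directly without building an input pipeline.
source = tfds.data_source("mnist", split="train")

print(len(source))   # number of examples in the split
example = source[0]  # dict of decoded features, e.g. {'image': ..., 'label': ...}
```
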