@@ -87,7 +87,7 @@ def create_dataset_files(
 ) -> None:
   """Creates the dataset files."""
   # Creates the root directory
-  dataset_dir = dataset_dir.expanduser().resolve() / dataset_name
+  dataset_dir = dataset_dir.expanduser().resolve() / dataset_name.lower()
   dataset_dir.mkdir(parents=True)
   in_tfds = 'tensorflow_datasets' in dataset_dir.parts
 
@@ -118,28 +118,34 @@ def create_dataset_files(
   )
 
 
+def _get_filename(info: utils.DatasetInfo) -> str:
+  """Returns the dataset builder filename without Py extension."""
+  return f'{info.name.lower()}_dataset_builder'
+
+
 def _create_dataset_file(info: utils.DatasetInfo) -> None:
   """Create a new dataset from a template."""
-  file_path = info.path / f'{info.name}_dataset_builder.py'
+  file_path = info.path / (_get_filename(info) + '.py')
 
   content = builder_templates.create_builder_template(info)
   file_path.write_text(content)
 
 
 def _create_dataset_test(info: utils.DatasetInfo) -> None:
   """Adds the `dummy_data/` directory."""
-  file_path = info.path.joinpath(f'{info.name}_dataset_builder_test.py')
+  filename = _get_filename(info)
+  file_path = info.path / (filename + '_test.py')
 
   content = textwrap.dedent(f'''\
       """{info.name} dataset."""
 
-      from {info.ds_import} import {info.name}_dataset_builder
+      from {info.ds_import} import {filename}
       import {info.tfds_api} as tfds
 
       class {info.cls_name}Test(tfds.testing.DatasetBuilderTestCase):
         """Tests for {info.name} dataset."""
         # {info.todo}:
-        DATASET_CLASS = {info.name}_dataset_builder.Builder
+        DATASET_CLASS = {filename}.Builder
         SPLITS = {{
             'train': 3,  # Number of fake train example
             'test': 1,  # Number of fake test example
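
For illustration, a minimal standalone sketch of the filename convention the new _get_filename helper centralizes. The DatasetInfo class below is a simplified stand-in with only the fields the sketch needs (not the real utils.DatasetInfo), and the dataset name and path are hypothetical.

import dataclasses
import pathlib


@dataclasses.dataclass
class DatasetInfo:
  """Simplified stand-in for utils.DatasetInfo (assumed fields only)."""
  name: str
  path: pathlib.Path


def _get_filename(info: DatasetInfo) -> str:
  """Single place that decides the builder module name (lower-cased)."""
  return f'{info.name.lower()}_dataset_builder'


info = DatasetInfo(name='MyDataset', path=pathlib.Path('/tmp/my_dataset'))
print(info.path / (_get_filename(info) + '.py'))       # /tmp/my_dataset/mydataset_dataset_builder.py
print(info.path / (_get_filename(info) + '_test.py'))  # /tmp/my_dataset/mydataset_dataset_builder_test.py

Keeping the lower-casing in one helper means the builder module, its test file, and the import statement in the generated test can no longer drift apart.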