Skip to content

Commit f1a0388

Browse files
committed
Minor refactoring items
1 parent 61b8427 commit f1a0388

File tree

6 files changed

+9
-14
lines changed

6 files changed

+9
-14
lines changed

src/stem_continuation_dataset_generator/cluster.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@
66
from stem_continuation_dataset_generator.constants import DASK_CLUSTER_NAME
77

88
NUM_WORKERS = [4, 50]
9-
BUCKET = 's3://stem-continuation-dataset'
109

1110

1211
def get_client(

src/stem_continuation_dataset_generator/constants.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
import os
22

33
DATASET_TAGS = ['medium']
4-
CLEARML_DATASET_TRAINING_NAME = 'stem-continuation-dataset'
5-
CLEARML_DATASET_TRAINING_VERSION = '1.0.0'
4+
CLEARML_DATASET_NAME = 'stem_continuation_dataset'
5+
CLEARML_DATASET_VERSION = '1.0.0'
66
DEFAULT_STEM_NAME = 'drum'
77
STORAGE_BUCKET_NAME = 'stem-continuation-dataset'
88
DASK_CLUSTER_NAME = 'stem-continuation-dataset-generator-cluster'

src/stem_continuation_dataset_generator/dataset.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11

22
from clearml import Dataset
3-
from stem_continuation_dataset_generator.constants import CLEARML_DATASET_TRAINING_NAME, CLEARML_DATASET_TRAINING_VERSION
3+
from stem_continuation_dataset_generator.constants import CLEARML_DATASET_NAME, CLEARML_DATASET_VERSION
44
from stem_continuation_dataset_generator.utils.constants import get_clearml_project_name
55

66

@@ -16,8 +16,8 @@ def get_remote_dataset_by_id(id: str):
1616
def get_remote_dataset_by_tag(tag: str):
1717
dataset = Dataset.get(
1818
dataset_project=get_clearml_project_name(),
19-
dataset_name=CLEARML_DATASET_TRAINING_NAME,
20-
dateset_version=CLEARML_DATASET_TRAINING_VERSION,
19+
dataset_name=CLEARML_DATASET_NAME,
20+
dataset_version=CLEARML_DATASET_VERSION,
2121
dataset_tags=[tag],
2222
only_completed=False, # True
2323
only_published=False,

src/stem_continuation_dataset_generator/steps/encode.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -55,10 +55,6 @@ def encode_all(source_directory: str, output_directory: str):
5555
))
5656

5757
print('Encoding audio tracks')
58-
59-
# for i in range(len(params_list) - 1, 0, -1):
60-
# print(f'Processing {i} of {len(params_list)} {round(cast(float, i) / len(params_list) * 100)}')
61-
# encode(params_list[i])
6258

6359
futures = client.map(encode, params_list, retries=2, batch_size=8)
6460
progress(futures)

src/stem_continuation_dataset_generator/steps/upload.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
import multiprocessing
77
import multiprocessing.pool
88

9-
from stem_continuation_dataset_generator.constants import CLEARML_DATASET_TRAINING_VERSION, DATASET_TAGS, get_split_files_path
9+
from stem_continuation_dataset_generator.constants import CLEARML_DATASET_VERSION, DATASET_TAGS, get_split_files_path
1010
from stem_continuation_dataset_generator.utils.utils import upload_dataset
1111

1212

@@ -61,7 +61,7 @@ def upload(split_files_path: str, tags: List[str]):
6161
list(tqdm(pool.imap(download_file, inputs), total=len(inputs)))
6262

6363
print(f'Uploading {set} dataset to ClearML')
64-
upload_dataset(path=local_directory, version=CLEARML_DATASET_TRAINING_VERSION, tags=tags + ['final'], dataset_set=set)
64+
upload_dataset(path=local_directory, version=CLEARML_DATASET_VERSION, tags=tags + ['final'], dataset_set=set)
6565

6666

6767
if __name__ == '__main__':

src/stem_continuation_dataset_generator/utils/utils.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
from clearml import Dataset
22
import numpy as np
33

4-
from stem_continuation_dataset_generator.constants import CLEARML_DATASET_TRAINING_NAME
4+
from stem_continuation_dataset_generator.constants import CLEARML_DATASET_NAME
55
from stem_continuation_dataset_generator.utils.constants import get_clearml_project_name
66

77

@@ -10,7 +10,7 @@ def upload_dataset(path: str, version: str, tags: list[str] = [], dataset_set=No
1010
tags = [f'{dataset_set}-set'] + tags if dataset_set is not None else tags
1111
dataset = Dataset.create(
1212
dataset_project=get_clearml_project_name(),
13-
dataset_name=CLEARML_DATASET_TRAINING_NAME,
13+
dataset_name=CLEARML_DATASET_NAME,
1414
dataset_version=version,
1515
dataset_tags=tags,
1616
)

0 commit comments

Comments
 (0)