
Commit ed2f550

committed
calling appropriate methods for v1 and v2
1 parent cb7f40a commit ed2f550


1 file changed: +15 -38 lines changed


pyclowder/datasets.py

Lines changed: 15 additions & 38 deletions
@@ -16,6 +16,11 @@
 from pyclowder.utils import StatusMessage

 clowder_version = int(os.getenv('CLOWDER_VERSION', '1'))
+# Import dataset API methods based on Clowder version
+if clowder_version == 2:
+    import pyclowder.api.v2.datasets as datasets
+else:
+    import pyclowder.api.v1.datasets as datasets

 def create_empty(connector, host, key, datasetname, description, parentid=None, spaceid=None):
     """Create a new dataset in Clowder.
@@ -30,10 +35,7 @@ def create_empty(connector, host, key, datasetname, description, parentid=None,
     spaceid -- id of the space to add dataset to
     """
     client = ClowderClient(host=host, key=key)
-    if clowder_version == 2:
-        datasetid = v2datasets.create_empty(connector, client, datasetname, description, parentid, spaceid)
-    else:
-        datasetid = v1datasets.create_empty(connector, client, datasetname, description, parentid, spaceid)
+    datasetid = datasets.create_empty(connector, client, datasetname, description, parentid, spaceid)
     return datasetid

@@ -47,10 +49,7 @@ def delete(connector, host, key, datasetid):
     datasetid -- the dataset to delete
     """
     client = ClowderClient(host=host, key=key)
-    if clowder_version == 2:
-        result = v2datasets.delete(connector, client, datasetid)
-    else:
-        result = v1datasets.delete(connector, client, datasetid)
+    result = datasets.delete(connector, client, datasetid)
     result.raise_for_status()

     return json.loads(result.text)
@@ -91,10 +90,7 @@ def download(connector, host, key, datasetid):
     datasetid -- the file that is currently being processed
     """
     client = ClowderClient(host=host, key=key)
-    if clowder_version == 2:
-        zipfile = v2datasets.download(connector, client, datasetid)
-    else:
-        zipfile = v1datasets.download(connector, client, datasetid)
+    zipfile = datasets.download(connector, client, datasetid)
     return zipfile

@@ -109,12 +105,8 @@ def download_metadata(connector, host, key, datasetid, extractor=None):
     extractor -- extractor name to filter results (if only one extractor's metadata is desired)
     """
     client = ClowderClient(host=host, key=key)
-    if clowder_version == 2:
-        result_json = v2datasets.download_metadata(connector, client, datasetid, extractor)
-        return result_json
-    else:
-        result_json = v1datasets.download_metadata(connector, client, datasetid, extractor)
-        return result_json
+    result_json = datasets.download_metadata(connector, client, datasetid, extractor)
+    return result_json


 def get_info(connector, host, key, datasetid):
@@ -127,10 +119,7 @@ def get_info(connector, host, key, datasetid):
     datasetid -- the dataset to get info of
     """
     client = ClowderClient(host=host, key=key)
-    if clowder_version == 2:
-        info = v2datasets.get_info(connector, client, datasetid)
-    else:
-        info = v1datasets.get_info(connector, client, datasetid)
+    info = datasets.get_info(connector, client, datasetid)
     return info

@@ -144,10 +133,7 @@ def get_file_list(connector, host, key, datasetid):
     datasetid -- the dataset to get filelist of
     """
     client = ClowderClient(host=host, key=key)
-    if clowder_version == 2:
-        file_list = v2datasets.get_file_list(connector, client, datasetid)
-    else:
-        file_list = v1datasets.get_file_list(connector, client, datasetid)
+    file_list = datasets.get_file_list(connector, client, datasetid)
     return file_list

@@ -163,10 +149,7 @@ def remove_metadata(connector, host, key, datasetid, extractor=None):
     !!! ALL JSON-LD METADATA WILL BE REMOVED IF NO extractor PROVIDED !!!
     """
     client = ClowderClient(host=host, key=key)
-    if clowder_version == 2:
-        v2datasets.remove_metadata(connector, client, datasetid, extractor)
-    else:
-        v1datasets.remove_metadata(connector, client, datasetid, extractor)
+    datasets.remove_metadata(connector, client, datasetid, extractor)


 def submit_extraction(connector, host, key, datasetid, extractorname):
@@ -180,10 +163,7 @@ def submit_extraction(connector, host, key, datasetid, extractorname):
     extractorname -- registered name of extractor to trigger
     """
     client = ClowderClient(host=host, key=key)
-    if clowder_version == 2:
-        result_status_code = v2datasets.submit_extraction(connector, client, datasetid, extractorname)
-    else:
-        result_status_code = v1datasets.submit_extraction(connector, client, datasetid, extractorname)
+    return datasets.submit_extraction(connector, client, datasetid, extractorname)


 def submit_extractions_by_collection(connector, host, key, collectionid, extractorname, recursive=True):
@@ -242,7 +222,4 @@ def upload_metadata(connector, host, key, datasetid, metadata):
     metadata -- the metadata to be uploaded
     """
     client = ClowderClient(host=host, key=key)
-    if clowder_version == 2:
-        v2datasets.upload_metadata(connector, client, datasetid, metadata)
-    else:
-        v1datasets.upload_metadata(connector, client, datasetid, metadata)
+    datasets.upload_metadata(connector, client, datasetid, metadata)
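With this change the version check happens once, at import time: pyclowder.datasets binds the local name "datasets" to either pyclowder.api.v1.datasets or pyclowder.api.v2.datasets based on the CLOWDER_VERSION environment variable, and every public function simply delegates to that module. Below is a minimal caller-side sketch of the unchanged public interface; the host URL, API key, connector value, dataset name, and metadata dict are illustrative placeholders, not values from this commit.

import os

# Assumption: CLOWDER_VERSION must be set before pyclowder.datasets is first
# imported, because the v1/v2 backend module is chosen at import time.
os.environ["CLOWDER_VERSION"] = "2"

import pyclowder.datasets

# Placeholder values -- substitute a real Clowder host, API key, and connector
# (extractors would normally pass the connector object pyclowder gives them).
host = "https://clowder.example.org/"
key = "replace-with-api-key"
connector = None

# The calling code is identical for Clowder v1 and v2; dispatch is internal.
dataset_id = pyclowder.datasets.create_empty(connector, host, key,
                                             "demo dataset", "created from a sketch script")
pyclowder.datasets.upload_metadata(connector, host, key, dataset_id,
                                   {"content": {"note": "example metadata"}})
print(pyclowder.datasets.get_file_list(connector, host, key, dataset_id))

One consequence of selecting the backend once at import is that a single process cannot mix v1 and v2 calls; switching versions requires setting CLOWDER_VERSION before the first import (or reloading the module).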
