1616from pyclowder .utils import StatusMessage
1717
1818clowder_version = int (os .getenv ('CLOWDER_VERSION' , '1' ))
19+ # Import dataset API methods based on Clowder version
20+ if clowder_version == 2 :
21+ import pyclowder .api .v2 .datasets as datasets
22+ else :
23+ import pyclowder .api .v1 .datasets as datasets
24+
1925
2026def create_empty (connector , host , key , datasetname , description , parentid = None , spaceid = None ):
2127 """Create a new dataset in Clowder.
@@ -30,10 +36,7 @@ def create_empty(connector, host, key, datasetname, description, parentid=None,
3036 spaceid -- id of the space to add dataset to
3137 """
3238 client = ClowderClient (host = host , key = key )
33- if clowder_version == 2 :
34- datasetid = v2datasets .create_empty (connector , client , datasetname , description , parentid , spaceid )
35- else :
36- datasetid = v1datasets .create_empty (connector , client , datasetname , description , parentid , spaceid )
39+ datasetid = datasets .create_empty (connector , client , datasetname , description , parentid , spaceid )
3740 return datasetid
3841
3942
@@ -47,10 +50,7 @@ def delete(connector, host, key, datasetid):
4750 datasetid -- the dataset to delete
4851 """
4952 client = ClowderClient (host = host , key = key )
50- if clowder_version == 2 :
51- result = v2datasets .delete (connector , client , datasetid )
52- else :
53- result = v1datasets .delete (connector , client , datasetid )
53+ result = datasets .delete (connector , client , datasetid )
5454 result .raise_for_status ()
5555
5656 return json .loads (result .text )
@@ -91,10 +91,7 @@ def download(connector, host, key, datasetid):
9191 datasetid -- the file that is currently being processed
9292 """
9393 client = ClowderClient (host = host , key = key )
94- if clowder_version == 2 :
95- zipfile = v2datasets .download (connector , client , datasetid )
96- else :
97- zipfile = v1datasets .download (connector , client , datasetid )
94+ zipfile = datasets .download (connector , client , datasetid )
9895 return zipfile
9996
10097
@@ -109,12 +106,8 @@ def download_metadata(connector, host, key, datasetid, extractor=None):
109106 extractor -- extractor name to filter results (if only one extractor's metadata is desired)
110107 """
111108 client = ClowderClient (host = host , key = key )
112- if clowder_version == 2 :
113- result_json = v2datasets .download_metadata (connector , client , datasetid , extractor )
114- return result_json
115- else :
116- result_json = v1datasets .download_metadata (connector , client , datasetid , extractor )
117- return result_json
109+ result_json = datasets .download_metadata (connector , client , datasetid , extractor )
110+ return result_json
118111
119112
120113def get_info (connector , host , key , datasetid ):
@@ -127,10 +120,7 @@ def get_info(connector, host, key, datasetid):
127120 datasetid -- the dataset to get info of
128121 """
129122 client = ClowderClient (host = host , key = key )
130- if clowder_version == 2 :
131- info = v2datasets .get_info (connector , client , datasetid )
132- else :
133- info = v1datasets .get_info (connector , client , datasetid )
123+ info = datasets .get_info (connector , client , datasetid )
134124 return info
135125
136126
@@ -144,10 +134,7 @@ def get_file_list(connector, host, key, datasetid):
144134 datasetid -- the dataset to get filelist of
145135 """
146136 client = ClowderClient (host = host , key = key )
147- if clowder_version == 2 :
148- file_list = v2datasets .get_file_list (connector , client , datasetid )
149- else :
150- file_list = v1datasets .get_file_list (connector , client , datasetid )
137+ file_list = datasets .get_file_list (connector , client , datasetid )
151138 return file_list
152139
153140
@@ -163,10 +150,7 @@ def remove_metadata(connector, host, key, datasetid, extractor=None):
163150 !!! ALL JSON-LD METADATA WILL BE REMOVED IF NO extractor PROVIDED !!!
164151 """
165152 client = ClowderClient (host = host , key = key )
166- if clowder_version == 2 :
167- v2datasets .remove_metadata (connector , client , datasetid , extractor )
168- else :
169- v1datasets .remove_metadata (connector , client , datasetid , extractor )
153+ datasets .remove_metadata (connector , client , datasetid , extractor )
170154
171155
172156def submit_extraction (connector , host , key , datasetid , extractorname ):
@@ -180,10 +164,7 @@ def submit_extraction(connector, host, key, datasetid, extractorname):
180164 extractorname -- registered name of extractor to trigger
181165 """
182166 client = ClowderClient (host = host , key = key )
183- if clowder_version == 2 :
184- result_status_code = v2datasets .submit_extraction (connector , client , datasetid , extractorname )
185- else :
186- result_status_code = v1datasets .submit_extraction (connector , client , datasetid , extractorname )
167+ return datasets .submit_extraction (connector , client , datasetid , extractorname )
187168
188169
189170def submit_extractions_by_collection (connector , host , key , collectionid , extractorname , recursive = True ):
@@ -242,7 +223,30 @@ def upload_metadata(connector, host, key, datasetid, metadata):
242223 metadata -- the metadata to be uploaded
243224 """
244225 client = ClowderClient (host = host , key = key )
245- if clowder_version == 2 :
246- v2datasets .upload_metadata (connector , client , datasetid , metadata )
247- else :
248- v1datasets .upload_metadata (connector , client , datasetid , metadata )
226+ datasets .upload_metadata (connector , client , datasetid , metadata )
227+
228+
def upload_preview(connector, host, key, datasetid, previewfile, previewmetadata=None, preview_mimetype=None,
                   visualization_name=None, visualization_description=None, visualization_config_data=None,
                   visualization_component_id=None):
    """Upload a preview to Clowder and return the id of the created preview.

    Delegates to the version-specific ``datasets`` module selected at import
    time (Clowder v1 or v2, per the CLOWDER_VERSION environment variable).

    Keyword arguments:
    connector -- connector information, used to get missing parameters and send status updates
    host -- the clowder host, including http and port, should end with a /
    key -- the secret key to login to clowder
    datasetid -- the dataset that is currently being processed
    previewfile -- the file containing the preview
    previewmetadata -- any metadata to be associated with preview, can contain a section_id
                       to indicate the section this preview should be associated with.
    preview_mimetype -- (optional) MIME type of the preview file. By default, this is obtained from the
                        file itself and this parameter can be ignored. E.g. 'application/vnd.clowder+custom+xml'
    visualization_name -- (optional) display name for the visualization; passed through to the
                          version-specific upload_preview (presumably used by Clowder v2 — confirm)
    visualization_description -- (optional) description of the visualization; passed through unchanged
    visualization_config_data -- (optional) configuration data for the visualization; passed through unchanged
    visualization_component_id -- (optional) id of the visualization component; passed through unchanged

    Returns the preview id reported by the underlying API call.
    """
    client = ClowderClient(host=host, key=key)
    preview_id = datasets.upload_preview(connector, client, datasetid, previewfile, previewmetadata,
                                         preview_mimetype,
                                         visualization_name=visualization_name,
                                         visualization_description=visualization_description,
                                         visualization_config_data=visualization_config_data,
                                         visualization_component_id=visualization_component_id)
    return preview_id
0 commit comments