Skip to content

Commit 4b6b91e

Browse files
authored
Merge pull request #76 from clowder-framework/72-add-same-v2-visualization-to-pyclowder-dataset-class
Adding upload_preview method for dataset
2 parents 4d52fb2 + d6022d4 commit 4b6b91e

File tree

4 files changed

+138
-38
lines changed

4 files changed

+138
-38
lines changed

pyclowder/api/v2/datasets.py

Lines changed: 83 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
import tempfile
1010

1111
import requests
12+
from requests_toolbelt.multipart.encoder import MultipartEncoder
1213

1314
from pyclowder.client import ClowderClient
1415
from pyclowder.collections import get_datasets, get_child_collections, delete as delete_collection
@@ -233,3 +234,85 @@ def upload_metadata(connector, client, datasetid, metadata):
233234
verify=connector.ssl_verify if connector else True)
234235
result.raise_for_status()
235236

237+
def upload_preview(connector, client, datasetid, previewfile, previewmetadata=None, preview_mimetype=None,
                   visualization_name=None, visualization_description=None, visualization_config_data=None,
                   visualization_component_id=None):
    """Upload visualization to Clowder.

    Keyword arguments:
    connector -- connector information, used to get missing parameters and send status updates
    client -- ClowderClient containing authentication credentials
    datasetid -- the dataset that is currently being processed
    previewfile -- the file containing the preview
    previewmetadata -- any metadata to be associated with preview, can contain a section_id
                    to indicate the section this preview should be associated with.
    preview_mimetype -- (optional) MIME type of the preview file. By default, this is obtained from the
                    file itself and this parameter can be ignored. E.g. 'application/vnd.clowder+custom+xml'
    visualization_name -- (optional) name recorded for the visualization entry
    visualization_description -- (optional) description recorded for the visualization entry
    visualization_config_data -- (optional) dict of parameters stored in the visualization config
                    (defaults to an empty dict)
    visualization_component_id -- (optional) id of the front-end component used to render the preview

    Returns the id of the uploaded visualization data, or None if either upload step failed
    or the preview file does not exist.
    """

    connector.message_process({"type": "dataset", "id": datasetid}, "Uploading dataset preview.")
    logger = logging.getLogger(__name__)

    preview_id = None
    visualization_config_id = None

    if not os.path.exists(previewfile):
        logger.error("Visualization data file not found")
        return preview_id

    # Step 1: create a visualization config entry tied to this dataset.
    visualization_config_url = '%s/api/v2/visualizations/config' % client.host

    if visualization_config_data is None:
        visualization_config_data = dict()

    payload = json.dumps({
        "resource": {
            "collection": "datasets",
            "resource_id": datasetid
        },
        "client": client.host,
        "parameters": visualization_config_data,
        "visualization_mimetype": preview_mimetype,
        "visualization_component_id": visualization_component_id
    })

    headers = {
        "X-API-KEY": client.key,
        "Content-Type": "application/json"
    }

    response = connector.post(visualization_config_url, headers=headers, data=payload,
                              verify=connector.ssl_verify if connector else True)

    if response.status_code == 200:
        visualization_config_id = response.json()['id']
        logger.debug("Uploaded visualization config ID = [%s]", visualization_config_id)
    else:
        logger.error("An error occurred when uploading visualization config to dataset: " + datasetid)

    if visualization_config_id is not None:
        # Step 2: upload the visualization bytes, linked to the config created above.
        # NOTE(review): name/description/config are interpolated without URL-encoding; values
        # containing '&', '#' or spaces could corrupt the query string -- consider
        # urllib.parse.quote if such names are expected.
        visualization_url = '%s/api/v2/visualizations?name=%s&description=%s&config=%s' % (
            client.host, visualization_name, visualization_description, visualization_config_id)

        filename = os.path.basename(previewfile)
        # Open the preview inside a context manager so the handle is always closed;
        # the previous implementation leaked one file descriptor per upload.
        with open(previewfile, 'rb') as preview_fh:
            if preview_mimetype is not None:
                multipart_encoder_object = MultipartEncoder(
                    fields={'file': (filename, preview_fh, preview_mimetype)})
            else:
                multipart_encoder_object = MultipartEncoder(fields={'file': (filename, preview_fh)})
            headers = {'X-API-KEY': client.key,
                       'Content-Type': multipart_encoder_object.content_type}
            response = connector.post(visualization_url, data=multipart_encoder_object, headers=headers,
                                      verify=connector.ssl_verify if connector else True)

        if response.status_code == 200:
            preview_id = response.json()['id']
            logger.debug("Uploaded visualization data ID = [%s]", preview_id)
        else:
            logger.error("An error occurred when uploading the visualization data to dataset: " + datasetid)

    return preview_id
318+

pyclowder/datasets.py

Lines changed: 42 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,12 @@
1616
from pyclowder.utils import StatusMessage
1717

1818
clowder_version = int(os.getenv('CLOWDER_VERSION', '1'))
19+
# Import dataset API methods based on Clowder version
20+
if clowder_version == 2:
21+
import pyclowder.api.v2.datasets as datasets
22+
else:
23+
import pyclowder.api.v1.datasets as datasets
24+
1925

2026
def create_empty(connector, host, key, datasetname, description, parentid=None, spaceid=None):
2127
"""Create a new dataset in Clowder.
@@ -30,10 +36,7 @@ def create_empty(connector, host, key, datasetname, description, parentid=None,
3036
spaceid -- id of the space to add dataset to
3137
"""
3238
client = ClowderClient(host=host, key=key)
33-
if clowder_version == 2:
34-
datasetid = v2datasets.create_empty(connector, client, datasetname, description, parentid, spaceid)
35-
else:
36-
datasetid = v1datasets.create_empty(connector, client, datasetname, description, parentid, spaceid)
39+
datasetid = datasets.create_empty(connector, client, datasetname, description, parentid, spaceid)
3740
return datasetid
3841

3942

@@ -47,10 +50,7 @@ def delete(connector, host, key, datasetid):
4750
datasetid -- the dataset to delete
4851
"""
4952
client = ClowderClient(host=host, key=key)
50-
if clowder_version == 2:
51-
result = v2datasets.delete(connector, client, datasetid)
52-
else:
53-
result = v1datasets.delete(connector, client, datasetid)
53+
result = datasets.delete(connector, client, datasetid)
5454
result.raise_for_status()
5555

5656
return json.loads(result.text)
@@ -91,10 +91,7 @@ def download(connector, host, key, datasetid):
9191
datasetid -- the file that is currently being processed
9292
"""
9393
client = ClowderClient(host=host, key=key)
94-
if clowder_version == 2:
95-
zipfile = v2datasets.download(connector, client, datasetid)
96-
else:
97-
zipfile = v1datasets.download(connector, client, datasetid)
94+
zipfile = datasets.download(connector, client, datasetid)
9895
return zipfile
9996

10097

@@ -109,12 +106,8 @@ def download_metadata(connector, host, key, datasetid, extractor=None):
109106
extractor -- extractor name to filter results (if only one extractor's metadata is desired)
110107
"""
111108
client = ClowderClient(host=host, key=key)
112-
if clowder_version == 2:
113-
result_json = v2datasets.download_metadata(connector, client, datasetid, extractor)
114-
return result_json
115-
else:
116-
result_json = v1datasets.download_metadata(connector, client, datasetid, extractor)
117-
return result_json
109+
result_json = datasets.download_metadata(connector, client, datasetid, extractor)
110+
return result_json
118111

119112

120113
def get_info(connector, host, key, datasetid):
@@ -127,10 +120,7 @@ def get_info(connector, host, key, datasetid):
127120
datasetid -- the dataset to get info of
128121
"""
129122
client = ClowderClient(host=host, key=key)
130-
if clowder_version == 2:
131-
info = v2datasets.get_info(connector, client, datasetid)
132-
else:
133-
info = v1datasets.get_info(connector, client, datasetid)
123+
info = datasets.get_info(connector, client, datasetid)
134124
return info
135125

136126

@@ -144,10 +134,7 @@ def get_file_list(connector, host, key, datasetid):
144134
datasetid -- the dataset to get filelist of
145135
"""
146136
client = ClowderClient(host=host, key=key)
147-
if clowder_version == 2:
148-
file_list = v2datasets.get_file_list(connector, client, datasetid)
149-
else:
150-
file_list = v1datasets.get_file_list(connector, client, datasetid)
137+
file_list = datasets.get_file_list(connector, client, datasetid)
151138
return file_list
152139

153140

@@ -163,10 +150,7 @@ def remove_metadata(connector, host, key, datasetid, extractor=None):
163150
!!! ALL JSON-LD METADATA WILL BE REMOVED IF NO extractor PROVIDED !!!
164151
"""
165152
client = ClowderClient(host=host, key=key)
166-
if clowder_version == 2:
167-
v2datasets.remove_metadata(connector, client, datasetid, extractor)
168-
else:
169-
v1datasets.remove_metadata(connector, client, datasetid, extractor)
153+
datasets.remove_metadata(connector, client, datasetid, extractor)
170154

171155

172156
def submit_extraction(connector, host, key, datasetid, extractorname):
@@ -180,10 +164,7 @@ def submit_extraction(connector, host, key, datasetid, extractorname):
180164
extractorname -- registered name of extractor to trigger
181165
"""
182166
client = ClowderClient(host=host, key=key)
183-
if clowder_version == 2:
184-
result_status_code = v2datasets.submit_extraction(connector, client, datasetid, extractorname)
185-
else:
186-
result_status_code = v1datasets.submit_extraction(connector, client, datasetid, extractorname)
167+
return datasets.submit_extraction(connector, client, datasetid, extractorname)
187168

188169

189170
def submit_extractions_by_collection(connector, host, key, collectionid, extractorname, recursive=True):
@@ -242,7 +223,30 @@ def upload_metadata(connector, host, key, datasetid, metadata):
242223
metadata -- the metadata to be uploaded
243224
"""
244225
client = ClowderClient(host=host, key=key)
245-
if clowder_version == 2:
246-
v2datasets.upload_metadata(connector, client, datasetid, metadata)
247-
else:
248-
v1datasets.upload_metadata(connector, client, datasetid, metadata)
226+
datasets.upload_metadata(connector, client, datasetid, metadata)
227+
228+
229+
def upload_preview(connector, host, key, datasetid, previewfile, previewmetadata=None, preview_mimetype=None,
                   visualization_name=None, visualization_description=None, visualization_config_data=None,
                   visualization_component_id=None):
    """Upload preview to Clowder.

    Delegates to the version-specific dataset API module selected at import
    time from the CLOWDER_VERSION environment variable.

    Keyword arguments:
    connector -- connector information, used to get missing parameters and send status updates
    host -- the clowder host, including http and port, should end with a /
    key -- the secret key to login to clowder
    datasetid -- the dataset that is currently being processed
    previewfile -- the file containing the preview
    previewmetadata -- any metadata to be associated with preview, can contain a section_id
                    to indicate the section this preview should be associated with.
    preview_mimetype -- (optional) MIME type of the preview file. By default, this is obtained from the
                    file itself and this parameter can be ignored. E.g. 'application/vnd.clowder+custom+xml'
    """

    client = ClowderClient(host=host, key=key)
    return datasets.upload_preview(
        connector, client, datasetid, previewfile, previewmetadata, preview_mimetype,
        visualization_name=visualization_name,
        visualization_description=visualization_description,
        visualization_config_data=visualization_config_data,
        visualization_component_id=visualization_component_id)
Binary file (188 KB) — presumably the sample `preview_file.jpeg` added for the test extractor; content not rendered in this diff view.

sample-extractors/test-dataset-extractor/test-dataset-extractor.py

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -65,6 +65,19 @@ def process_message(self, connector, host, secret_key, resource, parameters):
6565
else:
6666
logger.info("Metadata: %s", dataset_metadata)
6767

68+
# Upload a preview to dataset
69+
# Local file path to file which you want to upload to dataset for preview
70+
preview_file_path = os.path.join(os.getcwd(), 'preview_file.jpeg')
71+
preview_id = pyclowder.datasets.upload_preview(connector, host, secret_key, dataset_id, preview_file_path, None,
72+
"image/jpeg", visualization_name="test-dataset-extractor",
73+
visualization_component_id="basic-image-component")
74+
if preview_id is None:
75+
logger.info("Preview upload failed")
76+
else:
77+
logger.info("Preview %s uploaded to dataset successfully ", preview_id)
78+
79+
80+
6881

6982
if __name__ == "__main__":
7083
extractor = TestDatasetExtractor()

0 commit comments

Comments
 (0)