Commit adf287a

Merge branch '50-clowder20-submit-file-to-extractor' of https://github.com/clowder-framework/pyclowder into 50-clowder20-submit-file-to-extractor
2 parents 6ca6a82 + df55e40

File tree

2 files changed: 0 additions & 194 deletions

pyclowder/api/v2/datasets.py

Lines changed: 0 additions & 140 deletions
@@ -224,53 +224,6 @@ def submit_extraction(connector, host, key, datasetid, extractorname):
     return result.status_code
 
 
-# TODO not implemented in v2 yet
-def submit_extractions_by_collection(connector, host, key, collectionid, extractorname, recursive=True):
-    """Manually trigger an extraction on all datasets in a collection.
-
-    This will iterate through all datasets in the given collection and submit them to
-    the provided extractor.
-
-    Keyword arguments:
-    connector -- connector information, used to get missing parameters and send status updates
-    host -- the clowder host, including http and port, should end with a /
-    key -- the secret key to login to clowder
-    collectionid -- the collection UUID to submit
-    extractorname -- registered name of extractor to trigger
-    recursive -- whether to also submit child collection datasets recursively (defaults to True)
-    """
-
-    dslist = get_datasets(connector, host, key, collectionid)
-
-    for ds in dslist:
-        submit_extraction(connector, host, key, ds['id'], extractorname)
-
-    if recursive:
-        childcolls = get_child_collections(connector, host, key, collectionid)
-        for coll in childcolls:
-            submit_extractions_by_collection(connector, host, key, coll['id'], extractorname, recursive)
-
-
-# TODO tags not implemented in v2
-def upload_tags(connector, host, key, datasetid, tags):
-    """Upload dataset tags to Clowder.
-
-    Keyword arguments:
-    connector -- connector information, used to get missing parameters and send status updates
-    host -- the clowder host, including http and port, should end with a /
-    key -- the secret key to login to clowder
-    datasetid -- the dataset that is currently being processed
-    tags -- the tags to be uploaded
-    """
-
-    connector.status_update(StatusMessage.processing, {"type": "dataset", "id": datasetid}, "Uploading dataset tags.")
-
-    headers = {'Content-Type': 'application/json'}
-    url = '%sapi/datasets/%s/tags?key=%s' % (host, datasetid, key)
-    result = connector.post(url, headers=headers, data=json.dumps(tags),
-                            verify=connector.ssl_verify if connector else True)
-
-
 def upload_metadata(connector, host, key, datasetid, metadata):
     """Upload dataset JSON-LD metadata to Clowder.
 
@@ -291,96 +244,3 @@ def upload_metadata(connector, host, key, datasetid, metadata):
                             verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
 
-
-# TODO not done yet, need more testing
-class DatasetsApi(object):
-    """
-    API to manage the REST CRUD endpoints for datasets.
-    """
-
-    def __init__(self, client=None, host=None, key=None,
-                 username=None, password=None):
-        """Set client if provided, otherwise create a new one."""
-        if client:
-            self.client = client
-        else:
-            self.client = ClowderClient(host=host, key=key,
-                                        username=username, password=password)
-
-    def datasets_get(self):
-        """
-        Get the list of all available datasets.
-
-        :return: Full list of datasets.
-        :rtype: `requests.Response`
-        """
-        logging.debug("Getting all datasets")
-        try:
-            return self.client.get("/datasets")
-        except Exception as e:
-            logging.error("Error retrieving dataset list: %s", str(e))
-
-    def dataset_get(self, dataset_id):
-        """
-        Get a specific dataset by id.
-
-        :return: Dataset object as JSON.
-        :rtype: `requests.Response`
-        """
-        logging.debug("Getting dataset %s" % dataset_id)
-        try:
-            return self.client.get("/datasets/%s" % dataset_id)
-        except Exception as e:
-            logging.error("Error retrieving dataset %s: %s" % (dataset_id, str(e)))
-
-    def create_empty(self, dataset_id):
-        """
-        Create an empty dataset.
-
-        :return: If successful or not.
-        :rtype: `requests.Response`
-        """
-        logging.debug("Adding dataset")
-        try:
-            return self.client.post("/datasets/createempty", dataset_id)
-        except Exception as e:
-            logging.error("Error adding dataset %s: %s" % (dataset_id, str(e)))
-
-    def dataset_delete(self, dataset_id):
-        """
-        Delete a specific dataset by id.
-
-        :return: If successful or not.
-        :rtype: `requests.Response`
-        """
-        logging.debug("Deleting dataset %s" % dataset_id)
-        try:
-            return self.client.delete("/datasets/%s" % dataset_id)
-        except Exception as e:
-            logging.error("Error deleting dataset %s: %s" % (dataset_id, str(e)))
-
-    def upload_file(self, dataset_id, file):
-        """
-        Add a file to a dataset.
-
-        :return: If successful or not.
-        :rtype: `requests.Response`
-        """
-        logging.debug("Uploading a file to dataset %s" % dataset_id)
-        try:
-            return self.client.post_file("/uploadToDataset/%s" % dataset_id, file)
-        except Exception as e:
-            logging.error("Error uploading to dataset %s: %s" % (dataset_id, str(e)))
-
-    def add_metadata(self, dataset_id, metadata):
-        """
-        Add metadata to a dataset.
-
-        :return: If successful or not.
-        :rtype: `requests.Response`
-        """
-        logging.debug("Update metadata of dataset %s" % dataset_id)
-        try:
-            return self.client.post("/datasets/%s/metadata" % dataset_id, metadata)
-        except Exception as e:
-            logging.error("Error uploading metadata to dataset %s: %s" % (dataset_id, str(e)))
pyclowder/api/v2/files.py

Lines changed: 0 additions & 54 deletions
@@ -125,60 +125,6 @@ def submit_extraction(connector, host, key, fileid, extractorname):
     return result
 
 
-# TODO not implemented in v2
-def submit_extractions_by_dataset(connector, host, key, datasetid, extractorname, ext=False):
-    """Manually trigger an extraction on all files in a dataset.
-
-    This will iterate through all files in the given dataset and submit them to
-    the provided extractor.
-
-    Keyword arguments:
-    connector -- connector information, used to get missing parameters and send status updates
-    host -- the clowder host, including http and port, should end with a /
-    key -- the secret key to login to clowder
-    datasetid -- the dataset UUID to submit
-    extractorname -- registered name of extractor to trigger
-    ext -- extension to filter, e.g. 'tif' will only submit TIFF files for extraction
-    """
-
-    filelist = get_file_list(connector, host, key, datasetid)
-
-    for f in filelist:
-        # Only submit files that end with the given extension, if specified
-        if ext and not f['filename'].endswith(ext):
-            continue
-
-        submit_extraction(connector, host, key, f['id'], extractorname)
-
-
-# TODO not implemented in v2
-def submit_extractions_by_collection(connector, host, key, collectionid, extractorname, ext=False, recursive=True):
-    """Manually trigger an extraction on all files in a collection.
-
-    This will iterate through all datasets in the given collection and pass each one to
-    submit_extractions_by_dataset(), recursing into child collections when requested.
-
-    Keyword arguments:
-    connector -- connector information, used to get missing parameters and send status updates
-    host -- the clowder host, including http and port, should end with a /
-    key -- the secret key to login to clowder
-    collectionid -- the collection UUID to submit
-    extractorname -- registered name of extractor to trigger
-    ext -- extension to filter, e.g. 'tif' will only submit TIFF files for extraction
-    recursive -- whether to also submit child collection files recursively (defaults to True)
-    """
-
-    dslist = get_datasets(connector, host, key, collectionid)
-
-    for ds in dslist:
-        submit_extractions_by_dataset(connector, host, key, ds['id'], extractorname, ext)
-
-    if recursive:
-        childcolls = get_child_collections(connector, host, key, collectionid)
-        for coll in childcolls:
-            submit_extractions_by_collection(connector, host, key, coll['id'], extractorname, ext, recursive)
-
-
 def upload_metadata(connector, host, key, fileid, metadata):
     """Upload file JSON-LD metadata to Clowder.
 
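
As with the dataset helpers above, the removed per-file loop is easy to reproduce at the call site using the v2 submit_extraction kept in this file. A minimal sketch, assuming a v2 get_file_list (referenced by the removed code) is importable from pyclowder.api.v2.datasets and returns one dict per file with 'id' and 'filename' keys:

    from pyclowder.api.v2.files import submit_extraction
    # Assumption: get_file_list lives in the v2 datasets module and returns
    # dicts with 'id' and 'filename' keys, as the removed helper expected.
    from pyclowder.api.v2.datasets import get_file_list

    def submit_dataset_files(connector, host, key, datasetid, extractorname, ext=None):
        """Caller-side stand-in for the removed helper: submit each file in a
        dataset, optionally filtered by filename extension (e.g. 'tif')."""
        for f in get_file_list(connector, host, key, datasetid):
            if ext and not f['filename'].endswith(ext):
                continue  # skip files that do not match the requested extension
            submit_extraction(connector, host, key, f['id'], extractorname)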
