@@ -223,54 +223,6 @@ def submit_extraction(connector, host, key, datasetid, extractorname):
223223
224224 return result .status_code
225225
226-
# TODO not implemented in v2 yet
def submit_extractions_by_collection(connector, host, key, collectionid, extractorname, recursive=True):
    """Manually trigger an extraction on all datasets in a collection.

    This will iterate through all datasets in the given collection and submit them to
    the provided extractor.

    Keyword arguments:
    connector -- connector information, used to get missing parameters and send status updates
    host -- the clowder host, including http and port, should end with a /
    key -- the secret key to login to clowder
    collectionid -- the collection UUID whose datasets will be submitted
    extractorname -- registered name of extractor to trigger
    recursive -- whether to also submit child collection datasets recursively (defaults to True)
    """

    dslist = get_datasets(connector, host, key, collectionid)

    # Submit every dataset directly inside this collection.
    for ds in dslist:
        submit_extraction(connector, host, key, ds['id'], extractorname)

    # Recurse into child collections, propagating the recursive flag.
    if recursive:
        childcolls = get_child_collections(connector, host, key, collectionid)
        for coll in childcolls:
            submit_extractions_by_collection(connector, host, key, coll['id'], extractorname, recursive)
253-
# TODO tags not implemented in v2
def upload_tags(connector, host, key, datasetid, tags):
    """Upload dataset tags to Clowder.

    Keyword arguments:
    connector -- connector information, used to get missing parameters and send status updates
    host -- the clowder host, including http and port, should end with a /
    key -- the secret key to login to clowder
    datasetid -- the dataset that is currently being processed
    tags -- the tags to be uploaded
    """

    connector.status_update(StatusMessage.processing, {"type": "dataset", "id": datasetid},
                            "Uploading dataset tags.")

    headers = {'Content-Type': 'application/json'}
    url = '%sapi/datasets/%s/tags?key=%s' % (host, datasetid, key)
    result = connector.post(url, headers=headers, data=json.dumps(tags),
                            verify=connector.ssl_verify if connector else True)
    # Surface HTTP errors instead of silently discarding the response,
    # consistent with upload_metadata in this module.
    result.raise_for_status()
273-
274226def upload_metadata (connector , host , key , datasetid , metadata ):
275227 """Upload dataset JSON-LD metadata to Clowder.
276228
@@ -291,96 +243,3 @@ def upload_metadata(connector, host, key, datasetid, metadata):
291243 verify = connector .ssl_verify if connector else True )
292244 result .raise_for_status ()
293245
# TODO not done yet, need more testing
class DatasetsApi(object):
    """API to manage the REST CRUD endpoints for datasets.

    All methods delegate to an underlying client; on failure they log the
    error and implicitly return None (best-effort semantics).
    """

    def __init__(self, client=None, host=None, key=None,
                 username=None, password=None):
        """Set client if provided, otherwise create a new ClowderClient.

        Keyword arguments:
        client -- an existing client instance to reuse
        host -- the clowder host, used only when no client is given
        key -- the secret key to login to clowder
        username -- account name, alternative to key
        password -- account password, alternative to key
        """
        if client:
            self.client = client
        else:
            self.client = ClowderClient(host=host, key=key,
                                        username=username, password=password)

    def datasets_get(self):
        """Get the list of all available datasets.

        :return: Full list of datasets, or None on error.
        :rtype: `requests.Response`
        """
        logging.debug("Getting all datasets")
        try:
            return self.client.get("/datasets")
        except Exception as e:
            # Lazy %-args so the message is only formatted when emitted.
            logging.error("Error retrieving dataset list: %s", str(e))

    def dataset_get(self, dataset_id):
        """Get a specific dataset by id.

        :return: Dataset object as JSON, or None on error.
        :rtype: `requests.Response`
        """
        logging.debug("Getting dataset %s", dataset_id)
        try:
            return self.client.get("/datasets/%s" % dataset_id)
        except Exception as e:
            logging.error("Error retrieving dataset %s: %s", dataset_id, str(e))

    def create_empty(self, dataset_id):
        """Create an empty dataset.

        :return: If successful or not, None on error.
        :rtype: `requests.Response`
        """
        logging.debug("Adding dataset")
        try:
            return self.client.post("/datasets/createempty", dataset_id)
        except Exception as e:
            # Fixed message: this adds a dataset, not a datapoint.
            logging.error("Error adding dataset %s: %s", dataset_id, str(e))

    def dataset_delete(self, dataset_id):
        """Delete a specific dataset by id.

        :return: If successful or not, None on error.
        :rtype: `requests.Response`
        """
        logging.debug("Deleting dataset %s", dataset_id)
        try:
            return self.client.delete("/datasets/%s" % dataset_id)
        except Exception as e:
            # Fixed message: this deletes, it does not retrieve.
            logging.error("Error deleting dataset %s: %s", dataset_id, str(e))

    def upload_file(self, dataset_id, file):
        """Add a file to a dataset.

        :return: If successful or not, None on error.
        :rtype: `requests.Response`
        """
        logging.debug("Uploading a file to dataset %s", dataset_id)
        try:
            return self.client.post_file("/uploadToDataset/%s" % dataset_id, file)
        except Exception as e:
            logging.error("Error uploading to dataset %s: %s", dataset_id, str(e))

    def add_metadata(self, dataset_id, metadata):
        """Update the metadata of a dataset.

        :return: If successful or not, None on error.
        :rtype: `requests.Response`
        """
        logging.debug("Updating metadata of dataset %s", dataset_id)
        try:
            return self.client.post("/datasets/%s/metadata" % dataset_id, metadata)
        except Exception as e:
            logging.error("Error adding metadata to dataset %s: %s", dataset_id, str(e))
0 commit comments