@@ -16,8 +16,7 @@ def create_empty(connector, client, datasetname, description, parentid=None, spa
1616
1717 Keyword arguments:
1818 connector -- connector information, used to get missing parameters and send status updates
19- host -- the clowder host, including http and port, should end with a /
20- key -- the secret key to login to clowder
19+ client -- ClowderClient containing authentication credentials
2120 datasetname -- name of new dataset to create
2221 description -- description of new dataset
2322 parentid -- id of parent collection
@@ -56,61 +55,58 @@ def create_empty(connector, client, datasetname, description, parentid=None, spa
5655
5756 return datasetid
5857
59- def delete(connector, host, key, datasetid):
58+ def delete(connector, client, datasetid):
6059 """Delete dataset from Clowder.
6160
6261 Keyword arguments:
6362 connector -- connector information, used to get missing parameters and send status updates
64- host -- the clowder host, including http and port, should end with a /
65- key -- the secret key to login to clowder
63+ client -- ClowderClient containing authentication credentials
6664 datasetid -- the dataset to delete
6765 """
68- headers = {"Authorization": "Bearer " + key}
66+ headers = {"Authorization": "Bearer " + client.key}
6967
70- url = "%sapi/v2/datasets/%s" % (host, datasetid)
68+ url = "%sapi/v2/datasets/%s" % (client.host, datasetid)
7169
7270 result = requests.delete(url, headers=headers, verify=connector.ssl_verify if connector else True)
7371 result.raise_for_status()
7472
7573 return json.loads(result.text)
7674
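As a usage sketch of the new signature (assuming this module is importable as pyclowder.datasets, that ClowderClient lives in pyclowder.client and accepts host and key keyword arguments, and that the host ends with a trailing slash as the URL construction above requires; connector may be None in a standalone script because delete only reads connector.ssl_verify when one is given):

    from pyclowder.client import ClowderClient
    from pyclowder.datasets import delete

    client = ClowderClient(host="https://clowder.example.com/", key="SECRET_KEY")
    response = delete(None, client, "some-dataset-uuid")  # hypothetical UUID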
7775 # TODO collection not implemented yet in v2
78- def delete_by_collection(connector, host, key, collectionid, recursive=True, delete_colls=False):
76+ def delete_by_collection(connector, client, collectionid, recursive=True, delete_colls=False):
7977 """Delete datasets from Clowder by iterating through collection.
8078
8179 Keyword arguments:
8280 connector -- connector information, used to get missing parameters and send status updates
83- host -- the clowder host, including http and port, should end with a /
84- key -- the secret key to login to clowder
81+ client -- ClowderClient containing authentication credentials
8582 collectionid -- the collection to walk
8683 recursive -- whether to also iterate across child collections
8784 delete_colls -- whether to also delete collections containing the datasets
8885 """
89- dslist = get_datasets(connector, host, key, collectionid)
86+ dslist = get_datasets(connector, client.host, client.key, collectionid)
9087 for ds in dslist:
91- delete(connector, host, key, ds['id'])
88+ delete(connector, client, ds['id'])
9289
9390 if recursive:
94- childcolls = get_child_collections(connector, host, key, collectionid)
91+ childcolls = get_child_collections(connector, client.host, client.key, collectionid)
9592 for coll in childcolls:
96- delete_by_collection(connector, host, key, coll['id'], recursive, delete_colls)
93+ delete_by_collection(connector, client, coll['id'], recursive, delete_colls)
9794
9895 if delete_colls:
99- delete_collection(connector, host, key, collectionid)
96+ delete_collection(connector, client.host, client.key, collectionid)
10097
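A hedged sketch of a full recursive cleanup (hypothetical IDs; per the TODO above, collection endpoints are not yet implemented in v2, so this assumes a server that still supports them):

    # Delete every dataset under the collection, walk child collections,
    # then remove the emptied collections themselves.
    delete_by_collection(connector, client, "some-collection-uuid",
                         recursive=True, delete_colls=True)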
101- def download(connector, host, key, datasetid):
98+ def download(connector, client, datasetid):
10299 """Download dataset to be processed from Clowder as zip file.
103100
104101 Keyword arguments:
105102 connector -- connector information, used to get missing parameters and send status updates
106- host -- the clowder host, including http and port, should end with a /
107- key -- the secret key to login to clowder
103+ client -- ClowderClient containing authentication credentials
108104 datasetid -- the dataset that is currently being processed
109105 """
110106 connector.message_process({"type": "dataset", "id": datasetid}, "Downloading dataset.")
111107
112108 # fetch dataset zipfile
113- url = '%sapi/datasets/%s/download?key=%s' % (host, datasetid, key)
109+ url = '%sapi/datasets/%s/download?key=%s' % (client.host, datasetid, client.key)
114110 result = requests.get(url, stream=True,
115111 verify=connector.ssl_verify if connector else True)
116112 result.raise_for_status()
@@ -122,20 +118,19 @@ def download(connector, host, key, datasetid):
122118
123119 return zipfile
124120
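A sketch of consuming the returned archive (this assumes, per the elided lines above, that download streams the zip to a temporary file and returns its path; a real connector is required here because download calls connector.message_process):

    import zipfile as zf

    path = download(connector, client, "some-dataset-uuid")
    with zf.ZipFile(path) as archive:
        archive.extractall("/tmp/dataset_contents")  # hypothetical target directory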
125- def download_metadata(connector, host, key, datasetid, extractor=None):
121+ def download_metadata(connector, client, datasetid, extractor=None):
126122 """Download dataset JSON-LD metadata from Clowder.
127123
128124 Keyword arguments:
129125 connector -- connector information, used to get missing parameters and send status updates
130- host -- the clowder host, including http and port, should end with a /
131- key -- the secret key to login to clowder
126+ client -- ClowderClient containing authentication credentials
132127 datasetid -- the dataset whose metadata will be fetched
133128 extractor -- extractor name to filter results (if only one extractor's metadata is desired)
134129 """
135- headers = {"Authorization": "Bearer " + key}
130+ headers = {"Authorization": "Bearer " + client.key}
136131
137132 filterstring = "" if extractor is None else "?extractor=%s" % extractor
138- url = '%sapi/v2/datasets/%s/metadata' % (host, datasetid)
133+ url = '%sapi/v2/datasets/%s/metadata%s' % (client.host, datasetid, filterstring)
139134
140135 # fetch data
141136 result = requests.get(url, stream=True, headers=headers,
@@ -144,78 +139,74 @@ def download_metadata(connector, host, key, datasetid, extractor=None):
144139
145140 return result.json()
146141
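For example, to pull only one extractor's metadata (hypothetical extractor name; connector can be None since only connector.ssl_verify is read):

    md = download_metadata(None, client, "some-dataset-uuid",
                           extractor="ncsa.wordcount")
    print(md)  # shape of the JSON depends on the v2 metadata endpoint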
147- def get_info(connector, host, key, datasetid):
142+ def get_info(connector, client, datasetid):
148143 """Get basic dataset information from UUID.
149144
150145 Keyword arguments:
151146 connector -- connector information, used to get missing parameters and send status updates
152- host -- the clowder host, including http and port, should end with a /
153- key -- the secret key to login to clowder
147+ client -- ClowderClient containing authentication credentials
154148 datasetid -- the dataset to get info of
155149 """
156- headers = {"Authorization": "Bearer " + key}
150+ headers = {"Authorization": "Bearer " + client.key}
157151
158- url = "%sapi/v2/datasets/%s" % (host, datasetid)
152+ url = "%sapi/v2/datasets/%s" % (client.host, datasetid)
159153
160154 result = requests.get(url, headers=headers,
161155 verify=connector.ssl_verify if connector else True)
162156 result.raise_for_status()
163157
164158 return json.loads(result.text)
165159
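A minimal call (field names in the response are assumed from the v1 API and should be verified against v2):

    info = get_info(None, client, "some-dataset-uuid")
    print(info.get("name"), info.get("description"))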
166- def get_file_list(connector, host, key, datasetid):
160+ def get_file_list(connector, client, datasetid):
167161 """Get list of files in a dataset as JSON object.
168162
169163 Keyword arguments:
170164 connector -- connector information, used to get missing parameters and send status updates
171- host -- the clowder host, including http and port, should end with a /
172- key -- the secret key to login to clowder
165+ client -- ClowderClient containing authentication credentials
173166 datasetid -- the dataset to get the file list of
174167 """
175- headers = {"Authorization": "Bearer " + key}
168+ headers = {"Authorization": "Bearer " + client.key}
176169
177- url = "%sapi/v2/datasets/%s/files" % (host, datasetid)
170+ url = "%sapi/v2/datasets/%s/files" % (client.host, datasetid)
178171
179172 result = requests.get(url, headers=headers, verify=connector.ssl_verify if connector else True)
180173 result.raise_for_status()
181174
182175 return json.loads(result.text)
183176
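A sketch of iterating the result (assuming the endpoint returns a JSON list whose entries carry id and name fields, as in v1):

    for f in get_file_list(None, client, "some-dataset-uuid"):
        print(f["id"], f.get("name"))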
184- def remove_metadata(connector, host, key, datasetid, extractor=None):
177+ def remove_metadata(connector, client, datasetid, extractor=None):
185178 """Delete dataset JSON-LD metadata from Clowder.
186179
187180 Keyword arguments:
188181 connector -- connector information, used to get missing parameters and send status updates
189- host -- the clowder host, including http and port, should end with a /
190- key -- the secret key to login to clowder
182+ client -- ClowderClient containing authentication credentials
191183 datasetid -- the dataset to remove metadata from
192184 extractor -- extractor name to filter deletion
193185 !!! ALL JSON-LD METADATA WILL BE REMOVED IF NO extractor PROVIDED !!!
194186 """
195- headers = {"Authorization": "Bearer " + key}
187+ headers = {"Authorization": "Bearer " + client.key}
196188
197189 filterstring = "" if extractor is None else "?extractor=%s" % extractor
198- url = '%sapi/v2/datasets/%s/metadata' % (host, datasetid)
190+ url = '%sapi/v2/datasets/%s/metadata%s' % (client.host, datasetid, filterstring)
199191
200192 # delete the metadata
201193 result = requests.delete(url, stream=True, headers=headers,
202194 verify=connector.ssl_verify if connector else True)
203195 result.raise_for_status()
204196
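Given the warning above, a cautious caller should always name the extractor unless a full wipe is intended (hypothetical extractor name):

    # Remove only this extractor's metadata; omitting the argument
    # would delete ALL JSON-LD metadata on the dataset.
    remove_metadata(None, client, "some-dataset-uuid", extractor="ncsa.wordcount")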
205- def submit_extraction(connector, host, key, datasetid, extractorname):
197+ def submit_extraction(connector, client, datasetid, extractorname):
206198 """Submit dataset for extraction by given extractor.
207199
208200 Keyword arguments:
209201 connector -- connector information, used to get missing parameters and send status updates
210- host -- the clowder host, including http and port, should end with a /
211- key -- the secret key to login to clowder
202+ client -- ClowderClient containing authentication credentials
212203 datasetid -- the dataset UUID to submit
213204 extractorname -- registered name of extractor to trigger
214205 """
215206 headers = {'Content-Type': 'application/json',
216- "Authorization": "Bearer " + key}
207+ "Authorization": "Bearer " + client.key}
217208
218- url = "%sapi/v2/datasets/%s/extractions?key=%s" % (host, datasetid)
209+ url = "%sapi/v2/datasets/%s/extractions" % (client.host, datasetid)
219210
220211 result = requests.post(url,
221212 headers=headers,
@@ -225,64 +216,61 @@ def submit_extraction(connector, host, key, datasetid, extractorname):
225216
226217 return result.status_code
227218
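A sketch of triggering an extraction (hypothetical extractor name; any HTTP error raises before the status code is returned):

    status = submit_extraction(None, client, "some-dataset-uuid", "ncsa.wordcount")
    print("submission returned HTTP %s" % status)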
228- def submit_extractions_by_collection(connector, host, key, collectionid, extractorname, recursive=True):
219+ def submit_extractions_by_collection(connector, client, collectionid, extractorname, recursive=True):
229220 """Manually trigger an extraction on all datasets in a collection.
230221
231222 This will iterate through all datasets in the given collection and submit them to
232223 the provided extractor.
233224
234225 Keyword arguments:
235226 connector -- connector information, used to get missing parameters and send status updates
236- host -- the clowder host, including http and port, should end with a /
237- key -- the secret key to login to clowder
227+ client -- ClowderClient containing authentication credentials
238228 collectionid -- the collection UUID whose datasets will be submitted
239229 extractorname -- registered name of extractor to trigger
240230 recursive -- whether to also submit child collection datasets recursively (defaults to True)
241231 """
242- dslist = get_datasets(connector, host, key, collectionid)
232+ dslist = get_datasets(connector, client.host, client.key, collectionid)
243233
244234 for ds in dslist:
245- submit_extraction(connector, host, key, ds['id'], extractorname)
235+ submit_extraction(connector, client, ds['id'], extractorname)
246236
247237 if recursive:
248- childcolls = get_child_collections(connector, host, key, collectionid)
238+ childcolls = get_child_collections(connector, client.host, client.key, collectionid)
249239 for coll in childcolls:
250- submit_extractions_by_collection(connector, host, key, coll['id'], extractorname, recursive)
240+ submit_extractions_by_collection(connector, client, coll['id'], extractorname, recursive)
251241
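A sketch of fanning one extractor out over a collection tree (hypothetical IDs; as with the other collection helpers, this assumes the v2 server exposes collection endpoints):

    submit_extractions_by_collection(connector, client, "some-collection-uuid",
                                     "ncsa.wordcount", recursive=True)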
252242 # TODO tags not implemented in v2
253- def upload_tags(connector, host, key, datasetid, tags):
243+ def upload_tags(connector, client, datasetid, tags):
254244 """Upload dataset tag to Clowder.
255245
256246 Keyword arguments:
257247 connector -- connector information, used to get missing parameters and send status updates
258- host -- the clowder host, including http and port, should end with a /
259- key -- the secret key to login to clowder
248+ client -- ClowderClient containing authentication credentials
260249 datasetid -- the dataset that is currently being processed
261250 tags -- the tags to be uploaded
262251 """
263252 connector.status_update(StatusMessage.processing, {"type": "dataset", "id": datasetid}, "Uploading dataset tags.")
264253
265254 headers = {'Content-Type': 'application/json'}
266- url = '%sapi/datasets/%s/tags?key=%s' % (host, datasetid, key)
255+ url = '%sapi/datasets/%s/tags?key=%s' % (client.host, datasetid, client.key)
267256 result = connector.post(url, headers=headers, data=json.dumps(tags),
268257 verify=connector.ssl_verify if connector else True)
269258
270259
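Per the TODO above this still targets the v1 tags endpoint; a sketch of the call shape (payload layout assumed from the v1 API; a real connector is required because both connector.status_update and connector.post are used):

    upload_tags(connector, client, "some-dataset-uuid",
                {"tags": ["maize", "2019-season"]})  # hypothetical tag values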
271- def upload_metadata(connector, host, key, datasetid, metadata):
260+ def upload_metadata(connector, client, datasetid, metadata):
272261 """Upload dataset JSON-LD metadata to Clowder.
273262
274263 Keyword arguments:
275264 connector -- connector information, used to get missing parameters and send status updates
276- host -- the clowder host, including http and port, should end with a /
277- key -- the secret key to login to clowder
265+ client -- ClowderClient containing authentication credentials
278266 datasetid -- the dataset that is currently being processed
279267 metadata -- the metadata to be uploaded
280268 """
281269 headers = {'Content-Type': 'application/json',
282- "Authorization": "Bearer " + key}
270+ "Authorization": "Bearer " + client.key}
283271 connector.message_process({"type": "dataset", "id": datasetid}, "Uploading dataset metadata.")
284272
285- url = '%sapi/v2/datasets/%s/metadata' % (host, datasetid)
273+ url = '%sapi/v2/datasets/%s/metadata' % (client.host, datasetid)
286274 result = requests.post(url, headers=headers, data=json.dumps(metadata),
287275 verify=connector.ssl_verify if connector else True)
288276 result.raise_for_status()
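A sketch of posting extractor-generated metadata (the JSON-LD layout below follows the v1 convention and is an assumption for v2; a real connector is required because message_process is called):

    metadata = {
        "@context": ["https://clowder.example.com/contexts/metadata.jsonld"],  # hypothetical context URL
        "content": {"lines": 42},  # hypothetical extractor output
        "agent": {"@type": "cat:extractor", "extractor_id": "ncsa.wordcount"},  # hypothetical agent block
    }
    upload_metadata(connector, client, "some-dataset-uuid", metadata)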