@@ -247,7 +247,6 @@ def upload_to_dataset(connector, client, datasetid, filepath, check_duplicate=Fa
 
     logger = logging.getLogger(__name__)
 
-    # TODO fix this to use v2 api
     if check_duplicate:
         ds_files = get_file_list(connector, client.host, client.key, datasetid)
         for f in ds_files:
@@ -257,7 +256,7 @@ def upload_to_dataset(connector, client, datasetid, filepath, check_duplicate=Fa
 
     for source_path in connector.mounted_paths:
         if filepath.startswith(connector.mounted_paths[source_path]):
-            return _upload_to_dataset_local(connector, client.host, client.key, datasetid, filepath)
+            return _upload_to_dataset_local(connector, client, datasetid, filepath)
 
     url = '%s/api/v2/datasets/%s/files' % (client.host, datasetid)
 
@@ -279,17 +278,15 @@ def upload_to_dataset(connector, client, datasetid, filepath, check_duplicate=Fa
         logger.error("unable to upload file %s (not found)", filepath)
 
 
-def _upload_to_dataset_local(connector, host, key, datasetid, filepath):
+def _upload_to_dataset_local(connector, client, datasetid, filepath):
     """Upload file POINTER to existing Clowder dataset. Does not copy actual file bytes.
 
     Keyword arguments:
     connector -- connector information, used to get missing parameters and send status updates
-    host -- the clowder host, including http and port, should end with a /
-    key -- the secret key to login to clowder
+    client -- ClowderClient containing authentication credentials
     datasetid -- the dataset that the file should be associated with
     filepath -- path to file
     """
-    client = ClowderClient(host, key)
     logger = logging.getLogger(__name__)
     url = '%s/api/v2/datasets/%s/files' % (client.host, datasetid)
 
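For readers skimming the diff, here is a minimal sketch of the call-site change this commit makes: _upload_to_dataset_local now receives the client object itself instead of separate host and key arguments. The Client namedtuple below is only a stand-in for the project's ClowderClient, and the host, key, connector, dataset id, and file path values are hypothetical placeholders.

# Sketch only: a stand-in object exposing the .host and .key attributes the helper reads;
# it is not the project's real ClowderClient class.
from collections import namedtuple

Client = namedtuple("Client", ["host", "key"])
client = Client(host="https://clowder.example.org", key="SECRET_KEY")  # hypothetical values

# Before this commit the caller unpacked the credentials:
#     _upload_to_dataset_local(connector, client.host, client.key, datasetid, filepath)
# After this commit the client object is passed through directly:
#     _upload_to_dataset_local(connector, client, datasetid, filepath)
print(client.host, client.key)  # the two attributes the refactored helper still uses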