Skip to content

Commit 4147390

Browse files
committed
cleanup a few client parameters
1 parent f3c81cb commit 4147390

File tree

3 files changed

+7
-9
lines changed

3 files changed

+7
-9
lines changed

pyclowder/api/v1/files.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -297,7 +297,7 @@ def upload_to_dataset(connector, client, datasetid, filepath, check_duplicate=Fa
297297

298298
for source_path in connector.mounted_paths:
299299
if filepath.startswith(connector.mounted_paths[source_path]):
300-
return _upload_to_dataset_local(connector, client.host, client.key, datasetid, filepath)
300+
return _upload_to_dataset_local(connector, client, datasetid, filepath)
301301

302302
url = '%s/api/uploadToDataset/%s?key=%s' % (client.host, datasetid, client.key)
303303

pyclowder/api/v2/files.py

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -247,7 +247,6 @@ def upload_to_dataset(connector, client, datasetid, filepath, check_duplicate=Fa
247247

248248
logger = logging.getLogger(__name__)
249249

250-
# TODO fix this to use v2 api
251250
if check_duplicate:
252251
ds_files = get_file_list(connector, client.host, client.key, datasetid)
253252
for f in ds_files:
@@ -257,7 +256,7 @@ def upload_to_dataset(connector, client, datasetid, filepath, check_duplicate=Fa
257256

258257
for source_path in connector.mounted_paths:
259258
if filepath.startswith(connector.mounted_paths[source_path]):
260-
return _upload_to_dataset_local(connector, client.host, client.key, datasetid, filepath)
259+
return _upload_to_dataset_local(connector, client, datasetid, filepath)
261260

262261
url = '%s/api/v2/datasets/%s/files' % (client.host, datasetid)
263262

@@ -279,17 +278,15 @@ def upload_to_dataset(connector, client, datasetid, filepath, check_duplicate=Fa
279278
logger.error("unable to upload file %s (not found)", filepath)
280279

281280

282-
def _upload_to_dataset_local(connector, host, key, datasetid, filepath):
281+
def _upload_to_dataset_local(connector, client, datasetid, filepath):
283282
"""Upload file POINTER to existing Clowder dataset. Does not copy actual file bytes.
284283
285284
Keyword arguments:
286285
connector -- connector information, used to get missing parameters and send status updates
287-
host -- the clowder host, including http and port, should end with a /
288-
key -- the secret key to login to clowder
286+
client -- ClowderClient containing authentication credentials
289287
datasetid -- the dataset that the file should be associated with
290288
filepath -- path to file
291289
"""
292-
client = ClowderClient(host, key)
293290
logger = logging.getLogger(__name__)
294291
url = '%s/api/v2/datatsets/%s/files' % (client.host, datasetid)
295292

pyclowder/files.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -115,7 +115,8 @@ def submit_extractions_by_dataset(connector, host, key, datasetid, extractorname
115115
extractorname -- registered name of extractor to trigger
116116
ext -- extension to filter. e.g. 'tif' will only submit TIFF files for extraction.
117117
"""
118-
filelist = get_file_list(connector, host, key, datasetid)
118+
client = ClowderClient(host=host, key=key)
119+
filelist = get_file_list(connector, client, datasetid)
119120

120121
for f in filelist:
121122
# Only submit files that end with given extension, if specified
@@ -326,5 +327,5 @@ def _upload_to_dataset_local(connector, host, key, datasetid, filepath):
326327
if clowder_version == 2:
327328
uploadedfileid = v2files._upload_to_dataset_local(connector, client, datasetid, filepath)
328329
else:
329-
uploadedfileid = v1files._upload_to_dataset_local(connector, host, key, datasetid, filepath)
330+
uploadedfileid = v1files._upload_to_dataset_local(connector, client, datasetid, filepath)
330331
return uploadedfileid

0 commit comments

Comments (0)