Skip to content

Commit 986da98

Browse files
committed
Remove the redundant `token` parameter; use `key` for the Bearer authorization header instead
1 parent edcee1b commit 986da98

File tree

2 files changed

+22
-30
lines changed

2 files changed

+22
-30
lines changed

pyclowder/api/v2/files.py

Lines changed: 11 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -83,7 +83,7 @@ def download_info(connector, host, key, fileid):
8383
return result
8484

8585

86-
def download_metadata(connector, host, key, fileid, extractor=None, token=None):
86+
def download_metadata(connector, host, key, fileid, extractor=None):
8787
"""Download file JSON-LD metadata from Clowder.
8888
8989
Keyword arguments:
@@ -96,15 +96,15 @@ def download_metadata(connector, host, key, fileid, extractor=None, token=None):
9696

9797
filterstring = "" if extractor is None else "?extractor=%s" % extractor
9898
url = '%sapi/v2/files/%s/metadata?%s' % (host, fileid, filterstring)
99-
headers = {"Authorization": "Bearer " + token}
99+
headers = {"Authorization": "Bearer " + key}
100100

101101
# fetch data
102102
result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True, headers=headers)
103103

104104
return result
105105

106106

107-
def submit_extraction(connector, host, key, fileid, extractorname, token=None):
107+
def submit_extraction(connector, host, key, fileid, extractorname):
108108
"""Submit file for extraction by given extractor.
109109
110110
Keyword arguments:
@@ -118,7 +118,7 @@ def submit_extraction(connector, host, key, fileid, extractorname, token=None):
118118
url = "%sapi/v2/files/%s/extractions?key=%s" % (host, fileid, key)
119119
result = connector.post(url,
120120
headers={'Content-Type': 'application/json',
121-
"Authorization": "Bearer " + token},
121+
"Authorization": "Bearer " + key},
122122
data=json.dumps({"extractor": extractorname}),
123123
verify=connector.ssl_verify if connector else True)
124124

@@ -299,7 +299,7 @@ def upload_thumbnail(connector, host, key, fileid, thumbnail):
299299
return thumbnailid
300300

301301

302-
def upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate=False, token=None):
302+
def upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate=False):
303303
"""Upload file to existing Clowder dataset.
304304
305305
Keyword arguments:
@@ -323,7 +323,7 @@ def upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate
323323

324324
for source_path in connector.mounted_paths:
325325
if filepath.startswith(connector.mounted_paths[source_path]):
326-
return _upload_to_dataset_local(connector, host, key, datasetid, filepath, token)
326+
return _upload_to_dataset_local(connector, host, key, datasetid, filepath, key)
327327

328328
url = '%sapi/v2/datasets/%s/files' % (host, datasetid)
329329

@@ -332,12 +332,8 @@ def upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate
332332
m = MultipartEncoder(
333333
fields={'file': (filename, open(filepath, 'rb'))}
334334
)
335-
if token:
336-
headers = {"Authorization": "Bearer " + token,
337-
'Content-Type': m.content_type}
338-
else:
339-
headers = {"Authorization": "Bearer " + key,
340-
'Content-Type': m.content_type}
335+
headers = {"Authorization": "Bearer " + key,
336+
'Content-Type': m.content_type}
341337
result = connector.post(url, data=m, headers=headers,
342338
verify=connector.ssl_verify if connector else True)
343339

@@ -349,7 +345,7 @@ def upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate
349345
logger.error("unable to upload file %s (not found)", filepath)
350346

351347

352-
def _upload_to_dataset_local(connector, host, key, datasetid, filepath, token=None):
348+
def _upload_to_dataset_local(connector, host, key, datasetid, filepath):
353349
"""Upload file POINTER to existing Clowder dataset. Does not copy actual file bytes.
354350
355351
Keyword arguments:
@@ -375,12 +371,8 @@ def _upload_to_dataset_local(connector, host, key, datasetid, filepath, token=No
375371
m = MultipartEncoder(
376372
fields={'file': (filename, open(filepath, 'rb'))}
377373
)
378-
if token:
379-
headers = {"Authorization": "Bearer " + token,
380-
'Content-Type': m.content_type}
381-
else:
382-
headers = {"Authorization": "Bearer " + key,
383-
'Content-Type': m.content_type}
374+
headers = {"Authorization": "Bearer " + key,
375+
'Content-Type': m.content_type}
384376
result = connector.post(url, data=m, headers=headers,
385377
verify=connector.ssl_verify if connector else True)
386378

pyclowder/files.py

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@
3030

3131

3232
# pylint: disable=too-many-arguments
33-
def download(connector, host, key, fileid, intermediatefileid=None, ext="", token=None):
33+
def download(connector, host, key, fileid, intermediatefileid=None, ext=""):
3434
"""Download file to be processed from Clowder.
3535
3636
Keyword arguments:
@@ -48,7 +48,7 @@ def download(connector, host, key, fileid, intermediatefileid=None, ext="", toke
4848
return inputfilename
4949

5050

51-
def download_info(connector, host, key, fileid, token=None):
51+
def download_info(connector, host, key, fileid):
5252
"""Download file summary metadata from Clowder.
5353
5454
Keyword arguments:
@@ -65,7 +65,7 @@ def download_info(connector, host, key, fileid, token=None):
6565
return result.json()
6666

6767

68-
def download_metadata(connector, host, key, fileid, extractor=None, token=None):
68+
def download_metadata(connector, host, key, fileid, extractor=None):
6969
"""Download file JSON-LD metadata from Clowder.
7070
7171
Keyword arguments:
@@ -76,13 +76,13 @@ def download_metadata(connector, host, key, fileid, extractor=None, token=None):
7676
extractor -- extractor name to filter results (if only one extractor's metadata is desired)
7777
"""
7878
if clowder_version >= 2.0:
79-
result = v2files.download_metadata(connector, host, key, fileid, extractor, token)
79+
result = v2files.download_metadata(connector, host, key, fileid, extractor)
8080
else:
8181
result = v1files.download_metadata(connector, host, key, fileid, extractor)
8282
return result.json()
8383

8484

85-
def submit_extraction(connector, host, key, fileid, extractorname, token=None):
85+
def submit_extraction(connector, host, key, fileid, extractorname):
8686
"""Submit file for extraction by given extractor.
8787
8888
Keyword arguments:
@@ -93,7 +93,7 @@ def submit_extraction(connector, host, key, fileid, extractorname, token=None):
9393
extractorname -- registered name of extractor to trigger
9494
"""
9595
if clowder_version >= 2.0:
96-
result = v2files.submit_extraction(connector, host, key, fileid, extractorname, token)
96+
result = v2files.submit_extraction(connector, host, key, fileid, extractorname)
9797
else:
9898
result = v1files.submit_extraction(connector, host, key, fileid, extractorname)
9999
return result.json()
@@ -151,7 +151,7 @@ def submit_extractions_by_collection(connector, host, key, collectionid, extract
151151
submit_extractions_by_collection(connector, host, key, coll['id'], extractorname, ext, recursive)
152152

153153

154-
def upload_metadata(connector, host, key, fileid, metadata, token=None):
154+
def upload_metadata(connector, host, key, fileid, metadata):
155155
"""Upload file JSON-LD metadata to Clowder.
156156
157157
Keyword arguments:
@@ -265,7 +265,7 @@ def upload_thumbnail(connector, host, key, fileid, thumbnail):
265265
return thumbnailid
266266

267267

268-
def upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate=False, token=None):
268+
def upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate=False):
269269
"""Upload file to existing Clowder dataset.
270270
271271
Keyword arguments:
@@ -278,7 +278,7 @@ def upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate
278278
"""
279279

280280
if clowder_version >= 2.0:
281-
v2files.upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate, token=None)
281+
v2files.upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate)
282282
else:
283283
logger = logging.getLogger(__name__)
284284

@@ -311,7 +311,7 @@ def upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate
311311
logger.error("unable to upload file %s (not found)", filepath)
312312

313313

314-
def _upload_to_dataset_local(connector, host, key, datasetid, filepath, token=None):
314+
def _upload_to_dataset_local(connector, host, key, datasetid, filepath):
315315
"""Upload file POINTER to existing Clowder dataset. Does not copy actual file bytes.
316316
317317
Keyword arguments:
@@ -323,7 +323,7 @@ def _upload_to_dataset_local(connector, host, key, datasetid, filepath, token=No
323323
"""
324324

325325
if clowder_version >= 2.0:
326-
uploadedfileid = v2files._upload_to_dataset_local(connector, host, key, datasetid, filepath, token)
326+
uploadedfileid = v2files._upload_to_dataset_local(connector, host, key, datasetid, filepath)
327327
else:
328328
uploadedfileid = v1files._upload_to_dataset_local(connector, host, key, datasetid, filepath)
329329
return uploadedfileid

0 commit comments

Comments
 (0)