
Commit ac11358

Merge pull request #69 from clowder-framework/support-v2-API-key

Change v2 endpoints to API Keys instead of Bearer tokens

2 parents 0e33cea + 4ec3067

File tree

5 files changed: +24 -24 lines changed

CHANGELOG.md

Lines changed: 5 additions & 0 deletions

@@ -4,6 +4,11 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](http://keepachangelog.com/)
 and this project adheres to [Semantic Versioning](http://semver.org/).
 
+## 3.0.1 - 2023-05-25
+
+### Changed
+This version updates Clowder 2 functionality to use API Key headers instead of Bearer tokens.
+
 ## 3.0.0 - 2022-12-16
 This version adds Clowder 2 support and removes the old method of extractor registration in favor of reliance on heartbeats.
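The substance of the change is a one-line swap in how each v2 request authenticates. A rough sketch of the before/after pattern; the `Client` class, host, key, and dataset id below are placeholders standing in for pyclowder's ClowderClient (`client.host`, `client.key`):

```python
import requests

# Placeholder stand-in for pyclowder's ClowderClient credentials.
class Client:
    host = "https://clowder.example.org"
    key = "my-api-key"

client = Client()

# Before this commit, v2 requests carried the key as a Bearer token:
#   headers = {"Authorization": "Bearer " + client.key}
# After this commit, the key travels in the X-API-KEY header instead:
headers = {"X-API-KEY": client.key}

datasetid = "5d1a2b3c4e"  # placeholder dataset id
url = "%s/api/v2/datasets/%s" % (client.host, datasetid)
result = requests.get(url, headers=headers)
result.raise_for_status()
```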

pyclowder/api/v1/datasets.py

Lines changed: 2 additions & 4 deletions

@@ -123,13 +123,11 @@ def download_metadata(connector, client, datasetid, extractor=None):
     datasetid -- the dataset to fetch metadata of
     extractor -- extractor name to filter results (if only one extractor's metadata is desired)
     """
-    headers = {"Authorization": "Bearer " + client.key}
-
     filterstring = "" if extractor is None else "&extractor=%s" % extractor
-    url = '%s/api/datasets/%s/metadata?key=%s' % (client.host, datasetid, client.key)
+    url = '%s/api/datasets/%s/metadata?key=%s' % (client.host, datasetid, client.key + filterstring)
 
     # fetch data
-    result = requests.get(url, stream=True, headers=headers,
+    result = requests.get(url, stream=True,
                           verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
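The v1 endpoint keeps query-string authentication; the only change is that the redundant Authorization header is dropped and the optional extractor filter is folded into the same query string as the key. A quick sketch of the URL the updated code builds, with placeholder values:

```python
# Placeholder values; in pyclowder these come from ClowderClient.
host = "https://clowder.example.org"
datasetid = "5d1a2b3c4e"
key = "my-api-key"
extractor = "ncsa.image.metadata"

filterstring = "" if extractor is None else "&extractor=%s" % extractor

# v1 authentication stays in the query string; the extractor filter is
# appended after the key rather than sent in a header.
url = '%s/api/datasets/%s/metadata?key=%s' % (host, datasetid, key + filterstring)
print(url)
# https://clowder.example.org/api/datasets/5d1a2b3c4e/metadata?key=my-api-key&extractor=ncsa.image.metadata
```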

pyclowder/api/v2/datasets.py

Lines changed: 9 additions & 10 deletions

@@ -31,7 +31,7 @@ def create_empty(connector, client, datasetname, description, parentid=None, spa
 
     url = '%s/api/v2/datasets' % client.host
     headers = {"Content-Type": "application/json",
-               "Authorization": "Bearer " + client.key}
+               "X-API-KEY": client.key}
     result = requests.post(url, headers=headers,
                            data=json.dumps({"name": datasetname, "description": description}),
                            verify=connector.ssl_verify if connector else True)
@@ -52,8 +52,7 @@ def delete(connector, client , datasetid):
     client -- ClowderClient containing authentication credentials
     datasetid -- the dataset to delete
     """
-    headers = {"Authorization": "Bearer " + client.key}
-
+    headers = {"X-API-KEY": client.key}
     url = "%s/api/v2/datasets/%s" % (client.host, datasetid)
 
     result = requests.delete(url, headers=headers, verify=connector.ssl_verify if connector else True)
@@ -97,7 +96,7 @@ def download(connector, client, datasetid):
 
     connector.message_process({"type": "dataset", "id": datasetid}, "Downloading dataset.")
 
-    headers = {"Authorization": "Bearer " + client.key}
+    headers = {"X-API-KEY": client.key}
     # fetch dataset zipfile
     url = '%s/api/v2/datasets/%s/download' % (client.host, datasetid)
     result = requests.get(url, stream=True, headers=headers,
@@ -121,7 +120,7 @@ def download_metadata(connector, client, datasetid, extractor=None):
     datasetid -- the dataset to fetch metadata of
     extractor -- extractor name to filter results (if only one extractor's metadata is desired)
     """
-    headers = {"Authorization": "Bearer " + client.key}
+    headers = {"X-API-KEY": client.key}
 
     filterstring = "" if extractor is None else "&extractor=%s" % extractor
     url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)
@@ -142,7 +141,7 @@ def get_info(connector, client, datasetid):
     client -- ClowderClient containing authentication credentials
     datasetid -- the dataset to get info of
     """
-    headers = {"Authorization": "Bearer " + client.key}
+    headers = {"X-API-KEY": client.key}
 
     url = "%s/api/v2/datasets/%s" % (client.host, datasetid)
 
@@ -161,7 +160,7 @@ def get_file_list(connector, client, datasetid):
     client -- ClowderClient containing authentication credentials
     datasetid -- the dataset to get filelist of
     """
-    headers = {"Authorization": "Bearer " + client.key}
+    headers = {"X-API-KEY": client.key}
 
     url = "%s/api/v2/datasets/%s/files" % (client.host, datasetid)
 
@@ -181,7 +180,7 @@ def remove_metadata(connector, client, datasetid, extractor=None):
     extractor -- extractor name to filter deletion
     !!! ALL JSON-LD METADATA WILL BE REMOVED IF NO extractor PROVIDED !!!
     """
-    headers = {"Authorization": "Bearer " + client.key}
+    headers = {"X-API-KEY": client.key}
 
     filterstring = "" if extractor is None else "&extractor=%s" % extractor
     url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)
@@ -202,7 +201,7 @@ def submit_extraction(connector, client, datasetid, extractorname):
     extractorname -- registered name of extractor to trigger
     """
     headers = {'Content-Type': 'application/json',
-               "Authorization": "Bearer " + client.key}
+               "X-API-KEY": client.key}
 
     url = "%s/api/v2/datasets/%s/extractions?key=%s" % (client.host, datasetid)
 
@@ -225,7 +224,7 @@ def upload_metadata(connector, client, datasetid, metadata):
     metadata -- the metadata to be uploaded
     """
     headers = {'Content-Type': 'application/json',
-               "Authorization": "Bearer " + client.key}
+               "X-API-KEY": client.key}
     connector.message_process({"type": "dataset", "id": datasetid}, "Uploading dataset metadata.")
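Outside pyclowder, the same v2 dataset endpoints can be exercised directly with requests using the new header; a minimal sketch mirroring the updated get_info() and download_metadata() patterns, with placeholder host, key, and dataset id:

```python
import requests

# Placeholder values; in pyclowder these come from the ClowderClient
# (client.host and client.key in the functions above).
host = "https://clowder.example.org"
key = "my-api-key"
datasetid = "5d1a2b3c4e"

# The API key is now sent as an X-API-KEY header rather than a Bearer token.
headers = {"X-API-KEY": key}

# Dataset info, mirroring get_info().
info = requests.get("%s/api/v2/datasets/%s" % (host, datasetid), headers=headers)
info.raise_for_status()
print(info.json())

# Dataset metadata, mirroring download_metadata().
meta = requests.get("%s/api/v2/datasets/%s/metadata" % (host, datasetid), headers=headers)
meta.raise_for_status()
print(meta.json())
```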

pyclowder/api/v2/files.py

Lines changed: 7 additions & 9 deletions

@@ -65,7 +65,7 @@ def download(connector, client, fileid, intermediatefileid=None, ext=""):
         intermediatefileid = fileid
 
     url = '%s/api/v2/files/%s' % (client.host, intermediatefileid)
-    headers = {"Authorization": "Bearer " + client.key}
+    headers = {"X-API-KEY": client.key}
     result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True, headers=headers)
 
     (inputfile, inputfilename) = tempfile.mkstemp(suffix=ext)
@@ -90,7 +90,7 @@ def download_info(connector, client, fileid):
     """
 
     url = '%s/api/v2/files/%s/metadata' % (client.host, fileid)
-    headers = {"Authorization": "Bearer " + client.key}
+    headers = {"X-API-KEY": client.key}
     # fetch data
     result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True, headers=headers)
 
@@ -109,7 +109,7 @@ def download_metadata(connector,client, fileid, extractor=None):
 
     filterstring = "" if extractor is None else "?extractor=%s" % extractor
     url = '%s/api/v2/files/%s/metadata?%s' % (client.host, fileid, filterstring)
-    headers = {"Authorization": "Bearer " + client.key}
+    headers = {"X-API-KEY": client.key}
 
     # fetch data
     result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True, headers=headers)
@@ -130,7 +130,7 @@ def submit_extraction(connector, client, fileid, extractorname):
     url = "%s/api/v2/files/%s/extractions?key=%s" % (client.host, fileid, client.key)
     result = connector.post(url,
                             headers={'Content-Type': 'application/json',
-                                     "Authorization": "Bearer " + client.key},
+                                     "X-API-KEY": client.key},
                             data=json.dumps({"extractor": extractorname}),
                             verify=connector.ssl_verify if connector else True)
 
@@ -149,9 +149,7 @@ def upload_metadata(connector, client, fileid, metadata):
 
     connector.message_process({"type": "file", "id": fileid}, "Uploading file metadata.")
     headers = {'Content-Type': 'application/json',
-               'Authorization':'Bearer ' + client.key}
-    print(metadata)
-    as_json = json.dumps(metadata)
+               'X-API-KEY': client.key}
     url = '%s/api/v2/files/%s/metadata' % (client.host, fileid)
     result = connector.post(url, headers=headers, data=json.dumps(metadata),
                             verify=connector.ssl_verify if connector else True)
@@ -285,7 +283,7 @@ def upload_to_dataset(connector, client, datasetid, filepath, check_duplicate=Fa
     m = MultipartEncoder(
         fields={'file': (filename, open(filepath, 'rb'))}
     )
-    headers = {"Authorization": "Bearer " + client.key,
+    headers = {"X-API-KEY": client.key,
               'Content-Type': m.content_type}
     result = connector.post(url, data=m, headers=headers,
                             verify=connector.ssl_verify if connector else True)
@@ -322,7 +320,7 @@ def _upload_to_dataset_local(connector, client, datasetid, filepath):
     m = MultipartEncoder(
         fields={'file': (filename, open(filepath, 'rb'))}
     )
-    headers = {"Authorization": "Bearer " + client.key,
+    headers = {"X-API-KEY": client.key,
              'Content-Type': m.content_type}
     result = connector.post(url, data=m, headers=headers,
                             verify=connector.ssl_verify if connector else True)
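File uploads follow the same pattern: the API key rides in the X-API-KEY header alongside the multipart content type produced by requests_toolbelt. A sketch under assumptions, using placeholder values and an illustrative upload URL (the real one is assembled inside upload_to_dataset(), not shown in this diff):

```python
import os
import requests
from requests_toolbelt.multipart.encoder import MultipartEncoder

# Placeholder values; the upload URL is illustrative only and not taken
# from this diff.
host = "https://clowder.example.org"
key = "my-api-key"
datasetid = "5d1a2b3c4e"
filepath = "/tmp/example.csv"
url = "%s/api/v2/datasets/%s/files" % (host, datasetid)

filename = os.path.basename(filepath)
m = MultipartEncoder(fields={'file': (filename, open(filepath, 'rb'))})

# The API key now travels in the X-API-KEY header next to the multipart
# content type, instead of an Authorization: Bearer header.
headers = {"X-API-KEY": key, "Content-Type": m.content_type}
result = requests.post(url, data=m, headers=headers)
result.raise_for_status()
```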

setup.py

Lines changed: 1 addition & 1 deletion

@@ -8,7 +8,7 @@
 
 setup(
     name='pyclowder',
-    version='3.0.0',
+    version='3.0.1',
     description='Python SDK for the Clowder Data Management System',
     long_description=long_description,
