
Commit 03d7527

For v2, 'token' is replaced with the secret key, so only the API endpoints have to change; the headers are built the same way.
1 parent 8243fef commit 03d7527
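
In v2 the Bearer value is simply the secret key, so the per-function token/key branch collapses into a single header construction. A minimal before/after sketch of the pattern this commit removes (the helper names here are illustrative, not part of pyclowder):

# Before: every v2 helper accepted an optional token and branched on it.
def build_headers_old(key, token=None):
    bearer = token if token else key
    return {"Content-Type": "application/json",
            "Authorization": "Bearer " + bearer}

# After: the secret key is the only credential, sent as the Bearer token.
def build_headers_new(key):
    return {"Content-Type": "application/json",
            "Authorization": "Bearer " + key}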

File tree

3 files changed: +36 -74 lines changed


pyclowder/api/v2/datasets.py

Lines changed: 21 additions & 51 deletions
@@ -15,7 +15,7 @@
 from pyclowder.utils import StatusMessage
 
 
-def create_empty(connector, host, key, datasetname, description, parentid=None, spaceid=None, token=None):
+def create_empty(connector, host, key, datasetname, description, parentid=None, spaceid=None):
     """Create a new dataset in Clowder.
 
     Keyword arguments:
@@ -31,12 +31,8 @@ def create_empty(connector, host, key, datasetname, description, parentid=None,
     logger = logging.getLogger(__name__)
 
     url = '%sapi/v2/datasets' % (host)
-    if token:
-        headers = {"Content-Type": "application/json",
-                   "Authorization": "Bearer " + token}
-    else:
-        headers = {"Content-Type": "application/json",
-                   "Authorization": "Bearer " + key}
+    headers = {"Content-Type": "application/json",
+               "Authorization": "Bearer " + key}
     result = requests.post(url, headers=headers,
                            data=json.dumps({"name": datasetname, "description": description}),
                            verify=connector.ssl_verify if connector else True)
@@ -49,7 +45,7 @@ def create_empty(connector, host, key, datasetname, description, parentid=None,
     return datasetid
 
 
-def delete(connector, host, key, datasetid, token=None):
+def delete(connector, host, key, datasetid):
     """Delete dataset from Clowder.
 
     Keyword arguments:
@@ -58,10 +54,7 @@ def delete(connector, host, key, datasetid, token=None):
     key -- the secret key to login to clowder
     datasetid -- the dataset to delete
     """
-    if token:
-        headers = {"Authorization": "Bearer " + token}
-    else:
-        headers = {"Authorization": "Bearer " + key}
+    headers = {"Authorization": "Bearer " + key}
 
     url = "%sapi/v2/datasets/%s" % (host, datasetid)
 
@@ -96,7 +89,7 @@ def delete_by_collection(connector, host, key, collectionid, recursive=True, del
         delete_collection(connector, host, key, collectionid)
 
 
-def download(connector, host, key, datasetid, token=None):
+def download(connector, host, key, datasetid):
     """Download dataset to be processed from Clowder as zip file.
 
     Keyword arguments:
@@ -108,10 +101,7 @@ def download(connector, host, key, datasetid, token=None):
 
     connector.message_process({"type": "dataset", "id": datasetid}, "Downloading dataset.")
 
-    if token:
-        headers = {"Authorization": "Bearer " + token}
-    else:
-        headers = {"Authorization": "Bearer " + key}
+    headers = {"Authorization": "Bearer " + key}
     # fetch dataset zipfile
     url = '%sapi/v2/datasets/%s/download' % (host, datasetid)
     result = requests.get(url, stream=True, headers=headers,
@@ -126,7 +116,7 @@ def download(connector, host, key, datasetid, token=None):
     return zipfile
 
 
-def download_metadata(connector, host, key, datasetid, extractor=None, token=None):
+def download_metadata(connector, host, key, datasetid, extractor=None):
     """Download dataset JSON-LD metadata from Clowder.
 
     Keyword arguments:
@@ -136,10 +126,7 @@ def download_metadata(connector, host, key, datasetid, extractor=None, token=Non
     datasetid -- the dataset to fetch metadata of
     extractor -- extractor name to filter results (if only one extractor's metadata is desired)
     """
-    if token:
-        headers = {"Authorization": "Bearer " + token}
-    else:
-        headers = {"Authorization": "Bearer " + key}
+    headers = {"Authorization": "Bearer " + key}
 
     filterstring = "" if extractor is None else "&extractor=%s" % extractor
     url = '%sapi/v2/datasets/%s/metadata' % (host, datasetid)
@@ -152,7 +139,7 @@ def download_metadata(connector, host, key, datasetid, extractor=None, token=Non
     return result.json()
 
 
-def get_info(connector, host, key, datasetid, token=None):
+def get_info(connector, host, key, datasetid):
     """Get basic dataset information from UUID.
 
     Keyword arguments:
@@ -161,10 +148,7 @@ def get_info(connector, host, key, datasetid, token=None):
     key -- the secret key to login to clowder
     datasetid -- the dataset to get info of
     """
-    if token:
-        headers = {"Authorization": "Bearer " + token}
-    else:
-        headers = {"Authorization": "Bearer " + key}
+    headers = {"Authorization": "Bearer " + key}
 
     url = "%sapi/v2/datasets/%s" % (host, datasetid)
 
@@ -175,7 +159,7 @@ def get_info(connector, host, key, datasetid, token=None):
     return json.loads(result.text)
 
 
-def get_file_list(connector, host, key, datasetid, token=None):
+def get_file_list(connector, host, key, datasetid):
     """Get list of files in a dataset as JSON object.
 
     Keyword arguments:
@@ -184,10 +168,7 @@ def get_file_list(connector, host, key, datasetid, token=None):
     key -- the secret key to login to clowder
     datasetid -- the dataset to get filelist of
     """
-    if token:
-        headers = {"Authorization": "Bearer " + token}
-    else:
-        headers = {"Authorization": "Bearer " + key}
+    headers = {"Authorization": "Bearer " + key}
 
     url = "%sapi/v2/datasets/%s/files" % (host, datasetid)
 
@@ -197,7 +178,7 @@ def get_file_list(connector, host, key, datasetid, token=None):
     return json.loads(result.text)
 
 
-def remove_metadata(connector, host, key, datasetid, extractor=None, token=None):
+def remove_metadata(connector, host, key, datasetid, extractor=None):
     """Delete dataset JSON-LD metadata from Clowder.
 
     Keyword arguments:
@@ -208,10 +189,7 @@ def remove_metadata(connector, host, key, datasetid, extractor=None, token=None)
     extractor -- extractor name to filter deletion
                     !!! ALL JSON-LD METADATA WILL BE REMOVED IF NO extractor PROVIDED !!!
     """
-    if token:
-        headers = {"Authorization": "Bearer " + token}
-    else:
-        headers = {"Authorization": "Bearer " + key}
+    headers = {"Authorization": "Bearer " + key}
 
     filterstring = "" if extractor is None else "&extractor=%s" % extractor
     url = '%sapi/v2/datasets/%s/metadata' % (host, datasetid)
@@ -222,7 +200,7 @@ def remove_metadata(connector, host, key, datasetid, extractor=None, token=None)
     result.raise_for_status()
 
 
-def submit_extraction(connector, host, key, datasetid, extractorname, token=None):
+def submit_extraction(connector, host, key, datasetid, extractorname):
     """Submit dataset for extraction by given extractor.
 
     Keyword arguments:
@@ -232,12 +210,8 @@ def submit_extraction(connector, host, key, datasetid, extractorname, token=None
     datasetid -- the dataset UUID to submit
     extractorname -- registered name of extractor to trigger
     """
-    if token:
-        headers = {'Content-Type': 'application/json',
-                   "Authorization": "Bearer " + token}
-    else:
-        headers = {'Content-Type': 'application/json',
-                   "Authorization": "Bearer " + key}
+    headers = {'Content-Type': 'application/json',
+               "Authorization": "Bearer " + key}
 
     url = "%sapi/v2/datasets/%s/extractions?key=%s" % (host, datasetid)
 
@@ -297,7 +271,7 @@ def upload_tags(connector, host, key, datasetid, tags):
                             verify=connector.ssl_verify if connector else True)
 
 
-def upload_metadata(connector, host, key, datasetid, metadata, token=None):
+def upload_metadata(connector, host, key, datasetid, metadata):
     """Upload dataset JSON-LD metadata to Clowder.
 
     Keyword arguments:
@@ -307,12 +281,8 @@ def upload_metadata(connector, host, key, datasetid, metadata, token=None):
     datasetid -- the dataset that is currently being processed
     metadata -- the metadata to be uploaded
     """
-    if token:
-        headers = {'Content-Type': 'application/json',
-                   "Autorization": "Bearer " + token}
-    else:
-        headers = {'Content-Type': 'application/json',
-                   "Authorization": "Bearer " + key}
+    headers = {'Content-Type': 'application/json',
+               "Authorization": "Bearer " + key}
     connector.message_process({"type": "dataset", "id": datasetid}, "Uploading dataset metadata.")
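
With the token parameter gone, callers of the v2 helpers pass only the secret key. A hypothetical usage sketch (host, key, and the dataset name are placeholders; None is passed for the connector here because these two helpers only consult it for ssl_verify):

from pyclowder.api.v2 import datasets as v2datasets

host = "http://localhost:8000/"   # placeholder v2 instance; trailing slash expected by the '%sapi/v2/...' URLs
key = "SECRET_KEY"                # the v2 secret key, sent as the Bearer token

dataset_id = v2datasets.create_empty(None, host, key, "demo dataset", "created without a token")
files = v2datasets.get_file_list(None, host, key, dataset_id)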

pyclowder/connectors.py

Lines changed: 5 additions & 13 deletions
@@ -184,8 +184,6 @@ def _build_resource(self, body, host, secret_key):
         intermediatefileid = body.get('intermediateId', '')
         datasetid = body.get('datasetId', '')
         filename = body.get('filename', '')
-        if float(os.getenv('clowder_version')) == 2.0:
-            token = body.get('token', ' ')
 
         # determine resource type; defaults to file
         resource_type = "file"
@@ -214,12 +212,8 @@ def _build_resource(self, body, host, secret_key):
         # determine what to download (if needed) and add relevant data to resource
         if resource_type == "dataset":
             try:
-                if float(os.getenv('clowder_version')) == 2.0:
-                    datasetinfo = pyclowder.datasets.get_info(self, host, secret_key, datasetid, token)
-                    filelist = pyclowder.datasets.get_file_list(self, host, secret_key, datasetid, token)
-                else:
-                    datasetinfo = pyclowder.datasets.get_info(self, host, secret_key, datasetid)
-                    filelist = pyclowder.datasets.get_file_list(self, host, secret_key, datasetid)
+                datasetinfo = pyclowder.datasets.get_info(self, host, secret_key, datasetid)
+                filelist = pyclowder.datasets.get_file_list(self, host, secret_key, datasetid)
                 triggering_file = None
                 for f in filelist:
                     if f['id'] == fileid:
@@ -255,7 +249,6 @@ def _build_resource(self, body, host, secret_key):
             "intermediate_id": intermediatefileid,
             "name": filename,
             "file_ext": ext,
-            "token": token,
             "parent": {"type": "dataset",
                        "id": datasetid}
         }
@@ -427,7 +420,7 @@ def _process_message(self, body):
         if url not in Connector.registered_clowder:
             Connector.registered_clowder.append(url)
             if clowder_version >= 2.0:
-                self.register_extractor("%s" % (url), token=secret_key)
+                self.register_extractor("%s" % (url,secret_key))
             else:
                 self.register_extractor("%s?key=%s" % (url, secret_key))
 
@@ -540,7 +533,7 @@ def _process_message(self, body):
         else:
             self.message_error(resource, message)
 
-    def register_extractor(self, endpoints, token=None):
+    def register_extractor(self, endpoints):
         """Register extractor info with Clowder.
 
         This assumes a file called extractor_info.json to be located in either the
@@ -552,8 +545,7 @@ def register_extractor(self, endpoints, token=None):
 
         logger = logging.getLogger(__name__)
 
-        headers = {'Content-Type': 'application/json',
-                   'Authorization': 'Bearer ' + token}
+        headers = {'Content-Type': 'application/json'}
         data = self.extractor_info
 
         for url in endpoints.split(','):

pyclowder/datasets.py

Lines changed: 10 additions & 10 deletions
@@ -115,7 +115,7 @@ def download_metadata(connector, host, key, datasetid, extractor=None, token=Non
     return result_json
 
 
-def get_info(connector, host, key, datasetid, token=None):
+def get_info(connector, host, key, datasetid):
     """Get basic dataset information from UUID.
 
     Keyword arguments:
@@ -125,13 +125,13 @@ def get_info(connector, host, key, datasetid, token=None):
     datasetid -- the dataset to get info of
     """
     if clowder_version >= 2.0:
-        info = v2datasets.get_info(connector, host, key, datasetid, token)
+        info = v2datasets.get_info(connector, host, key, datasetid)
     else:
         info = v1datasets.get_info(connector, host, key, datasetid)
     return info
 
 
-def get_file_list(connector, host, key, datasetid, token=None):
+def get_file_list(connector, host, key, datasetid):
     """Get list of files in a dataset as JSON object.
 
     Keyword arguments:
@@ -141,13 +141,13 @@ def get_file_list(connector, host, key, datasetid, token=None):
     datasetid -- the dataset to get filelist of
     """
     if clowder_version >= 2.0:
-        file_list = v2datasets.get_file_list(connector, host, key, datasetid, token)
+        file_list = v2datasets.get_file_list(connector, host, key, datasetid)
     else:
        file_list = v1datasets.get_file_list(connector, host, key, datasetid)
     return file_list
 
 
-def remove_metadata(connector, host, key, datasetid, extractor=None, token=None):
+def remove_metadata(connector, host, key, datasetid, extractor=None):
     """Delete dataset JSON-LD metadata from Clowder.
 
     Keyword arguments:
@@ -159,12 +159,12 @@ def remove_metadata(connector, host, key, datasetid, extractor=None, token=None)
                     !!! ALL JSON-LD METADATA WILL BE REMOVED IF NO extractor PROVIDED !!!
     """
     if clowder_version >= 2.0:
-        v2datasets.remove_metadata(connector, host, key, datasetid, extractor, token)
+        v2datasets.remove_metadata(connector, host, key, datasetid, extractor)
     else:
         v1datasets.remove_metadata(connector, host, key, datasetid, extractor)
 
 
-def submit_extraction(connector, host, key, datasetid, extractorname, token=None):
+def submit_extraction(connector, host, key, datasetid, extractorname):
     """Submit dataset for extraction by given extractor.
 
     Keyword arguments:
@@ -175,7 +175,7 @@ def submit_extraction(connector, host, key, datasetid, extractorname, token=None
     extractorname -- registered name of extractor to trigger
     """
     if clowder_version >= 2.0:
-        result_status_code = v2datasets.submit_extraction(connector, host, key, datasetid, extractorname, token)
+        result_status_code = v2datasets.submit_extraction(connector, host, key, datasetid, extractorname)
     else:
         result_status_code = v1datasets.submit_extraction(connector, host, key, datasetid, extractorname)
 
@@ -225,7 +225,7 @@ def upload_tags(connector, host, key, datasetid, tags):
                             verify=connector.ssl_verify if connector else True)
 
 
-def upload_metadata(connector, host, key, datasetid, metadata, token=None):
+def upload_metadata(connector, host, key, datasetid, metadata):
     """Upload dataset JSON-LD metadata to Clowder.
 
     Keyword arguments:
@@ -236,7 +236,7 @@ def upload_metadata(connector, host, key, datasetid, metadata, token=None):
     metadata -- the metadata to be uploaded
     """
     if clowder_version >= 2.0:
-        v2datasets.upload_metadata(connector, host, key, datasetid, metadata, token)
+        v2datasets.upload_metadata(connector, host, key, datasetid, metadata)
     else:
         v1datasets.upload_metadata(connector, host, key, datasetid, metadata)
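
The top-level pyclowder/datasets.py wrappers keep a single public signature and dispatch on clowder_version, so extractor code does not need to know which backend it is talking to. A short sketch of the caller's view, assuming connector, host, key, and dataset_id are already defined:

import pyclowder.datasets

# The same call works against v1 and v2; the wrapper checks clowder_version
# internally and forwards to v1datasets or v2datasets.
info = pyclowder.datasets.get_info(connector, host, key, dataset_id)
file_list = pyclowder.datasets.get_file_list(connector, host, key, dataset_id)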
