
Commit a731c68
Parent: 1eb29b5

somehow some v2 authorization and routes got in here, this fixes those
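The change swaps the v2 call style (routes under /api/v2/ with an Authorization: Bearer header) back to the v1 style (routes under /api/ with the key passed as a ?key= query parameter) in the v1 module. As a rough illustration only, the two request styles seen in this diff look like the sketch below; the host, key, and dataset id values are placeholders, not taken from the repository:

    import requests

    host = "https://clowder.example.org"   # placeholder Clowder instance
    key = "myapikey"                       # placeholder API key
    datasetid = "mydatasetid"              # placeholder dataset id

    # v1 style (what this commit restores): key as a query parameter
    r_v1 = requests.get("%s/api/datasets/%s?key=%s" % (host, datasetid, key))

    # v2 style (what slipped in): bearer token header on the /api/v2/ route
    r_v2 = requests.get("%s/api/v2/datasets/%s" % (host, datasetid),
                        headers={"Authorization": "Bearer " + key})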

1 file changed

pyclowder/api/v1/datasets.py (12 additions, 25 deletions)
@@ -4,8 +4,6 @@
 import tempfile
 
 import requests
-import pyclowder.api.v2.datasets as v2datasets
-import pyclowder.api.v1.datasets as v1datasets
 from pyclowder.client import ClowderClient
 from pyclowder.collections import get_datasets, get_child_collections, delete as delete_collection
 from pyclowder.utils import StatusMessage
@@ -63,9 +61,7 @@ def delete(connector, client, datasetid):
     client -- ClowderClient containing authentication credentials
     datasetid -- the dataset to delete
     """
-    headers = {"Authorization": "Bearer " + client.key}
-
-    url = "%s/api/v2/datasets/%s" % (client.host, datasetid)
+    url = "%s/api/datasets/%s?key=%s" % (client.host, datasetid, client.key)
 
     result = requests.delete(url, verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
@@ -130,7 +126,7 @@ def download_metadata(connector, client, datasetid, extractor=None):
     headers = {"Authorization": "Bearer " + client.key}
 
     filterstring = "" if extractor is None else "&extractor=%s" % extractor
-    url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)
+    url = '%s/api/datasets/%s/metadata?key=%s' % (client.host, datasetid, client.key)
 
     # fetch data
     result = requests.get(url, stream=True, headers=headers,
@@ -147,12 +143,10 @@ def get_info(connector, client, datasetid):
     client -- ClowderClient containing authentication credentials
     datasetid -- the dataset to get info of
     """
-    headers = {"Authorization": "Bearer " + client.key}
 
-    url = "%s/api/v2/datasets/%s" % (client.host, datasetid)
+    url = "%s/api/datasets/%s?key=%s" % (client.host, datasetid, client.key)
 
-    result = requests.get(url, headers=headers,
-                          verify=connector.ssl_verify if connector else True)
+    result = requests.get(url, verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
 
     return json.loads(result.text)
@@ -165,11 +159,9 @@ def get_file_list(connector, client, datasetid):
     client -- ClowderClient containing authentication credentials
     datasetid -- the dataset to get filelist of
     """
-    headers = {"Authorization": "Bearer " + client.key}
+    url = "%s/api/datasets/%s/files?key=%s" % (client.host, datasetid, client.key)
 
-    url = "%s/api/v2/datasets/%s/files" % (client.host, datasetid)
-
-    result = requests.get(url, headers=headers, verify=connector.ssl_verify if connector else True)
+    result = requests.get(url, verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
 
     return json.loads(result.text)
@@ -184,14 +176,11 @@ def remove_metadata(connector, client, datasetid, extractor=None):
     extractor -- extractor name to filter deletion
                  !!! ALL JSON-LD METADATA WILL BE REMOVED IF NO extractor PROVIDED !!!
     """
-    headers = {"Authorization": "Bearer " + client.key}
-
     filterstring = "" if extractor is None else "&extractor=%s" % extractor
-    url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)
+    url = '%s/api/datasets/%s/metadata?key=%s' % (client.host, datasetid, client.key)
 
     # fetch data
-    result = requests.delete(url, stream=True, headers=headers,
-                             verify=connector.ssl_verify if connector else True)
+    result = requests.delete(url, stream=True, verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
 
 def submit_extraction(connector, client, datasetid, extractorname):
@@ -203,10 +192,9 @@ def submit_extraction(connector, client, datasetid, extractorname):
     datasetid -- the dataset UUID to submit
     extractorname -- registered name of extractor to trigger
     """
-    headers = {'Content-Type': 'application/json',
-               "Authorization": "Bearer " + client.key}
+    headers = {'Content-Type': 'application/json'}
 
-    url = "%s/api/v2/datasets/%s/extractions?key=%s" % (client.host, datasetid)
+    url = "%s/api/v2/datasets/%s/extractions?key=%s" % (client.host, datasetid, client.key)
 
     result = requests.post(url,
                            headers=headers,
@@ -266,11 +254,10 @@ def upload_metadata(connector, client, datasetid, metadata):
     datasetid -- the dataset that is currently being processed
     metadata -- the metadata to be uploaded
     """
-    headers = {'Content-Type': 'application/json',
-               "Authorization": "Bearer " + client.key}
+    headers = {'Content-Type': 'application/json'}
     connector.message_process({"type": "dataset", "id": datasetid}, "Uploading dataset metadata.")
 
-    url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)
+    url = '%s/api/v2/datasets/%s/metadata?key=%s' % (client.host, datasetid, client.key)
     result = requests.post(url, headers=headers, data=json.dumps(metadata),
                            verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
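For completeness, a hedged usage sketch of the patched v1 helpers. It assumes ClowderClient can be constructed with host and key keyword arguments (the diff only shows that client.host and client.key are read) and that passing connector=None is acceptable, since the code falls back to verify=True when no connector is given:

    from pyclowder.client import ClowderClient
    import pyclowder.api.v1.datasets as v1datasets

    # Assumed constructor signature; host/key values are placeholders.
    client = ClowderClient(host="https://clowder.example.org", key="myapikey")

    # GET %s/api/datasets/%s?key=%s
    info = v1datasets.get_info(None, client, "mydatasetid")

    # GET %s/api/datasets/%s/files?key=%s
    files = v1datasets.get_file_list(None, client, "mydatasetid")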
