
Commit 29d3e22

need / for api calls
1 parent abd54d5 commit 29d3e22

6 files changed: +63 -63 lines changed

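The commit message is terse, so a short sketch of the failure mode may help: pyclowder builds request URLs by formatting client.host directly into a string, and when the stored host has no trailing slash the old pattern glues "api" straight onto the hostname. The host and id values below are hypothetical placeholders, not taken from the diff.

    # Minimal sketch of the URL bug this commit fixes, assuming client.host is
    # stored without a trailing slash (host and ids are placeholders).
    host = "https://clowder.example.edu"
    datasetid = "abc123"

    old_url = '%sapi/datasets/%s/download?key=%s' % (host, datasetid, "KEY")
    new_url = '%s/api/datasets/%s/download?key=%s' % (host, datasetid, "KEY")

    print(old_url)  # https://clowder.example.eduapi/datasets/abc123/download?key=KEY  (malformed)
    print(new_url)  # https://clowder.example.edu/api/datasets/abc123/download?key=KEY (valid)

Note that the patched form still assumes the configured host has no trailing slash; a host ending in "/" would now yield a double slash, which servers generally tolerate but which is worth keeping in mind.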

pyclowder/api/v1/datasets.py

Lines changed: 10 additions & 10 deletions
@@ -24,7 +24,7 @@ def create_empty(connector, client, datasetname, description, parentid=None, spa
     """
     logger = logging.getLogger(__name__)

-    url = '%sapi/datasets/createempty?key=%s' % (client.host, client.key)
+    url = '%s/api/datasets/createempty?key=%s' % (client.host, client.key)

     if parentid:
         if spaceid:
@@ -65,7 +65,7 @@ def delete(connector, client, datasetid):
     """
     headers = {"Authorization": "Bearer " + client.key}

-    url = "%sapi/v2/datasets/%s" % (client.host, datasetid)
+    url = "%s/api/v2/datasets/%s" % (client.host, datasetid)

     result = requests.delete(url, verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
@@ -106,7 +106,7 @@ def download(connector, client, datasetid):
     connector.message_process({"type": "dataset", "id": datasetid}, "Downloading dataset.")

     # fetch dataset zipfile
-    url = '%sapi/datasets/%s/download?key=%s' % (client.host, datasetid,client.key)
+    url = '%s/api/datasets/%s/download?key=%s' % (client.host, datasetid,client.key)
     result = requests.get(url, stream=True,
                           verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
@@ -130,7 +130,7 @@ def download_metadata(connector, client, datasetid, extractor=None):
     headers = {"Authorization": "Bearer " + client.key}

     filterstring = "" if extractor is None else "&extractor=%s" % extractor
-    url = '%sapi/v2/datasets/%s/metadata' % (client.host, datasetid)
+    url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)

     # fetch data
     result = requests.get(url, stream=True, headers=headers,
@@ -149,7 +149,7 @@ def get_info(connector, client, datasetid):
     """
     headers = {"Authorization": "Bearer " + client.key}

-    url = "%sapi/v2/datasets/%s" % (client.host, datasetid)
+    url = "%s/api/v2/datasets/%s" % (client.host, datasetid)

     result = requests.get(url, headers=headers,
                           verify=connector.ssl_verify if connector else True)
@@ -167,7 +167,7 @@ def get_file_list(connector, client, datasetid):
     """
     headers = {"Authorization": "Bearer " + client.key}

-    url = "%sapi/v2/datasets/%s/files" % (client.host, datasetid)
+    url = "%s/api/v2/datasets/%s/files" % (client.host, datasetid)

     result = requests.get(url, headers=headers, verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
@@ -187,7 +187,7 @@ def remove_metadata(connector, client, datasetid, extractor=None):
     headers = {"Authorization": "Bearer " + client.key}

     filterstring = "" if extractor is None else "&extractor=%s" % extractor
-    url = '%sapi/v2/datasets/%s/metadata' % (client.host, datasetid)
+    url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)

     # fetch data
     result = requests.delete(url, stream=True, headers=headers,
@@ -206,7 +206,7 @@ def submit_extraction(connector, client, datasetid, extractorname):
     headers = {'Content-Type': 'application/json',
                "Authorization": "Bearer " + client.key}

-    url = "%sapi/v2/datasets/%s/extractions?key=%s" % (client.host, datasetid)
+    url = "%s/api/v2/datasets/%s/extractions?key=%s" % (client.host, datasetid)

     result = requests.post(url,
                            headers=headers,
@@ -252,7 +252,7 @@ def upload_tags(connector, client, datasetid, tags):
     connector.status_update(StatusMessage.processing, {"type": "dataset", "id": datasetid}, "Uploading dataset tags.")

     headers = {'Content-Type': 'application/json'}
-    url = '%sapi/datasets/%s/tags?key=%s' % (client.host, datasetid, client.key)
+    url = '%s/api/datasets/%s/tags?key=%s' % (client.host, datasetid, client.key)
     result = connector.post(url, headers=headers, data=json.dumps(tags),
                             verify=connector.ssl_verify if connector else True)

@@ -270,7 +270,7 @@ def upload_metadata(connector, client, datasetid, metadata):
                "Authorization": "Bearer " + client.key}
     connector.message_process({"type": "dataset", "id": datasetid}, "Uploading dataset metadata.")

-    url = '%sapi/v2/datasets/%s/metadata' % (client.host, datasetid)
+    url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)
     result = requests.post(url, headers=headers, data=json.dumps(metadata),
                            verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
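One pre-existing detail is visible in both the removed and added lines of submit_extraction above (and the same pattern appears in the v2 modules further down): the format string has three %s placeholders but the tuple supplies only two values, so building this URL would raise a TypeError before any request is sent. The obvious correction would pass the key as the third argument, but that is an assumption about intent, not something this commit changes:

    # Hypothetical correction, not part of this commit: supply the third value
    # that the format string expects.
    url = "%s/api/v2/datasets/%s/extractions?key=%s" % (client.host, datasetid, client.key)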

pyclowder/api/v1/files.py

Lines changed: 11 additions & 11 deletions
@@ -42,7 +42,7 @@ def download(connector, client, fileid, intermediatefileid=None, ext=""):
     if not intermediatefileid:
         intermediatefileid = fileid

-    url = '%sapi/files/%s?key=%s' % (client.host, intermediatefileid, client.key)
+    url = '%s/api/files/%s?key=%s' % (client.host, intermediatefileid, client.key)
     result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True)

     (inputfile, inputfilename) = tempfile.mkstemp(suffix=ext)
@@ -66,7 +66,7 @@ def download_info(connector, client, fileid):
     fileid -- the file to fetch metadata of
     """

-    url = '%sapi/files/%s/metadata?key=%s' % (client.host, fileid, client.key)
+    url = '%s/api/files/%s/metadata?key=%s' % (client.host, fileid, client.key)

     # fetch data
     result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True)
@@ -85,7 +85,7 @@ def download_metadata(connector, client, fileid, extractor=None):
     """

     filterstring = "" if extractor is None else "&extractor=%s" % extractor
-    url = '%sapi/files/%s/metadata.jsonld?key=%s%s' % (client.host, fileid, client.key, filterstring)
+    url = '%s/api/files/%s/metadata.jsonld?key=%s%s' % (client.host, fileid, client.key, filterstring)

     # fetch data
     result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True)
@@ -103,7 +103,7 @@ def submit_extraction(connector, client, fileid, extractorname):
     extractorname -- registered name of extractor to trigger
     """

-    url = "%sapi/files/%s/extractions?key=%s" % (client.host, fileid, client.key)
+    url = "%s/api/files/%s/extractions?key=%s" % (client.host, fileid, client.key)

     result = connector.post(url,
                             headers={'Content-Type': 'application/json'},
@@ -176,7 +176,7 @@ def upload_metadata(connector, client, fileid, metadata):
     connector.message_process({"type": "file", "id": fileid}, "Uploading file metadata.")

     headers = {'Content-Type': 'application/json'}
-    url = '%sapi/files/%s/metadata.jsonld?key=%s' % (client.host, fileid, client.key)
+    url = '%s/api/files/%s/metadata.jsonld?key=%s' % (client.host, fileid, client.key)
     result = connector.post(url, headers=headers, data=json.dumps(metadata),
                             verify=connector.ssl_verify if connector else True)

@@ -202,7 +202,7 @@ def upload_preview(connector, client, fileid, previewfile, previewmetadata=None,
     headers = {'Content-Type': 'application/json'}

     # upload preview
-    url = '%sapi/previews?key=%s' % (client.host, client.key)
+    url = '%s/api/previews?key=%s' % (client.host, client.key)
     with open(previewfile, 'rb') as filebytes:
         # If a custom preview file MIME type is provided, use it to generate the preview file object.
         if preview_mimetype is not None:
@@ -216,13 +216,13 @@ def upload_preview(connector, client, fileid, previewfile, previewmetadata=None,

     # associate uploaded preview with orginal file
     if fileid and not (previewmetadata and 'section_id' in previewmetadata and previewmetadata['section_id']):
-        url = '%sapi/files/%s/previews/%s?key=%s' % (client.host, fileid, previewid, client.key)
+        url = '%s/api/files/%s/previews/%s?key=%s' % (client.host, fileid, previewid, client.key)
         result = connector.post(url, headers=headers, data=json.dumps({}),
                                 verify=connector.ssl_verify if connector else True)

     # associate metadata with preview
     if previewmetadata is not None:
-        url = '%sapi/previews/%s/metadata?key=%s' % (client.host, previewid, client.key)
+        url = '%s/api/previews/%s/metadata?key=%s' % (client.host, previewid, client.key)
         result = connector.post(url, headers=headers, data=json.dumps(previewmetadata),
                                 verify=connector.ssl_verify if connector else True)

@@ -242,7 +242,7 @@ def upload_tags(connector, client, fileid, tags):
     connector.message_process({"type": "file", "id": fileid}, "Uploading file tags.")

     headers = {'Content-Type': 'application/json'}
-    url = '%sapi/files/%s/tags?key=%s' % (client.host, fileid, client.key)
+    url = '%s/api/files/%s/tags?key=%s' % (client.host, fileid, client.key)
     result = connector.post(url, headers=headers, data=json.dumps(tags),
                             verify=connector.ssl_verify if connector else True)

@@ -299,7 +299,7 @@ def upload_to_dataset(connector, client, datasetid, filepath, check_duplicate=Fa
         if filepath.startswith(connector.mounted_paths[source_path]):
             return _upload_to_dataset_local(connector, client.host, client.key, datasetid, filepath)

-    url = '%sapi/uploadToDataset/%s?key=%s' % (client.host, datasetid, client.key)
+    url = '%s/api/uploadToDataset/%s?key=%s' % (client.host, datasetid, client.key)

     if os.path.exists(filepath):
         filename = os.path.basename(filepath)
@@ -328,7 +328,7 @@ def _upload_to_dataset_local(connector, client, datasetid, filepath):
     """

     logger = logging.getLogger(__name__)
-    url = '%sapi/uploadToDataset/%s?key=%s' % (client.host, datasetid, cliet.key)
+    url = '%s/api/uploadToDataset/%s?key=%s' % (client.host, datasetid, cliet.key)

     if os.path.exists(filepath):
         # Replace local path with remote path before uploading
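Likewise, the changed line in _upload_to_dataset_local above still reads cliet.key, which would raise a NameError when the function runs; given the client parameter in the signature, the intended attribute is presumably client.key. A sketch of that presumed fix, which this commit does not make:

    # Hypothetical correction, not part of this commit: "cliet" is presumably
    # a typo for the function's "client" parameter.
    url = '%s/api/uploadToDataset/%s?key=%s' % (client.host, datasetid, client.key)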

pyclowder/api/v2/datasets.py

Lines changed: 9 additions & 9 deletions
@@ -29,7 +29,7 @@ def create_empty(connector, client, datasetname, description, parentid=None, spa

     logger = logging.getLogger(__name__)

-    url = '%sapi/v2/datasets' % client.host
+    url = '%s/api/v2/datasets' % client.host
     headers = {"Content-Type": "application/json",
                "Authorization": "Bearer " + client.key}
     result = requests.post(url, headers=headers,
@@ -54,7 +54,7 @@ def delete(connector, client , datasetid):
     """
     headers = {"Authorization": "Bearer " + client.key}

-    url = "%sapi/v2/datasets/%s" % (client.host, datasetid)
+    url = "%s/api/v2/datasets/%s" % (client.host, datasetid)

     result = requests.delete(url, headers=headers, verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
@@ -99,7 +99,7 @@ def download(connector, client, datasetid):

     headers = {"Authorization": "Bearer " + client.key}
     # fetch dataset zipfile
-    url = '%sapi/v2/datasets/%s/download' % (client.host, datasetid)
+    url = '%s/api/v2/datasets/%s/download' % (client.host, datasetid)
     result = requests.get(url, stream=True, headers=headers,
                           verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
@@ -124,7 +124,7 @@ def download_metadata(connector, client, datasetid, extractor=None):
     headers = {"Authorization": "Bearer " + client.key}

     filterstring = "" if extractor is None else "&extractor=%s" % extractor
-    url = '%sapi/v2/datasets/%s/metadata' % (client.host, datasetid)
+    url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)

     # fetch data
     result = requests.get(url, stream=True, headers=headers,
@@ -144,7 +144,7 @@ def get_info(connector, client, datasetid):
     """
     headers = {"Authorization": "Bearer " + client.key}

-    url = "%sapi/v2/datasets/%s" % (client.host, datasetid)
+    url = "%s/api/v2/datasets/%s" % (client.host, datasetid)

     result = requests.get(url, headers=headers,
                           verify=connector.ssl_verify if connector else True)
@@ -163,7 +163,7 @@ def get_file_list(connector, client, datasetid):
     """
     headers = {"Authorization": "Bearer " + client.key}

-    url = "%sapi/v2/datasets/%s/files" % (client.host, datasetid)
+    url = "%s/api/v2/datasets/%s/files" % (client.host, datasetid)

     result = requests.get(url, headers=headers, verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
@@ -184,7 +184,7 @@ def remove_metadata(connector, client, datasetid, extractor=None):
     headers = {"Authorization": "Bearer " + client.key}

     filterstring = "" if extractor is None else "&extractor=%s" % extractor
-    url = '%sapi/v2/datasets/%s/metadata' % (client.host, datasetid)
+    url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)

     # fetch data
     result = requests.delete(url, stream=True, headers=headers,
@@ -204,7 +204,7 @@ def submit_extraction(connector, client, datasetid, extractorname):
     headers = {'Content-Type': 'application/json',
                "Authorization": "Bearer " + client.key}

-    url = "%sapi/v2/datasets/%s/extractions?key=%s" % (client.host, datasetid)
+    url = "%s/api/v2/datasets/%s/extractions?key=%s" % (client.host, datasetid)

     result = requests.post(url,
                            headers=headers,
@@ -229,7 +229,7 @@ def upload_metadata(connector, client, datasetid, metadata):
     connector.message_process({"type": "dataset", "id": datasetid}, "Uploading dataset metadata.")


-    url = '%sapi/v2/datasets/%s/metadata' % (client.host, datasetid)
+    url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)
     result = requests.post(url, headers=headers, data=json.dumps(metadata),
                            verify=connector.ssl_verify if connector else True)
     result.raise_for_status()

pyclowder/api/v2/files.py

Lines changed: 11 additions & 11 deletions
@@ -44,7 +44,7 @@ def download(connector, client, fileid, intermediatefileid=None, ext=""):
     if not intermediatefileid:
         intermediatefileid = fileid

-    url = '%sapi/v2/files/%s' % (client.host, intermediatefileid)
+    url = '%s/api/v2/files/%s' % (client.host, intermediatefileid)
     headers = {"Authorization": "Bearer " + client.key}
     result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True, headers=headers)

@@ -69,7 +69,7 @@ def download_info(connector, client, fileid):
     fileid -- the file to fetch metadata of
     """

-    url = '%sapi/v2/files/%s/metadata' % (client.host, fileid)
+    url = '%s/api/v2/files/%s/metadata' % (client.host, fileid)
     headers = {"Authorization": "Bearer " + client.key}
     # fetch data
     result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True, headers=headers)
@@ -88,7 +88,7 @@ def download_metadata(connector,client, fileid, extractor=None):
     """

     filterstring = "" if extractor is None else "?extractor=%s" % extractor
-    url = '%sapi/v2/files/%s/metadata?%s' % (client.host, fileid, filterstring)
+    url = '%s/api/v2/files/%s/metadata?%s' % (client.host, fileid, filterstring)
     headers = {"Authorization": "Bearer " + client.key}

     # fetch data
@@ -107,7 +107,7 @@ def submit_extraction(connector, client, fileid, extractorname):
     extractorname -- registered name of extractor to trigger
     """

-    url = "%sapi/v2/files/%s/extractions?key=%s" % (client.host, fileid, client.key)
+    url = "%s/api/v2/files/%s/extractions?key=%s" % (client.host, fileid, client.key)
     result = connector.post(url,
                             headers={'Content-Type': 'application/json',
                                      "Authorization": "Bearer " + client.key},
@@ -132,7 +132,7 @@ def upload_metadata(connector, client, fileid, metadata):
                'Authorization':'Bearer ' + client.key}
     print(metadata)
     as_json = json.dumps(metadata)
-    url = '%sapi/v2/files/%s/metadata' % (client.host, fileid)
+    url = '%s/api/v2/files/%s/metadata' % (client.host, fileid)
     result = connector.post(url, headers=headers, data=json.dumps(metadata),
                             verify=connector.ssl_verify if connector else True)

@@ -160,7 +160,7 @@ def upload_preview(connector, client, fileid, previewfile, previewmetadata=None,
     headers = {'Content-Type': 'application/json'}

     # upload preview
-    url = '%sapi/previews?key=%s' % (client.host, client.key)
+    url = '%s/api/previews?key=%s' % (client.host, client.key)
     with open(previewfile, 'rb') as filebytes:
         # If a custom preview file MIME type is provided, use it to generate the preview file object.
         if preview_mimetype is not None:
@@ -174,13 +174,13 @@ def upload_preview(connector, client, fileid, previewfile, previewmetadata=None,

     # associate uploaded preview with orginal file
     if fileid and not (previewmetadata and 'section_id' in previewmetadata and previewmetadata['section_id']):
-        url = '%sapi/files/%s/previews/%s?key=%s' % (host, fileid, previewid, key)
+        url = '%s/api/files/%s/previews/%s?key=%s' % (host, fileid, previewid, key)
         result = connector.post(url, headers=headers, data=json.dumps({}),
                                 verify=connector.ssl_verify if connector else True)

     # associate metadata with preview
     if previewmetadata is not None:
-        url = '%sapi/previews/%s/metadata?key=%s' % (host, previewid, key)
+        url = '%s/api/previews/%s/metadata?key=%s' % (host, previewid, key)
         result = connector.post(url, headers=headers, data=json.dumps(previewmetadata),
                                 verify=connector.ssl_verify if connector else True)

@@ -200,7 +200,7 @@ def upload_tags(connector, client, fileid, tags):
     connector.message_process({"type": "file", "id": fileid}, "Uploading file tags.")

     headers = {'Content-Type': 'application/json'}
-    url = '%sapi/files/%s/tags?key=%s' % (client.host, fileid, client.key)
+    url = '%s/api/files/%s/tags?key=%s' % (client.host, fileid, client.key)
     result = connector.post(url, headers=headers, data=json.dumps(tags),
                             verify=connector.ssl_verify if connector else True)

@@ -259,7 +259,7 @@ def upload_to_dataset(connector, client, datasetid, filepath, check_duplicate=Fa
         if filepath.startswith(connector.mounted_paths[source_path]):
             return _upload_to_dataset_local(connector, client.host, client.key, datasetid, filepath)

-    url = '%sapi/v2/datasets/%s/files' % (client.host, datasetid)
+    url = '%s/api/v2/datasets/%s/files' % (client.host, datasetid)

     if os.path.exists(filepath):
         filename = os.path.basename(filepath)
@@ -291,7 +291,7 @@ def _upload_to_dataset_local(connector, host, key, datasetid, filepath):
     """
     client = ClowderClient(host, key)
     logger = logging.getLogger(__name__)
-    url = '%sapi/v2/datatsets/%s/files' % (client.host, datasetid)
+    url = '%s/api/v2/datatsets/%s/files' % (client.host, datasetid)

     if os.path.exists(filepath):
         # Replace local path with remote path before uploading
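Two further details in this file are carried over unchanged and appear to predate the commit: the upload path in _upload_to_dataset_local spells "datatsets", and the preview-association block in upload_preview formats with bare host and key names even though the function receives a client object. Presumed corrections would look like the sketch below; both are assumptions about intent, not part of this diff:

    # Hypothetical corrections, not part of this commit.
    url = '%s/api/v2/datasets/%s/files' % (client.host, datasetid)  # was '.../datatsets/...'
    url = '%s/api/files/%s/previews/%s?key=%s' % (client.host, fileid, previewid, client.key)  # was host/key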
