Commit f6f4012

fix remaining endpoints
1 parent 6b0a00d commit f6f4012

File tree: 5 files changed, +31 -36 lines

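Every change in this commit follows the same pattern: endpoint URLs previously built with '%s'-style formatting against the raw host string are now assembled with posixpath.join(host, ...), so the host works with or without a trailing slash. A minimal sketch of the difference, using a hypothetical host and API key (not values from this commit):

    import posixpath

    host_with_slash = "https://clowder.example.edu/"    # hypothetical Clowder host
    host_without_slash = "https://clowder.example.edu"
    key = "myapikey"                                     # hypothetical API key

    # Old pattern: only correct when the host ends with a slash.
    print('%sapi/collections?key=%s' % (host_without_slash, key))
    # -> https://clowder.example.eduapi/collections?key=myapikey  (separator lost)

    # New pattern: posixpath.join inserts exactly one slash either way.
    print(posixpath.join(host_with_slash, 'api/collections?key=%s' % key))
    print(posixpath.join(host_without_slash, 'api/collections?key=%s' % key))
    # -> https://clowder.example.edu/api/collections?key=myapikey  (both cases)

Note that posixpath.join would discard the host entirely if the second argument began with a slash; the paths joined in this commit all start with 'api/', so that case does not arise.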

pyclowder/collections.py

Lines changed: 11 additions & 11 deletions
@@ -6,7 +6,7 @@
 import json
 import logging
 import requests
-
+import posixpath
 from pyclowder.client import ClowderClient
 
 
@@ -27,26 +27,26 @@ def create_empty(connector, host, key, collectionname, description, parentid=Non
 
     if parentid:
         if spaceid:
-            url = '%sapi/collections/newCollectionWithParent?key=%s' % (host, key)
+            url = posixpath.join(host, 'api/collections/newCollectionWithParent?key=%s' % key)
             result = requests.post(url, headers={"Content-Type": "application/json"},
                                    data=json.dumps({"name": collectionname, "description": description,
                                                     "parentId": [parentid], "space": spaceid}),
                                    verify=connector.ssl_verify if connector else True)
         else:
-            url = '%sapi/collections/newCollectionWithParent?key=%s' % (host, key)
+            url = posixpath.join(host, 'api/collections/newCollectionWithParent?key=%s' % key)
             result = requests.post(url, headers={"Content-Type": "application/json"},
                                    data=json.dumps({"name": collectionname, "description": description,
                                                     "parentId": [parentid]}),
                                    verify=connector.ssl_verify if connector else True)
     else:
         if spaceid:
-            url = '%sapi/collections?key=%s' % (host, key)
+            url = posixpath.join(host, 'api/collections?key=%s' % key)
             result = requests.post(url, headers={"Content-Type": "application/json"},
                                    data=json.dumps({"name": collectionname, "description": description,
                                                     "space": spaceid}),
                                    verify=connector.ssl_verify if connector else True)
         else:
-            url = '%sapi/collections?key=%s' % (host, key)
+            url = posixpath.join(host, 'api/collections?key=%s' % key)
             result = requests.post(url, headers={"Content-Type": "application/json"},
                                    data=json.dumps({"name": collectionname, "description": description}),
                                    verify=connector.ssl_verify if connector else True)
@@ -59,7 +59,7 @@ def create_empty(connector, host, key, collectionname, description, parentid=Non
 
 
 def delete(connector, host, key, collectionid):
-    url = "%sapi/collections/%s?key=%s" % (host, collectionid, key)
+    url = posixpath.join(host, "api/collections/%s?key=%s" % (collectionid, key))
 
     result = requests.delete(url, verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
@@ -77,7 +77,7 @@ def get_child_collections(connector, host, key, collectionid):
     collectionid -- the collection to get children of
     """
 
-    url = "%sapi/collections/%s/getChildCollections?key=%s" % (host, collectionid, key)
+    url = posixpath.join(host, "api/collections/%s/getChildCollections?key=%s" % (collectionid, key))
 
     result = requests.get(url,
                           verify=connector.ssl_verify if connector else True)
@@ -96,7 +96,7 @@ def get_datasets(connector, host, key, collectionid):
     datasetid -- the collection to get datasets of
     """
 
-    url = "%sapi/collections/%s/datasets?key=%s" % (host, collectionid, key)
+    url = posixpath.join(host, "api/collections/%s/datasets?key=%s" % (collectionid, key))
 
     result = requests.get(url,
                           verify=connector.ssl_verify if connector else True)
@@ -126,7 +126,7 @@ def upload_preview(connector, host, key, collectionid, previewfile, previewmetad
     headers = {'Content-Type': 'application/json'}
 
     # upload preview
-    url = '%sapi/previews?key=%s' % (host, key)
+    url = posixpath.join(host, 'api/previews?key=%s' % key)
     with open(previewfile, 'rb') as filebytes:
         result = requests.post(url, files={"File": filebytes},
                                verify=connector.ssl_verify if connector else True)
@@ -136,14 +136,14 @@ def upload_preview(connector, host, key, collectionid, previewfile, previewmetad
 
     # associate uploaded preview with original collection
     if collectionid and not (previewmetadata and 'section_id' in previewmetadata and previewmetadata['section_id']):
-        url = '%sapi/collections/%s/previews/%s?key=%s' % (host, collectionid, previewid, key)
+        url = posixpath.join(host, 'api/collections/%s/previews/%s?key=%s' % (collectionid, previewid, key))
         result = requests.post(url, headers=headers, data=json.dumps({}),
                                verify=connector.ssl_verify if connector else True)
         result.raise_for_status()
 
     # associate metadata with preview
     if previewmetadata is not None:
-        url = '%sapi/previews/%s/metadata?key=%s' % (host, previewid, key)
+        url = posixpath.join(host, 'api/previews/%s/metadata?key=%s' % (previewid, key))
         result = requests.post(url, headers=headers, data=json.dumps(previewmetadata),
                                verify=connector.ssl_verify if connector else True)
         result.raise_for_status()
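As a usage sketch with hypothetical values (the host, key, and collection id below are made up, not from this commit), the rewritten helpers are called exactly as before, and the verify fallback in each request still applies when no connector is passed:

    from pyclowder.collections import get_datasets, delete

    host = "https://clowder.example.edu"        # hypothetical; trailing slash now optional
    key = "myapikey"                            # hypothetical API key
    collection_id = "5d9f9d7e4f0c1a2b3c4d5e6f"  # hypothetical collection id

    # With connector=None, `connector.ssl_verify if connector else True`
    # evaluates to True, so TLS certificates are still checked by default.
    datasets = get_datasets(None, host, key, collection_id)
    delete(None, host, key, collection_id)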

pyclowder/datasets.py

Lines changed: 2 additions & 6 deletions
@@ -6,12 +6,8 @@
 import json
 import logging
 import os
-import tempfile
-
-import requests
+import posixpath
 from pyclowder.client import ClowderClient
-import pyclowder.api.v2.datasets as v2datasets
-import pyclowder.api.v1.datasets as v1datasets
 from pyclowder.collections import get_datasets, get_child_collections, delete as delete_collection
 from pyclowder.utils import StatusMessage
 
@@ -207,7 +203,7 @@ def upload_tags(connector, host, key, datasetid, tags):
     connector.status_update(StatusMessage.processing, {"type": "dataset", "id": datasetid}, "Uploading dataset tags.")
 
     headers = {'Content-Type': 'application/json'}
-    url = '%sapi/datasets/%s/tags?key=%s' % (client.host, datasetid, client.key)
+    url = posixpath.join(client.host, 'api/datasets/%s/tags?key=%s' % (datasetid, client.key))
     result = connector.post(url, headers=headers, data=json.dumps(tags),
                             verify=connector.ssl_verify if connector else True)
 
pyclowder/files.py

Lines changed: 3 additions & 3 deletions
@@ -6,7 +6,7 @@
 import json
 import logging
 import os
-
+import posixpath
 import requests
 from requests_toolbelt.multipart.encoder import MultipartEncoder
 
@@ -235,7 +235,7 @@ def upload_tags(connector, host, key, fileid, tags):
     connector.message_process({"type": "file", "id": fileid}, "Uploading file tags.")
 
     headers = {'Content-Type': 'application/json'}
-    url = '%sapi/files/%s/tags?key=%s' % (client.host, fileid, client.key)
+    url = posixpath.join(client.host, 'api/files/%s/tags?key=%s' % (fileid, client.key))
     result = connector.post(url, headers=headers, data=json.dumps(tags),
                             verify=connector.ssl_verify if connector else True)
 
@@ -284,7 +284,7 @@ def upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate
         if filepath.startswith(connector.mounted_paths[source_path]):
             return _upload_to_dataset_local(connector, client.host, client.key, datasetid, filepath)
 
-    url = '%sapi/uploadToDataset/%s?key=%s' % (client.host, datasetid, client.key)
+    url = posixpath.join(client.host, 'api/uploadToDataset/%s?key=%s' % (datasetid, client.key))
 
     if os.path.exists(filepath):
         filename = os.path.basename(filepath)

pyclowder/geostreams.py

Lines changed: 11 additions & 12 deletions
@@ -5,7 +5,7 @@
 
 import json
 import logging
-
+import posixpath
 import requests
 
 
@@ -36,7 +36,7 @@ def create_sensor(connector, host, key, sensorname, geom, type, region):
         }
     }
 
-    url = "%sapi/geostreams/sensors?key=%s" % (host, key)
+    url = posixpath.join(host, "api/geostreams/sensors?key=%s" % key)
 
     result = requests.post(url, headers={'Content-type': 'application/json'},
                            data=json.dumps(body),
@@ -75,7 +75,7 @@ def create_stream(connector, host, key, streamname, sensorid, geom, properties=N
         "sensor_id": str(sensorid)
     }
 
-    url = "%sapi/geostreams/streams?key=%s" % (host, key)
+    url = posixpath.join(host, "api/geostreams/streams?key=%s" % key)
 
     result = requests.post(url, headers={'Content-type': 'application/json'},
                            data=json.dumps(body),
@@ -116,7 +116,7 @@ def create_datapoint(connector, host, key, streamid, geom, starttime, endtime, p
         "stream_id": str(streamid)
     }
 
-    url = '%sapi/geostreams/datapoints?key=%s' % (host, key)
+    url = posixpath.join(host, 'api/geostreams/datapoints?key=%s' % key)
 
     result = requests.post(url, headers={'Content-type': 'application/json'},
                            data=json.dumps(body),
@@ -141,7 +141,7 @@ def get_sensor_by_name(connector, host, key, sensorname):
 
     logger = logging.getLogger(__name__)
 
-    url = "%sapi/geostreams/sensors?sensor_name=%s&key=%s" % (host, sensorname, key)
+    url = posixpath.join(host, "api/geostreams/sensors?sensor_name=%s&key=%s" % (sensorname, key))
 
     result = requests.get(url,
                           verify=connector.ssl_verify if connector else True)
@@ -167,7 +167,7 @@ def get_sensors_by_circle(connector, host, key, lon, lat, radius=0):
     radius -- distance in meters around point to search
     """
 
-    url = "%sapi/geostreams/sensors?geocode=%s,%s,%s&key=%s" % (host, lat, lon, radius, key)
+    url = posixpath.join(host, "api/geostreams/sensors?geocode=%s,%s,%s&key=%s" % (lat, lon, radius, key))
 
     result = requests.get(url,
                           verify=connector.ssl_verify if connector else True)
@@ -192,10 +192,9 @@ def get_sensors_by_polygon(connector, host, key, coord_list):
     """
 
     coord_strings = [str(i) for i in coord_list]
-    url = "%sapi/geostreams/sensors?geocode=%s&key=%s" % (host, ','.join(coord_strings), key)
+    url = posixpath.join(host, "api/geostreams/sensors?geocode=%s&key=%s" % (','.join(coord_strings), key))
 
-    result = requests.get(url,
-                          verify=connector.ssl_verify if connector else True)
+    result = requests.get(url, verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
 
     # Return first sensor
@@ -218,7 +217,7 @@ def get_stream_by_name(connector, host, key, streamname):
 
     logger = logging.getLogger(__name__)
 
-    url = "%sapi/geostreams/streams?stream_name=%s&key=%s" % (host, streamname, key)
+    url = posixpath.join(host, "api/geostreams/streams?stream_name=%s&key=%s" % (streamname, key))
 
     result = requests.get(url,
                           verify=connector.ssl_verify if connector else True)
@@ -244,7 +243,7 @@ def get_streams_by_circle(connector, host, key, lon, lat, radius=0):
     radius -- distance in meters around point to search
    """
 
-    url = "%sapi/geostreams/stream?geocode=%s,%s,%s&key=%s" % (host, lat, lon, radius, key)
+    url = posixpath.join(host, "api/geostreams/stream?geocode=%s,%s,%s&key=%s" % (lat, lon, radius, key))
 
    result = requests.get(url,
                          verify=connector.ssl_verify if connector else True)
@@ -268,7 +267,7 @@ def get_streams_by_polygon(connector, host, key, coord_list):
     """
 
     coord_strings = [str(i) for i in coord_list]
-    url = "%sapi/geostreams/stream?geocode=%s&key=%s" % (host, ','.join(coord_strings), key)
+    url = posixpath.join(host, "api/geostreams/stream?geocode=%s&key=%s" % (','.join(coord_strings), key))
 
     result = requests.get(url,
                           verify=connector.ssl_verify if connector else True)
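A small point the geostreams helpers rely on, shown here with hypothetical values: posixpath.join performs no percent-encoding, so the query string, including the comma-separated geocode parameter, is interpolated with % first and then passed through the join unchanged:

    import posixpath

    host = "https://clowder.example.edu"   # hypothetical host
    key = "myapikey"                       # hypothetical API key
    coord_strings = [str(i) for i in [-88.2, 40.1, 0]]

    url = posixpath.join(host, "api/geostreams/sensors?geocode=%s&key=%s"
                         % (','.join(coord_strings), key))
    print(url)
    # -> https://clowder.example.edu/api/geostreams/sensors?geocode=-88.2,40.1,0&key=myapikey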

pyclowder/sections.py

Lines changed: 4 additions & 4 deletions
@@ -5,7 +5,7 @@
 
 import json
 import logging
-
+import posixpath
 import requests
 
 
@@ -23,7 +23,7 @@ def upload(connector, host, key, sectiondata):
     headers = {'Content-Type': 'application/json'}
 
     # upload section
-    url = '%sapi/sections?key=%s' % (host, key)
+    url = posixpath.join(host, 'api/sections?key=%s' % key)
     result = requests.post(url, headers=headers, data=json.dumps(sectiondata),
                            verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
@@ -48,7 +48,7 @@ def upload_tags(connector, host, key, sectionid, tags):
     connector.message_process({"type": "section", "id": sectionid}, "Uploading section tags.")
 
     headers = {'Content-Type': 'application/json'}
-    url = '%sapi/sections/%s/tags?key=%s' % (host, sectionid, key)
+    url = posixpath.join(host, 'api/sections/%s/tags?key=%s' % (sectionid, key))
     result = requests.post(url, headers=headers, data=json.dumps(tags),
                            verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
@@ -69,7 +69,7 @@ def upload_description(connector, host, key, sectionid, description):
                               "Uploading section description.")
 
     headers = {'Content-Type': 'application/json'}
-    url = '%sapi/sections/%s/description?key=%s' % (host, sectionid, key)
+    url = posixpath.join(host, 'api/sections/%s/description?key=%s' % (sectionid, key))
     result = requests.post(url, headers=headers, data=json.dumps(description),
                            verify=connector.ssl_verify if connector else True)
     result.raise_for_status()
