 import warnings
 
+DEFAULT_SERVICE_NAME = "service"
+
 def swifturl2d(name, container_name, object_name):
   return 'swift2d://{}.{}/{}'.format(container_name, name, object_name)
 
@@ -216,7 +218,13 @@ def __init__(self, sparkcontext, credentials, cos_id='', bucket_name=''):
         raise ValueError("Invalid input: credentials.{} is required!".format(key))
 
     # setup config
-    prefix = "fs.s3d.service"
+    prefix = "fs.cos"
+
+    if (cos_id):
+      prefix = "{}.{}".format(prefix, cos_id)
+    else:
+      prefix = prefix + "." + DEFAULT_SERVICE_NAME
+
     hconf = sparkcontext._jsc.hadoopConfiguration()
     hconf.set(prefix + ".endpoint", credentials['endpoint'])
     hconf.set(prefix + ".access.key", credentials['access_key'])
@@ -227,11 +235,18 @@ def get_os_id():
 
   def url(self, object_name, bucket_name=''):
     bucket_name_var = ''
+    service_name = DEFAULT_SERVICE_NAME
+
+    # determine the bucket to use
     if (bucket_name):
       bucket_name_var = bucket_name
     elif (self.bucket_name):
       bucket_name_var = self.bucket_name
     else:
       raise ValueError("Invalid input: bucket_name is required!")
 
-    return "s3d://{}.service/{}".format(bucket_name_var, object_name)
+    # use service name that we set up hadoop config for
+    if (self.cos_id):
+      service_name = self.cos_id
+
+    return "cos://{}.{}/{}".format(bucket_name_var, service_name, object_name)
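For context, a minimal usage sketch of the changed constructor and url() method. Hedged assumptions: the enclosing class name ("CloudObjectStorage"), the 'secret_key' credential, and the placeholder endpoint, sc (SparkContext) and spark (SparkSession) objects are not shown in these hunks; only the cos_id handling and the cos:// URI format come from the diff itself.

# Usage sketch under the assumptions stated above.
credentials = {
    'endpoint': 'https://example-cos-endpoint',   # placeholder value
    'access_key': 'MY_ACCESS_KEY',
    'secret_key': 'MY_SECRET_KEY',                # assumed additional key; not shown in the hunks
}

# With cos_id set, __init__ writes the Hadoop settings under fs.cos.myCos.*;
# without it, the prefix falls back to fs.cos.service (DEFAULT_SERVICE_NAME).
cos = CloudObjectStorage(sc, credentials, cos_id='myCos', bucket_name='my-bucket')

# url() now builds a cos:// URI using the same service name:
#   cos://my-bucket.myCos/data.csv
path = cos.url('data.csv')
df = spark.read.csv(path)   # 'spark' assumed to be an existing SparkSession

Keying the configuration by service name (fs.cos.<cos_id>.* rather than a single fixed prefix) allows more than one object-storage service to be registered on the same SparkContext, each addressed by its own service name inside the cos:// URI.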
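For comparison, the unchanged swifturl2d helper in the first hunk builds its swift2d URI the same way from container, service name, and object (argument values below are illustrative only):

swifturl2d('keystone', 'mycontainer', 'data.csv')
# -> 'swift2d://mycontainer.keystone/data.csv'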