|
def swifturl(name, container_name, object_name):
    '''Build a swift:// URL of the form swift://<container>.<service>/<object>.'''
    template = 'swift://{container}.{service}/{obj}'
    return template.format(container=container_name, service=name, obj=object_name)
|
22 | 22 |
|
def swifturl2d(name, container_name, object_name):
    '''Build a swift2d:// URL of the form swift2d://<container>.<service>/<object>.'''
    return 'swift2d://%s.%s/%s' % (container_name, name, object_name)
23 | 26 | class softlayer(object):
|
24 | 27 |
|
25 | 28 | def __init__(self, sparkcontext, name, auth_url, username, password):
|
@@ -47,7 +50,36 @@ def __init__(self, sparkcontext, name, auth_url, username, password):
|
47 | 50 |
|
48 | 51 | def url(self, container_name, object_name):
|
49 | 52 | return swifturl(self.name, container_name, object_name)
|
50 |
| - |
| 53 | + |
class softlayer2d(object):
    '''Configures a SparkContext's Hadoop configuration to access a
    Softlayer Object Store through the swift2d (stocator) driver.'''

    def __init__(self, sparkcontext, name, auth_url, tenant, username, password):
        '''
        sparkcontext is a SparkContext object.
        name is a string that can be anything other than an empty string.
        auth_url, tenant, username and password are string credentials
        for your Softlayer Objectstore.
        '''
        self.name = name

        # All keys are scoped per-service under the swift2d driver namespace.
        prefix = "fs.swift2d.service." + name
        hconf = sparkcontext._jsc.hadoopConfiguration()
        hconf.set(prefix + ".auth.url", auth_url)
        hconf.set(prefix + ".username", username)
        hconf.set(prefix + ".tenant", tenant)
        hconf.set(prefix + ".auth.endpoint.prefix", "endpoints")
        hconf.set(prefix + ".auth.method", "swiftauth")
        hconf.setInt(prefix + ".http.port", 8080)
        # Softlayer swiftauth uses the password as the API key; the driver
        # reads ".apikey", and ".password" is set as well for compatibility.
        hconf.set(prefix + ".apikey", password)
        hconf.setBoolean(prefix + ".public", True)
        hconf.set(prefix + ".use.get.auth", "true")
        hconf.setBoolean(prefix + ".location-aware", False)
        hconf.set(prefix + ".password", password)

    def url(self, container_name, object_name):
        '''Return the swift2d:// URL for *object_name* inside *container_name*
        on this configured service.'''
        return swifturl2d(self.name, container_name, object_name)
| 82 | + |
51 | 83 | class bluemix(object):
|
52 | 84 |
|
53 | 85 | def __init__(self, sparkcontext, credentials):
|
@@ -84,3 +116,40 @@ def __init__(self, sparkcontext, credentials):
|
84 | 116 | def url(self, container_name, object_name):
|
85 | 117 | return swifturl(self.name, container_name, object_name)
|
86 | 118 |
|
class bluemix2d(object):
    '''Configures a SparkContext's Hadoop configuration to access a
    Bluemix Object Store through the swift2d (stocator) driver.'''

    def __init__(self, sparkcontext, credentials):
        '''
        sparkcontext is a SparkContext object.

        credentials is a dictionary with the following required keys:
            name
            auth_url
            project_id
            user_id
            password
            region

        When using this from a IBM Spark service instance that
        is configured to connect to particular Bluemix object store
        instances, the values for these credentials can be obtained
        by clicking on the 'insert to code' link just below a data
        source.
        '''
        service_name = credentials['name']
        self.name = service_name

        prefix = "fs.swift2d.service." + service_name
        hconf = sparkcontext._jsc.hadoopConfiguration()
        # Keystone V3 token endpoint is derived from the base auth URL.
        hconf.set(prefix + ".auth.url", credentials['auth_url'] + '/v3/auth/tokens')
        hconf.set(prefix + ".auth.endpoint.prefix", "endpoints")
        # NOTE(review): the trailing space in "keystoneV3 " is reproduced
        # as-is from the original; confirm whether the driver trims this
        # value before cleaning it up.
        hconf.set(prefix + ".auth.method", "keystoneV3 ")
        hconf.set(prefix + ".tenant", credentials['project_id'])
        hconf.set(prefix + ".username", credentials['user_id'])
        hconf.set(prefix + ".password", credentials['password'])
        hconf.setInt(prefix + ".http.port", 8080)
        hconf.set(prefix + ".region", credentials['region'])
        hconf.setBoolean(prefix + ".public", True)

    def url(self, container_name, object_name):
        '''Return the swift2d:// URL for *object_name* inside *container_name*
        on this configured service.'''
        return swifturl2d(self.name, container_name, object_name)
0 commit comments