@@ -10,14 +10,14 @@ object urlbuilder{
10
10
}
11
11
}
12
12
13
- /**
13
+ /**
14
14
* softlayer class sets up a swift connection between an IBM Spark service
15
- * instance and Softlayer Object Storgae instance.
16
- *
15
+ * instance and Softlayer Object Storage instance.
16
+ *
17
17
* Constructor arguments
18
18
19
19
* sparkcontext: a SparkContext object.
20
- *
20
+ *
21
21
* name: string that identifies this configuration. You can
22
22
* use any string you like. This allows you to create
23
23
* multiple configurations to different Object Storage accounts.
@@ -26,14 +26,14 @@ object urlbuilder{
26
26
* Softlayer Object Store
27
27
*/
28
28
29
- class softlayer (sc : SparkContext , name : String , auth_url : String ,
30
- tenant : String , username : String , password : String ,
29
+ class softlayer (sc : SparkContext , name : String , auth_url : String ,
30
+ tenant : String , username : String , password : String ,
31
31
swift2d_driver : String = " com.ibm.stocator.fs.ObjectStoreFileSystem" ,
32
32
public : Boolean = false ){
33
-
34
-
33
+
34
+
35
35
val hadoopConf = sc.hadoopConfiguration;
36
- val prefix = " fs.swift2d.service." + name
36
+ val prefix = " fs.swift2d.service." + name
37
37
38
38
hadoopConf.set(" fs.swift2d.impl" ,swift2d_driver)
39
39
hadoopConf.set(prefix + " .auth.url" ,auth_url)
@@ -48,13 +48,13 @@ class softlayer(sc: SparkContext, name: String, auth_url: String,
48
48
hadoopConf.setBoolean(prefix + " .location-aware" ,false )
49
49
hadoopConf.set(prefix + " .password" ,password)
50
50
51
-
51
+
52
52
def url (container_name : String , object_name: String ) : String = {
53
53
return (urlbuilder.swifturl2d(name= name, container_name,object_name))
54
54
}
55
55
}
56
56
57
- /**
57
+ /**
58
58
* bluemix class sets up a swift connection between an IBM Spark service
59
59
* instance and an Object Storage instance provisioned through IBM Bluemix.
60
60
@@ -63,7 +63,7 @@ class softlayer(sc: SparkContext, name: String, auth_url: String,
63
63
* sparkcontext: a SparkContext object.
64
64
65
65
* credentials: a dictionary with the following required keys:
66
- *
66
+ *
67
67
* auth_url
68
68
69
69
* project_id (or projectId)
@@ -73,13 +73,13 @@ class softlayer(sc: SparkContext, name: String, auth_url: String,
73
73
* password
74
74
75
75
* region
76
- *
76
+ *
77
77
* name: string that identifies this configuration. You can
78
78
* use any string you like. This allows you to create
79
79
* multiple configurations to different Object Storage accounts.
80
80
* This is not required at the moment, since credentials['name']
81
81
* is still supported.
82
- *
82
+ *
83
83
* When using this from a IBM Spark service instance that
84
84
* is configured to connect to particular Bluemix object store
85
85
* instances, the values for these credentials can be obtained
@@ -88,9 +88,9 @@ class softlayer(sc: SparkContext, name: String, auth_url: String,
88
88
*/
89
89
90
90
class bluemix (sc : SparkContext , name : String , creds : HashMap [String , String ],
91
- swift2d_driver : String = " com.ibm.stocator.fs.ObjectStoreFileSystem" ,
91
+ swift2d_driver : String = " com.ibm.stocator.fs.ObjectStoreFileSystem" ,
92
92
public : Boolean = false ){
93
-
93
+
94
94
95
95
def ifexist (credsin : HashMap [String , String ], var1 : String , var2 : String ): String = {
96
96
if (credsin.keySet.exists(_ == var1)){
@@ -103,7 +103,7 @@ class bluemix(sc: SparkContext, name: String, creds: HashMap[String, String],
103
103
val username = ifexist(creds, " user_id" ," userId" )
104
104
val tenant = ifexist(creds, " project_id" ," projectId" )
105
105
106
-
106
+
107
107
val hadoopConf = sc.hadoopConfiguration;
108
108
val prefix = " fs.swift2d.service." + name;
109
109
@@ -118,10 +118,68 @@ class bluemix(sc: SparkContext, name: String, creds: HashMap[String, String],
118
118
hadoopConf.setBoolean(prefix + " .public" ,public)
119
119
hadoopConf.set(prefix + " .region" ,creds(" region" ))
120
120
hadoopConf.setInt(prefix + " .http.port" ,8080 )
121
-
121
+
122
122
def url (container_name : String , object_name: String ) : String = {
123
123
return (urlbuilder.swifturl2d(name= name, container_name,object_name))
124
124
}
125
125
}
126
126
127
+ /**
128
+ * CloudObjectStorage class sets up a s3d connection between an IBM Spark service
129
+ * instance and an IBM Cloud Object Storage instance.
130
+
131
+ * Constructor arguments:
132
+
133
+ * sparkcontext: a SparkContext object.
134
+
135
+ * credentials: a dictionary with the following required keys:
136
+ *
137
+ * endpoint
138
+
139
+ * access_key
127
140
141
+ * secret_key
142
+
143
+ * cosId [optional]: this parameter is the cloud object storage unique id. It is useful
144
+ to keep in the class instance for further checks after the initialization. However,
145
+ it is not mandatory for the class instance to work. This value can be retrieved by
146
+ calling the getCosId function.
147
+
148
+ bucket_name (projectId in DSX) [optional]: string that identifies the defult
149
+ bucket nameyou want to access files from in the COS service instance.
150
+ In DSX, bucket_name is the same as projectId. One bucket is
151
+ associated with one project.
152
+ If this value is not specified, you need to pass it when
153
+ you use the url function.
154
+ *
155
+ * When using this from a IBM Spark service instance that
156
+ * is configured to connect to particular Bluemix object store
157
+ * instances, the values for these credentials can be obtained
158
+ * by clicking on the 'insert to code' link just below a data
159
+ * source.
160
+ */
161
+ class CloudObjectStorage (sc : SparkContext , credentials : HashMap [String , String ], cosId : String = " " ) {
162
+
163
+ // check if all credentials are available
164
+ val requiredValues = Array (" endPoint" , " accessKey" , " secretKey" )
165
+ for ( key <- requiredValues ) {
166
+ if (! credentials.contains(key)) {
167
+ throw new IllegalArgumentException (" Invalid input: missing required input!" )
168
+ }
169
+ }
170
+
171
+ // set config
172
+ val hadoopConf = sc.hadoopConfiguration;
173
+ val prefix = " fs.s3d.service" ;
174
+ hadoopConf.set(prefix + " .endpoint" , credentials(" endPoint" ))
175
+ hadoopConf.set(prefix + " .access.key" , credentials(" accessKey" ))
176
+ hadoopConf.set(prefix + " .secret.key" , credentials(" secretKey" ))
177
+
178
+ def getCosId () : String = {
179
+ return cosId
180
+ }
181
+
182
+ def url (bucketName : String , objectName : String ) : String = {
183
+ return " s3d://" + bucketName + " .service/" + objectName
184
+ }
185
+ }
0 commit comments