Commit 9a93ee5

Add COS class for scala
1 parent 52ad8e6 commit 9a93ee5

File tree

1 file changed (+76, -18 lines)


scala/src/main/scala/Osconfig.scala

Lines changed: 76 additions & 18 deletions
@@ -10,14 +10,14 @@ object urlbuilder{
   }
 }
 
-/**
+/**
  * softlayer class sets up a swift connection between an IBM Spark service
- * instance and Softlayer Object Storage instance.
- *
+ * instance and Softlayer Object Storage instance.
+ *
  * Constructor arguments
 
  * sparkcontext: a SparkContext object.
- *
+ *
  * name: string that identifies this configuration. You can
  *       use any string you like. This allows you to create
  *       multiple configurations to different Object Storage accounts.
@@ -26,14 +26,14 @@ object urlbuilder{
  *       Softlayer Object Store
  */
 
-class softlayer(sc: SparkContext, name: String, auth_url: String,
-                tenant: String, username: String, password: String,
+class softlayer(sc: SparkContext, name: String, auth_url: String,
+                tenant: String, username: String, password: String,
                 swift2d_driver: String = "com.ibm.stocator.fs.ObjectStoreFileSystem",
                 public: Boolean=false){
-
-
+
+
     val hadoopConf = sc.hadoopConfiguration;
-    val prefix = "fs.swift2d.service." + name
+    val prefix = "fs.swift2d.service." + name
 
     hadoopConf.set("fs.swift2d.impl",swift2d_driver)
     hadoopConf.set(prefix + ".auth.url",auth_url)
@@ -48,13 +48,13 @@ class softlayer(sc: SparkContext, name: String, auth_url: String,
     hadoopConf.setBoolean(prefix + ".location-aware",false)
    hadoopConf.set(prefix + ".password",password)
 
-
+
    def url(container_name: String, object_name:String) : String= {
      return(urlbuilder.swifturl2d(name= name, container_name,object_name))
    }
 }
 
-/**
+/**
  * bluemix class sets up a swift connection between an IBM Spark service
  * instance and an Object Storage instance provisioned through IBM Bluemix.
 
@@ -63,7 +63,7 @@ class softlayer(sc: SparkContext, name: String, auth_url: String,
  * sparkcontext: a SparkContext object.
 
  * credentials: a dictionary with the following required keys:
- *
+ *
  *   auth_url
 
  *   project_id (or projectId)
@@ -73,13 +73,13 @@ class softlayer(sc: SparkContext, name: String, auth_url: String,
  *   password
 
  *   region
- *
+ *
  * name: string that identifies this configuration. You can
  *       use any string you like. This allows you to create
  *       multiple configurations to different Object Storage accounts.
  *       This is not required at the moment, since credentials['name']
  *       is still supported.
- *
+ *
  * When using this from an IBM Spark service instance that
  * is configured to connect to particular Bluemix object store
  * instances, the values for these credentials can be obtained
@@ -88,9 +88,9 @@ class softlayer(sc: SparkContext, name: String, auth_url: String,
  */
 
 class bluemix(sc: SparkContext, name: String, creds: HashMap[String, String],
-              swift2d_driver: String = "com.ibm.stocator.fs.ObjectStoreFileSystem",
+              swift2d_driver: String = "com.ibm.stocator.fs.ObjectStoreFileSystem",
               public: Boolean =false){
-
+
 
     def ifexist(credsin: HashMap[String, String], var1: String, var2: String): String = {
       if (credsin.keySet.exists(_ == var1)){
@@ -103,7 +103,7 @@ class bluemix(sc: SparkContext, name: String, creds: HashMap[String, String],
     val username = ifexist(creds, "user_id","userId")
     val tenant = ifexist(creds, "project_id","projectId")
 
-
+
     val hadoopConf = sc.hadoopConfiguration;
     val prefix = "fs.swift2d.service." + name;
 
@@ -118,10 +118,68 @@ class bluemix(sc: SparkContext, name: String, creds: HashMap[String, String],
     hadoopConf.setBoolean(prefix + ".public",public)
     hadoopConf.set(prefix + ".region",creds("region"))
     hadoopConf.setInt(prefix + ".http.port",8080)
-
+
     def url(container_name: String, object_name:String) : String= {
       return(urlbuilder.swifturl2d(name= name, container_name,object_name))
     }
 }
 
-
+/**
+ * CloudObjectStorage class sets up an s3d connection between an IBM Spark service
+ * instance and an IBM Cloud Object Storage instance.
+
+ * Constructor arguments:
+
+ * sparkcontext: a SparkContext object.
+
+ * credentials: a dictionary with the following required keys:
+ *
+ *   endPoint
+
+ *   accessKey
+
+ *   secretKey
+
+ * cosId [optional]: this parameter is the cloud object storage unique id. It is useful
+ *     to keep in the class instance for further checks after the initialization. However,
+ *     it is not mandatory for the class instance to work. This value can be retrieved by
+ *     calling the getCosId function.
+
+ * bucket_name (projectId in DSX) [optional]: string that identifies the default
+ *     bucket name you want to access files from in the COS service instance.
+ *     In DSX, bucket_name is the same as projectId. One bucket is
+ *     associated with one project.
+ *     If this value is not specified, you need to pass it when
+ *     you use the url function.
+ *
+ * When using this from an IBM Spark service instance that
+ * is configured to connect to particular Bluemix object store
+ * instances, the values for these credentials can be obtained
+ * by clicking on the 'insert to code' link just below a data
+ * source.
+ */
+class CloudObjectStorage(sc: SparkContext, credentials: HashMap[String, String], cosId: String = "") {
+
+    // verify that all required credential keys are present
+    val requiredValues = Array("endPoint", "accessKey", "secretKey")
+    for (key <- requiredValues) {
+        if (!credentials.contains(key)) {
+            throw new IllegalArgumentException("Invalid input: missing required key " + key)
+        }
+    }
+
+    // configure the s3d connector in the Hadoop configuration
+    val hadoopConf = sc.hadoopConfiguration;
+    val prefix = "fs.s3d.service";
+    hadoopConf.set(prefix + ".endpoint", credentials("endPoint"))
+    hadoopConf.set(prefix + ".access.key", credentials("accessKey"))
+    hadoopConf.set(prefix + ".secret.key", credentials("secretKey"))
+
+    def getCosId() : String = {
+        return cosId
+    }
+
+    def url(bucketName: String, objectName: String) : String = {
+        return "s3d://" + bucketName + ".service/" + objectName
+    }
+}
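
The new class can be exercised with a short sketch like the one below, assuming a notebook or spark-shell session where sc (a SparkContext) already exists, Osconfig.scala is on the classpath, and the Stocator s3d filesystem is available to Spark; the endpoint, keys, and cosId values are placeholders, not real credentials.

    import scala.collection.mutable.HashMap  // assumption: the same HashMap type imported by Osconfig.scala

    // Key names must match the constructor's required keys: endPoint, accessKey, secretKey.
    val credentials = HashMap(
      "endPoint"  -> "https://s3-api.us-geo.objectstorage.softlayer.net",  // placeholder endpoint
      "accessKey" -> "<ACCESS_KEY>",
      "secretKey" -> "<SECRET_KEY>"
    )

    // cosId is optional; it is only stored for later lookup via getCosId().
    val cos = new CloudObjectStorage(sc, credentials, cosId = "<COS_INSTANCE_ID>")

    // url() builds an s3d URI of the form s3d://<bucket>.service/<object>.
    val rdd = sc.textFile(cos.url("my-bucket", "data.csv"))
    println(rdd.count())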

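For comparison, the pre-existing bluemix class in the same file covers the swift2d path; a sketch under the same assumptions (live sc, Osconfig.scala on the classpath), with placeholder values standing in for the credentials that Bluemix's 'insert to code' snippet would supply:

    import scala.collection.mutable.HashMap  // assumption: the same HashMap type imported by Osconfig.scala

    // Placeholder credentials; the real values come from the Bluemix service credentials JSON.
    val creds = HashMap(
      "auth_url"   -> "https://identity.open.softlayer.com",
      "project_id" -> "<PROJECT_ID>",
      "user_id"    -> "<USER_ID>",
      "password"   -> "<PASSWORD>",
      "region"     -> "dallas"
    )

    val osconf = new bluemix(sc, name = "myconfig", creds = creds)

    // url() delegates to urlbuilder.swifturl2d and scopes the URI to the
    // configuration name given above.
    val swiftRdd = sc.textFile(osconf.url("my-container", "data.csv"))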