@@ -9,24 +9,24 @@ swifturl = function(name, container_name, object_name){
#' sparkcontext is a SparkContext object.
#'
#' name is a string that identifies this configuration. You can
#' use any string you like. This allows you to create
#' multiple configurations to different Object Storage accounts.
#' auth_url, username and password are string credentials for your
#' Softlayer Object Store
#' @export softlayer
#' @exportClass softlayer

softlayer <- setRefClass("softlayer",
    fields = list(name = "character", container_name = "character", object_name = "character",
                  sparkcontext = 'jobj', auth_url = "character",
                  tenant = "character", username = "character", password = "character"),
    methods = list(initialize =
      function(sparkcontext, name, auth_url, tenant, username, password, public = FALSE,
               swift2d_driver = 'com.ibm.stocator.fs.ObjectStoreFileSystem'){

        .self$name = name
        prefix = paste("fs.swift2d.service", name, sep = ".")
        hConf = SparkR:::callJMethod(sparkcontext, "hadoopConfiguration")
        SparkR:::callJMethod(hConf, "set", "fs.swift2d.impl", swift2d_driver)
@@ -41,21 +41,21 @@ softlayer <- setRefClass("softlayer",
        SparkR:::callJMethod(hConf, "set", paste(prefix, "use.get.auth", sep = '.'), "true")
        invisible(SparkR:::callJMethod(hConf, "setBoolean", paste(prefix, "location-aware", sep = '.'), FALSE))
        SparkR:::callJMethod(hConf, "set", paste(prefix, "password", sep = '.'), password)

      },

      url = function(container_name, object_name){
        return(swifturl(name, container_name, object_name))}
    )
)
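# A minimal usage sketch for the softlayer class, assuming `sc` is an existing
# SparkContext (as provided in IBM Analytics notebooks) and that the auth_url,
# tenant, username and password values below are placeholders for real
# Softlayer Object Store credentials.
slconfig <- softlayer(sparkcontext = sc,
                      name = "mysoftlayer",
                      auth_url = "https://identity.example.com/auth/v1.0",  # placeholder endpoint
                      tenant = "XXXXX",
                      username = "XXXXX",
                      password = "XXXXX")
# url() builds a swift2d:// path, e.g. swift2d://mycontainer.mysoftlayer/my_data.csv
df <- SparkR::read.df(slconfig$url("mycontainer", "my_data.csv"), source = "csv", header = "true")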


#' sparkcontext: a SparkContext object.
#'
#' credentials: a named list with the following required keys:
#'
#'   auth_url
#'   project_id (or projectId)
#'   user_id (or userId)
@@ -73,28 +73,28 @@ softlayer <- setRefClass("softlayer",
#' instances, the values for these credentials can be obtained
#' by clicking on the 'insert to code' link just below a data
#' source.
#' @export bluemix
#' @exportClass bluemix

bluemix <- setRefClass("bluemix",
    fields = list(name = "character", credentials = "list",
                  sparkcontext = 'jobj', public = "character"),
    methods = list(initialize =
      function(..., sparkcontext, name = NULL, credentials,
               public = FALSE, swift2d_driver = 'com.ibm.stocator.fs.ObjectStoreFileSystem'){

        callSuper(..., credentials = credentials)

        if (is.null(name)) name <<- credentials["name"][[1]]

        user_id = try(credentials['user_id'][[1]])
        if (class(user_id) == "try-error") user_id = credentials['userId'][[1]]

        tenant = try(credentials['project_id'][[1]])
        if (class(tenant) == "try-error") tenant = credentials['projectId'][[1]]

        .self$name = name
        prefix = paste("fs.swift2d.service", name, sep = ".")
        hConf = SparkR:::callJMethod(sparkcontext, "hadoopConfiguration")
        SparkR:::callJMethod(hConf, "set", "fs.swift2d.impl", swift2d_driver)
@@ -108,8 +108,63 @@ bluemix <- setRefClass("bluemix",
        invisible(SparkR:::callJMethod(hConf, "setBoolean", paste(prefix, "public", sep = '.'), public))
        # invisible(SparkR:::callJMethod(hConf, "setInt", paste(prefix, "http.port", sep = '.'), 8080))
      },

      url = function(container_name, object_name){
        return(swifturl(name, container_name, object_name))}
    )
)
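# A minimal usage sketch for the bluemix class, assuming `sc` is an existing
# SparkContext and placeholder credentials; in IBM notebooks the credentials
# list normally comes from the 'insert to code' link of a data source and may
# carry additional keys (e.g. region) beyond those shown here.
creds <- list(auth_url = "https://identity.example.com",  # placeholder endpoint
              project_id = "XXXXX",
              user_id = "XXXXX",
              password = "XXXXX")
bmconfig <- bluemix(sparkcontext = sc, name = "bluemixOS", credentials = creds)
# url() builds a swift2d:// path, e.g. swift2d://mycontainer.bluemixOS/my_data.csv
df <- SparkR::read.df(bmconfig$url("mycontainer", "my_data.csv"), source = "csv", header = "true")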

#' CloudObjectStorage is a class designed for IBM Cloud Object Storage (COS).
#' It sets up the Hadoop config for COS and provides the final file URL.
#'
#' sparkContext: a SparkContext object.
#'
#' credentials: a named list with the following required keys:
#'   endpoint
#'   accessKey
#'   secretKey
#'
#' configurationName: a string that identifies the configuration being set.
#' When using this from an IBM Spark service instance that
#' is configured to connect to particular Bluemix object store
#' instances, the values for these credentials can be obtained
#' by clicking on the 'insert to code' link just below a data
#' source.
#' @export CloudObjectStorage
#' @exportClass CloudObjectStorage
CloudObjectStorage <- setRefClass("CloudObjectStorage",
    fields = list(configName = "character"),
    methods = list(
      initialize = function(..., sparkContext, credentials, configurationName){

        # Validate that the required credential keys are present.
        if (is.null(credentials["endpoint"][[1]])) {
          stop("Attribute endpoint in credentials is missing!")
        }

        if (is.null(credentials["accessKey"][[1]])) {
          stop("Attribute accessKey in credentials is missing!")
        }

        if (is.null(credentials["secretKey"][[1]])) {
          stop("Attribute secretKey in credentials is missing!")
        }

        # Register the COS endpoint and HMAC keys with the Hadoop configuration.
        .self$configName = configurationName
        prefix = "fs.s3d.service"
        hConf = SparkR:::callJMethod(sparkContext, "hadoopConfiguration")
        SparkR:::callJMethod(hConf, "set", paste(prefix, "endpoint", sep = '.'), credentials['endpoint'][[1]])
        SparkR:::callJMethod(hConf, "set", paste(prefix, "access.key", sep = '.'), credentials['accessKey'][[1]])
        SparkR:::callJMethod(hConf, "set", paste(prefix, "secret.key", sep = '.'), credentials['secretKey'][[1]])
      },

      getConfigName = function() {
        return(.self$configName)
      },

      url = function(bucketName, objectName){
        return(paste("s3d://", bucketName, ".service/", objectName, sep = ""))
      }
    )
)
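# A minimal usage sketch for CloudObjectStorage, assuming `sc` is an existing
# SparkContext, placeholder HMAC credentials (endpoint, accessKey and secretKey
# from the COS service credentials), and that the stocator s3d filesystem
# driver is already registered in the Spark environment.
cosCreds <- list(endpoint = "https://s3.example-cos-endpoint.net",  # placeholder endpoint
                 accessKey = "XXXXX",
                 secretKey = "XXXXX")
cos <- CloudObjectStorage(sparkContext = sc, credentials = cosCreds,
                          configurationName = "cosConfig")
# url() builds an s3d:// path, e.g. s3d://mybucket.service/my_data.csv
df <- SparkR::read.df(cos$url("mybucket", "my_data.csv"), source = "csv", header = "true")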