@@ -37,7 +37,7 @@ within a DSX Jupyter notebook, you can obtain your account credentials in the fo
If your Object Storage was created with a Softlayer account, each part of the credentials will
be found as text that you can copy and paste into the example code below.
- ### Cloud Object Storage
+ ### Softlayer - IBM Cloud Object Storage
library(ibmos2sparkR)
configurationName = "bluemixO123"
@@ -60,8 +60,59 @@ be found as text that you can copy and paste into the example code below.
header = "true")
head(df.data.1)
+ ### Bluemix - IBM Cloud Object Storage
+ The CloudObjectStorage class allows you to connect to an IBM Cloud Object Storage (COS) instance hosted on Bluemix. You can connect to
+ a Bluemix COS instance using API keys as follows:
- ### Bluemix / Data Science Experience
+ library(ibmos2sparkR)
+ configurationName = "bluemixO123"
+
+ # In DSX notebooks, the "insert to code" will insert this credentials list for you
+ credentials <- list(
+ apiKey = "XXX",
+ serviceId = "XXX",
+ endpoint = "https://s3-api.objectstorage.....net/"
+ )
+
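+ # cosType="bluemix_cos" targets a Bluemix-hosted COS instance; with an apiKey in credentials no authMethod needs to be set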
+ cos <- CloudObjectStorage(sparkContext=sc, credentials=credentials, configurationName=configurationName, cosType="bluemix_cos")
+
+ bucketName <- "bucketName"
+ fileName <- "test.csv"
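+ # cos$url() builds the object URL for this configuration, which read.df() below consumes directly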
+ url <- cos$url(bucketName, fileName)
+
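+ # start (or reuse) the SparkR session; invisible() just suppresses the printed return value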
+ invisible(sparkR.session(appName = "SparkSession R"))
+
+ df.data.1 <- read.df(url,
+ source = "org.apache.spark.sql.execution.datasources.csv.CSVFileFormat",
+ header = "true")
+ head(df.data.1)
+
+ Alternatively, you can connect to an IBM Bluemix COS instance using an IAM token. Example:
+
+ library(ibmos2sparkR)
+ configurationName = "bluemixO123"
+
+ # In DSX notebooks, the "insert to code" will insert this credentials list for you
+ credentials <- list(
+ iamToken = "XXXXXXXXX",
+ serviceId = "XXX",
+ endpoint = "https://s3-api.objectstorage.....net/"
+ )
+
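+ # authMethod="iam_token" switches authentication from the API key to the IAM token in credentials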
+ cos <- CloudObjectStorage(sparkContext=sc, credentials=credentials, configurationName=configurationName, cosType="bluemix_cos", authMethod="iam_token")
+
+ bucketName <- "bucketName"
+ fileName <- "test.csv"
+ url <- cos$url(bucketName, fileName)
+
+ invisible(sparkR.session(appName = "SparkSession R"))
+
+ df.data.1 <- read.df(url,
+ source = "org.apache.spark.sql.execution.datasources.csv.CSVFileFormat",
+ header = "true")
+ head(df.data.1)
+
+ ### Bluemix Swift Object Storage / Data Science Experience
library(ibmos2sparkR)
configurationname = "bluemixOScon" #can be any any name you like (allows for multiple configurations)
@@ -86,7 +137,7 @@ be found as text that you can copy and paste into the example code below.
data = read.df(bmconfig$url(container, objectname), source="com.databricks.spark.csv", header="true")
- ### Softlayer
+ ### Softlayer Swift Object Storage
library(ibmos2sparkR)
configurationname = "softlayerOScon" #can be any any name you like (allows for multiple configurations)