- # ibmos2spark
+ # ibmos2sparklyr

The package sets Spark Hadoop configurations for connecting to
IBM Bluemix Object Storage and Softlayer Account Object Storage instances
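
For orientation, the examples changed below all follow the same pattern: build a configuration object from credentials, then read data through the `swift://` URL that the configuration's `url()` helper returns. The following is a rough, illustrative sketch of the underlying idea only, assuming the standard Hadoop Swift connector keys (`fs.swift.service.<name>.*`); in practice `bluemix()`/`softlayer()` do this for you, and the exact keys they set are not shown in this diff.

    library(sparklyr)

    # Illustrative only: roughly the kind of Hadoop configuration the package
    # sets on the Spark context (the key names here are assumptions).
    sc    <- spark_connect(master = "local")
    hconf <- invoke(spark_context(sc), "hadoopConfiguration")
    invoke(hconf, "set", "fs.swift.service.bluemixOScon.username", "XXXXX")
    invoke(hconf, "set", "fs.swift.service.bluemixOScon.password", "XXXXX")

    # Objects are then addressed with swift:// URLs, which is what the config
    # object's url() helper builds, e.g. swift://my_container.bluemixOScon/my_data.csv
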
@@ -20,7 +20,7 @@ In Data Science Experience please be sure to include the "dependencies = FALSE"

### Bluemix

-    library(ibmos2spark )
+    library(ibmos2sparklyr)
    configurationname = "bluemixOScon" # can be any name you like (allows for multiple configurations)

    # In DSX notebooks, the "insert to code" will insert this credentials list for you
@@ -32,16 +32,17 @@ In Data Science Experience please be sure to include the "dependencies = FALSE"
        password="XXXXX")

    bmconfig = bluemix(sparkcontext=sc, name=configurationname, credentials = creds)
-
-    container = "my_container"
-    object = "my_data.csv"
-
+
+    container = "my_container"          # name of your object store container
+    object = "my_data.csv"              # name of the object that you want to retrieve from the container
+    spark_object_name = "dataFromSwift" # name to assign to the new Spark object
+
    data = sparklyr::spark_read_csv(sc, spark_object_name, bmconfig$url(container, object))


### Softlayer

-    library(ibmos2spark )
+    library(ibmos2sparklyr)
    configurationname = "softlayerOScon" # can be any name you like (allows for multiple configurations)

    slconfig = softlayer(sparkcontext=sc,
@@ -52,8 +53,9 @@ In Data Science Experience please be sure to include the "dependencies = FALSE"
        password="XXXXX"
    )

-    container = "my_container"
-    object = "my_data.csv"
+    container = "my_container"          # name of your object store container
+    object = "my_data.csv"              # name of the object that you want to retrieve from the container
+    spark_object_name = "dataFromSwift" # name to assign to the new Spark object

    data = sparklyr::spark_read_csv(sc, spark_object_name, slconfig$url(container, object))
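
As a usage note that is not part of this diff: the object returned by `spark_read_csv()` is a Spark DataFrame reference (a `tbl_spark`), so it can be worked with through sparklyr/dplyr without first pulling the data into local R. A short sketch using standard sparklyr/dplyr calls:

    library(dplyr)

    # `data` is a tbl_spark: dplyr verbs are translated to Spark SQL and run in Spark.
    data %>% head(5)                               # preview the first rows
    sparklyr::sdf_nrow(data)                       # row count, computed in Spark
    sample_df <- data %>% head(100) %>% collect()  # bring a small sample into local R
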