Skip to content

Commit 1c56118

Browse files
authored
Merge pull request apache-spark-on-k8s#373 from palantir/update-upstream
2 parents e5c3be5 + 56df4d3 commit 1c56118

File tree

389 files changed

+10063
-3291
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

389 files changed

+10063
-3291
lines changed

LICENSE

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -263,7 +263,7 @@ The text of each license is also included at licenses/LICENSE-[project].txt.
263263
(New BSD license) Protocol Buffer Java API (org.spark-project.protobuf:protobuf-java:2.4.1-shaded - http://code.google.com/p/protobuf)
264264
(The BSD License) Fortran to Java ARPACK (net.sourceforge.f2j:arpack_combined_all:0.1 - http://f2j.sourceforge.net)
265265
(The BSD License) xmlenc Library (xmlenc:xmlenc:0.52 - http://xmlenc.sourceforge.net)
266-
(The New BSD License) Py4J (net.sf.py4j:py4j:0.10.6 - http://py4j.sourceforge.net/)
266+
(The New BSD License) Py4J (net.sf.py4j:py4j:0.10.7 - http://py4j.sourceforge.net/)
267267
(Two-clause BSD-style license) JUnit-Interface (com.novocode:junit-interface:0.10 - http://github.com/szeiger/junit-interface/)
268268
(BSD licence) sbt and sbt-launch-lib.bash
269269
(BSD 3 Clause) d3.min.js (https://github.com/mbostock/d3/blob/master/LICENSE)

R/check-cran.sh

Lines changed: 0 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -25,10 +25,6 @@ pushd "$FWDIR" > /dev/null
2525

2626
. "$FWDIR/find-r.sh"
2727

28-
# Install the package (this is required for code in vignettes to run when building it later)
29-
# Build the latest docs, but not vignettes, which is built with the package next
30-
. "$FWDIR/install-dev.sh"
31-
3228
# Build source package with vignettes
3329
SPARK_HOME="$(cd "${FWDIR}"/..; pwd)"
3430
. "${SPARK_HOME}/bin/load-spark-env.sh"

R/pkg/DESCRIPTION

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -13,6 +13,7 @@ Authors@R: c(person("Shivaram", "Venkataraman", role = c("aut", "cre"),
1313
License: Apache License (== 2.0)
1414
URL: http://www.apache.org/ http://spark.apache.org/
1515
BugReports: http://spark.apache.org/contributing.html
16+
SystemRequirements: Java (== 8)
1617
Depends:
1718
R (>= 3.0),
1819
methods

R/pkg/NAMESPACE

Lines changed: 7 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -201,6 +201,10 @@ exportMethods("%<=>%",
201201
"approxCountDistinct",
202202
"approxQuantile",
203203
"array_contains",
204+
"array_max",
205+
"array_min",
206+
"array_position",
207+
"array_sort",
204208
"asc",
205209
"ascii",
206210
"asin",
@@ -245,6 +249,7 @@ exportMethods("%<=>%",
245249
"decode",
246250
"dense_rank",
247251
"desc",
252+
"element_at",
248253
"encode",
249254
"endsWith",
250255
"exp",
@@ -254,6 +259,7 @@ exportMethods("%<=>%",
254259
"expr",
255260
"factorial",
256261
"first",
262+
"flatten",
257263
"floor",
258264
"format_number",
259265
"format_string",
@@ -346,6 +352,7 @@ exportMethods("%<=>%",
346352
"sinh",
347353
"size",
348354
"skewness",
355+
"slice",
349356
"sort_array",
350357
"soundex",
351358
"spark_partition_id",

R/pkg/R/client.R

Lines changed: 37 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -19,7 +19,7 @@
1919

2020
# Creates a SparkR client connection object
2121
# if one doesn't already exist
22-
connectBackend <- function(hostname, port, timeout) {
22+
connectBackend <- function(hostname, port, timeout, authSecret) {
2323
if (exists(".sparkRcon", envir = .sparkREnv)) {
2424
if (isOpen(.sparkREnv[[".sparkRCon"]])) {
2525
cat("SparkRBackend client connection already exists\n")
@@ -29,7 +29,7 @@ connectBackend <- function(hostname, port, timeout) {
2929

3030
con <- socketConnection(host = hostname, port = port, server = FALSE,
3131
blocking = TRUE, open = "wb", timeout = timeout)
32-
32+
doServerAuth(con, authSecret)
3333
assign(".sparkRCon", con, envir = .sparkREnv)
3434
con
3535
}
@@ -60,13 +60,48 @@ generateSparkSubmitArgs <- function(args, sparkHome, jars, sparkSubmitOpts, pack
6060
combinedArgs
6161
}
6262

63+
# Verify that the installed Java matches the major version declared in the
# package DESCRIPTION's SystemRequirements field (e.g. "Java (== 8)").
# Locates java via JAVA_HOME when set (otherwise PATH), runs `java -version`
# through launchScript, and stops with an error when java cannot be run,
# its output cannot be parsed, or the major version differs from the
# required one. Invisibly returns the detected major version number.
checkJavaVersion <- function() {
  javaBin <- "java"
  javaHome <- Sys.getenv("JAVA_HOME")
  javaReqs <- utils::packageDescription(utils::packageName(),
                                        fields = "SystemRequirements")
  # SystemRequirements carries e.g. "Java (== 8)"; after splitting on the
  # "(", "=" and ")" characters the required major version is the last token.
  sparkJavaVersion <- as.numeric(tail(strsplit(javaReqs, "[(=)]")[[1]], n = 1L))
  if (nzchar(javaHome)) {
    javaBin <- file.path(javaHome, "bin", javaBin)
  }

  # If java is missing from PATH, we get an error in Unix and a warning in
  # Windows, so treat both conditions as fatal.
  javaVersionOut <- tryCatch(
    launchScript(javaBin, "-version", wait = TRUE, stdout = TRUE, stderr = TRUE),
    error = function(e) {
      stop("Java version check failed. Please make sure Java is installed",
           " and set JAVA_HOME to point to the installation directory. ",
           conditionMessage(e), call. = FALSE)
    },
    warning = function(w) {
      stop("Java version check failed. Please make sure Java is installed",
           " and set JAVA_HOME to point to the installation directory. ",
           conditionMessage(w), call. = FALSE)
    })
  # `java -version` prints several lines; keep the one naming the version.
  javaVersionFilter <- Filter(function(x) grepl(" version", x), javaVersionOut)
  if (length(javaVersionFilter) == 0) {
    stop("Java version check failed: could not parse the output of ",
         javaBin, " -version", call. = FALSE)
  }

  # The version is the first quoted token, e.g. `java version "1.8.0_92"`.
  javaVersionStr <- strsplit(javaVersionFilter[[1]], "[\"]")[[1L]][2]
  versionParts <- strsplit(javaVersionStr, "[.]")[[1L]]
  # Pre-Java-9 versions look like 1.8.0_92 (major is the second component);
  # Java 9+ uses the JEP 223 scheme, e.g. 10.0.1 (major is the first).
  if (identical(versionParts[1], "1")) {
    javaVersionNum <- as.integer(versionParts[2])
  } else {
    javaVersionNum <- as.integer(versionParts[1])
  }
  if (javaVersionNum != sparkJavaVersion) {
    stop(paste("Java version", sparkJavaVersion,
               "is required for this package; found version:",
               javaVersionStr))
  }
  invisible(javaVersionNum)
}
96+
6397
launchBackend <- function(args, sparkHome, jars, sparkSubmitOpts, packages) {
6498
sparkSubmitBinName <- determineSparkSubmitBin()
6599
if (sparkHome != "") {
66100
sparkSubmitBin <- file.path(sparkHome, "bin", sparkSubmitBinName)
67101
} else {
68102
sparkSubmitBin <- sparkSubmitBinName
69103
}
104+
70105
combinedArgs <- generateSparkSubmitArgs(args, sparkHome, jars, sparkSubmitOpts, packages)
71106
cat("Launching java with spark-submit command", sparkSubmitBin, combinedArgs, "\n")
72107
invisible(launchScript(sparkSubmitBin, combinedArgs))

R/pkg/R/deserialize.R

Lines changed: 7 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -60,14 +60,18 @@ readTypedObject <- function(con, type) {
6060
stop(paste("Unsupported type for deserialization", type)))
6161
}
6262

63-
readString <- function(con) {
64-
stringLen <- readInt(con)
65-
raw <- readBin(con, raw(), stringLen, endian = "big")
63+
# Read exactly `len` raw bytes from connection `con` and decode them as a
# single UTF-8 string.
readStringData <- function(con, len) {
  bytes <- readBin(con, raw(), len, endian = "big")
  result <- rawToChar(bytes)
  Encoding(result) <- "UTF-8"
  result
}
7069

70+
# Read a length-prefixed string: a big-endian int length followed by that
# many bytes of UTF-8 data.
readString <- function(con) {
  numBytes <- readInt(con)
  readStringData(con, numBytes)
}
74+
7175
# Read a single 4-byte big-endian integer from connection `con`.
readInt <- function(con) {
  readBin(con, what = integer(), n = 1L, endian = "big")
}

0 commit comments

Comments (0)