
Commit f4b1380

Felix Cheung (felixcheung) authored and committed
[SPARK-25572][SPARKR] test only if not cran
## What changes were proposed in this pull request?

CRAN doesn't seem to respect the system requirements when running tests - we have seen cases where SparkR is run on Java 10, which unfortunately Spark does not start on. For 2.4, let's attempt skipping all tests.

## How was this patch tested?

manual, jenkins, appveyor

Author: Felix Cheung <[email protected]>

Closes apache#22589 from felixcheung/ralltests.
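For background on the gating mechanism: the patch keys off the NOT_CRAN environment variable, the same convention that testthat's skip_on_cran() relies on. CRAN check machines leave it unset, while local and CI test runs typically export NOT_CRAN=true. Below is a minimal sketch of that idiom, separate from this commit; run_guarded_tests is a hypothetical helper name used only for illustration.

# Minimal sketch of the NOT_CRAN gating idiom (illustrative, not from this commit).
# CRAN check machines leave NOT_CRAN unset, so the guarded body never runs there;
# environments that export NOT_CRAN=true execute it in full.
run_guarded_tests <- function() {  # hypothetical helper, for illustration only
  if (!identical(Sys.getenv("NOT_CRAN"), "true")) {
    message("NOT_CRAN is not 'true'; skipping tests (likely a CRAN check machine)")
    return(invisible(NULL))
  }
  message("running the full test suite")
  # ... actual test-runner calls would go here ...
}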
1 parent f246813 commit f4b1380

File tree

1 file changed: +44, -39 lines


R/pkg/tests/run-all.R

Lines changed: 44 additions & 39 deletions

@@ -18,50 +18,55 @@
 library(testthat)
 library(SparkR)
 
-# Turn all warnings into errors
-options("warn" = 2)
+# SPARK-25572
+if (identical(Sys.getenv("NOT_CRAN"), "true")) {
 
-if (.Platform$OS.type == "windows") {
-  Sys.setenv(TZ = "GMT")
-}
+  # Turn all warnings into errors
+  options("warn" = 2)
 
-# Setup global test environment
-# Install Spark first to set SPARK_HOME
+  if (.Platform$OS.type == "windows") {
+    Sys.setenv(TZ = "GMT")
+  }
 
-# NOTE(shivaram): We set overwrite to handle any old tar.gz files or directories left behind on
-# CRAN machines. For Jenkins we should already have SPARK_HOME set.
-install.spark(overwrite = TRUE)
+  # Setup global test environment
+  # Install Spark first to set SPARK_HOME
 
-sparkRDir <- file.path(Sys.getenv("SPARK_HOME"), "R")
-sparkRWhitelistSQLDirs <- c("spark-warehouse", "metastore_db")
-invisible(lapply(sparkRWhitelistSQLDirs,
-                 function(x) { unlink(file.path(sparkRDir, x), recursive = TRUE, force = TRUE)}))
-sparkRFilesBefore <- list.files(path = sparkRDir, all.files = TRUE)
+  # NOTE(shivaram): We set overwrite to handle any old tar.gz files or directories left behind on
+  # CRAN machines. For Jenkins we should already have SPARK_HOME set.
+  install.spark(overwrite = TRUE)
 
-sparkRTestMaster <- "local[1]"
-sparkRTestConfig <- list()
-if (identical(Sys.getenv("NOT_CRAN"), "true")) {
-  sparkRTestMaster <- ""
-} else {
-  # Disable hsperfdata on CRAN
-  old_java_opt <- Sys.getenv("_JAVA_OPTIONS")
-  Sys.setenv("_JAVA_OPTIONS" = paste("-XX:-UsePerfData", old_java_opt))
-  tmpDir <- tempdir()
-  tmpArg <- paste0("-Djava.io.tmpdir=", tmpDir)
-  sparkRTestConfig <- list(spark.driver.extraJavaOptions = tmpArg,
-                           spark.executor.extraJavaOptions = tmpArg)
-}
+  sparkRDir <- file.path(Sys.getenv("SPARK_HOME"), "R")
+  sparkRWhitelistSQLDirs <- c("spark-warehouse", "metastore_db")
+  invisible(lapply(sparkRWhitelistSQLDirs,
+                   function(x) { unlink(file.path(sparkRDir, x), recursive = TRUE, force = TRUE)}))
+  sparkRFilesBefore <- list.files(path = sparkRDir, all.files = TRUE)
 
-test_package("SparkR")
+  sparkRTestMaster <- "local[1]"
+  sparkRTestConfig <- list()
+  if (identical(Sys.getenv("NOT_CRAN"), "true")) {
+    sparkRTestMaster <- ""
+  } else {
+    # Disable hsperfdata on CRAN
+    old_java_opt <- Sys.getenv("_JAVA_OPTIONS")
+    Sys.setenv("_JAVA_OPTIONS" = paste("-XX:-UsePerfData", old_java_opt))
+    tmpDir <- tempdir()
+    tmpArg <- paste0("-Djava.io.tmpdir=", tmpDir)
+    sparkRTestConfig <- list(spark.driver.extraJavaOptions = tmpArg,
+                             spark.executor.extraJavaOptions = tmpArg)
+  }
 
-if (identical(Sys.getenv("NOT_CRAN"), "true")) {
-  # set random seed for predictable results. mostly for base's sample() in tree and classification
-  set.seed(42)
-  # for testthat 1.0.2 later, change reporter from "summary" to default_reporter()
-  testthat:::run_tests("SparkR",
-                       file.path(sparkRDir, "pkg", "tests", "fulltests"),
-                       NULL,
-                       "summary")
-}
+  test_package("SparkR")
+
+  if (identical(Sys.getenv("NOT_CRAN"), "true")) {
+    # set random seed for predictable results. mostly for base's sample() in tree and classification
+    set.seed(42)
+    # for testthat 1.0.2 later, change reporter from "summary" to default_reporter()
+    testthat:::run_tests("SparkR",
+                         file.path(sparkRDir, "pkg", "tests", "fulltests"),
+                         NULL,
+                         "summary")
+  }
 
-SparkR:::uninstallDownloadedSpark()
+  SparkR:::uninstallDownloadedSpark()
+
+}
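To exercise the gated path outside CRAN, the environment variable has to be set before the runner is sourced. A hedged example follows; the exact Jenkins/AppVeyor invocations are not part of this commit.

# Hedged example: run the gated suite by setting NOT_CRAN first.
Sys.setenv(NOT_CRAN = "true")
source("R/pkg/tests/run-all.R")

With NOT_CRAN unset, sourcing the same file now does nothing at all, which is the point of the change: a CRAN check running on an unsupported JVM (such as Java 10) never attempts to start Spark.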
