library(testthat)
library(SparkR)

# SPARK-25572: skip the entire test run unless NOT_CRAN is set, so that
# CRAN check machines do not download/install Spark or run the full suite.
if (identical(Sys.getenv("NOT_CRAN"), "true")) {

  # Turn all warnings into errors
  options("warn" = 2)

  if (.Platform$OS.type == "windows") {
    Sys.setenv(TZ = "GMT")
  }

  # Setup global test environment
  # Install Spark first to set SPARK_HOME

  # NOTE(shivaram): We set overwrite to handle any old tar.gz files or directories left behind on
  # CRAN machines. For Jenkins we should already have SPARK_HOME set.
  install.spark(overwrite = TRUE)

  # Remove any leftover SQL state directories from a previous run so tests
  # start from a clean slate, then record the pre-test file listing.
  sparkRDir <- file.path(Sys.getenv("SPARK_HOME"), "R")
  sparkRWhitelistSQLDirs <- c("spark-warehouse", "metastore_db")
  invisible(lapply(sparkRWhitelistSQLDirs,
             function(x) { unlink(file.path(sparkRDir, x), recursive = TRUE, force = TRUE)}))
  sparkRFilesBefore <- list.files(path = sparkRDir, all.files = TRUE)

  # NOTE(review): inside the outer NOT_CRAN guard this condition is always
  # TRUE, so the else branch (CRAN-only JVM options) is currently dead code.
  # It is kept so the CRAN configuration survives if the outer guard is
  # ever lifted or relaxed.
  sparkRTestMaster <- "local[1]"
  sparkRTestConfig <- list()
  if (identical(Sys.getenv("NOT_CRAN"), "true")) {
    sparkRTestMaster <- ""
  } else {
    # Disable hsperfdata on CRAN
    old_java_opt <- Sys.getenv("_JAVA_OPTIONS")
    Sys.setenv("_JAVA_OPTIONS" = paste("-XX:-UsePerfData", old_java_opt))
    tmpDir <- tempdir()
    tmpArg <- paste0("-Djava.io.tmpdir=", tmpDir)
    sparkRTestConfig <- list(spark.driver.extraJavaOptions = tmpArg,
                             spark.executor.extraJavaOptions = tmpArg)
  }

  # Run the package's standard tests (tests/testthat).
  test_package("SparkR")

  # NOTE(review): also always TRUE under the outer guard — see above.
  if (identical(Sys.getenv("NOT_CRAN"), "true")) {
    # set random seed for predictable results. mostly for base's sample() in tree and classification
    set.seed(42)
    # for testthat 1.0.2 later, change reporter from "summary" to default_reporter()
    testthat:::run_tests("SparkR",
                         file.path(sparkRDir, "pkg", "tests", "fulltests"),
                         NULL,
                         "summary")
  }

  # Clean up the Spark distribution downloaded by install.spark() above.
  SparkR:::uninstallDownloadedSpark()

}