Repository: spark
Updated Branches:
  refs/heads/master f246813af -> f4b138082


[SPARK-25572][SPARKR] test only if not cran

## What changes were proposed in this pull request?

CRAN doesn't seem to respect the system requirements when running tests - we
have seen cases where SparkR is run on Java 10, which unfortunately Spark does
not start on. For 2.4, let's attempt skipping all tests

## How was this patch tested?

manual, jenkins, appveyor

Author: Felix Cheung <[email protected]>

Closes #22589 from felixcheung/ralltests.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f4b13808
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f4b13808
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f4b13808

Branch: refs/heads/master
Commit: f4b138082ff91be74b0f5bbe19cdb90dd9e5f131
Parents: f246813
Author: Felix Cheung <[email protected]>
Authored: Sat Sep 29 14:48:32 2018 -0700
Committer: Felix Cheung <[email protected]>
Committed: Sat Sep 29 14:48:32 2018 -0700

----------------------------------------------------------------------
 R/pkg/tests/run-all.R | 83 ++++++++++++++++++++++++----------------------
 1 file changed, 44 insertions(+), 39 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/f4b13808/R/pkg/tests/run-all.R
----------------------------------------------------------------------
diff --git a/R/pkg/tests/run-all.R b/R/pkg/tests/run-all.R
index 94d7518..1e96418 100644
--- a/R/pkg/tests/run-all.R
+++ b/R/pkg/tests/run-all.R
@@ -18,50 +18,55 @@
 library(testthat)
 library(SparkR)
 
-# Turn all warnings into errors
-options("warn" = 2)
+# SPARK-25572
+if (identical(Sys.getenv("NOT_CRAN"), "true")) {
 
-if (.Platform$OS.type == "windows") {
-  Sys.setenv(TZ = "GMT")
-}
+  # Turn all warnings into errors
+  options("warn" = 2)
 
-# Setup global test environment
-# Install Spark first to set SPARK_HOME
+  if (.Platform$OS.type == "windows") {
+    Sys.setenv(TZ = "GMT")
+  }
 
-# NOTE(shivaram): We set overwrite to handle any old tar.gz files or directories left behind on
-# CRAN machines. For Jenkins we should already have SPARK_HOME set.
-install.spark(overwrite = TRUE)
+  # Setup global test environment
+  # Install Spark first to set SPARK_HOME
 
-sparkRDir <- file.path(Sys.getenv("SPARK_HOME"), "R")
-sparkRWhitelistSQLDirs <- c("spark-warehouse", "metastore_db")
-invisible(lapply(sparkRWhitelistSQLDirs,
-                 function(x) { unlink(file.path(sparkRDir, x), recursive = TRUE, force = TRUE)}))
-sparkRFilesBefore <- list.files(path = sparkRDir, all.files = TRUE)
+  # NOTE(shivaram): We set overwrite to handle any old tar.gz files or directories left behind on
+  # CRAN machines. For Jenkins we should already have SPARK_HOME set.
+  install.spark(overwrite = TRUE)
 
-sparkRTestMaster <- "local[1]"
-sparkRTestConfig <- list()
-if (identical(Sys.getenv("NOT_CRAN"), "true")) {
-  sparkRTestMaster <- ""
-} else {
-  # Disable hsperfdata on CRAN
-  old_java_opt <- Sys.getenv("_JAVA_OPTIONS")
-  Sys.setenv("_JAVA_OPTIONS" = paste("-XX:-UsePerfData", old_java_opt))
-  tmpDir <- tempdir()
-  tmpArg <- paste0("-Djava.io.tmpdir=", tmpDir)
-  sparkRTestConfig <- list(spark.driver.extraJavaOptions = tmpArg,
-                           spark.executor.extraJavaOptions = tmpArg)
-}
+  sparkRDir <- file.path(Sys.getenv("SPARK_HOME"), "R")
+  sparkRWhitelistSQLDirs <- c("spark-warehouse", "metastore_db")
+  invisible(lapply(sparkRWhitelistSQLDirs,
+                   function(x) { unlink(file.path(sparkRDir, x), recursive = TRUE, force = TRUE)}))
+  sparkRFilesBefore <- list.files(path = sparkRDir, all.files = TRUE)
 
-test_package("SparkR")
+  sparkRTestMaster <- "local[1]"
+  sparkRTestConfig <- list()
+  if (identical(Sys.getenv("NOT_CRAN"), "true")) {
+    sparkRTestMaster <- ""
+  } else {
+    # Disable hsperfdata on CRAN
+    old_java_opt <- Sys.getenv("_JAVA_OPTIONS")
+    Sys.setenv("_JAVA_OPTIONS" = paste("-XX:-UsePerfData", old_java_opt))
+    tmpDir <- tempdir()
+    tmpArg <- paste0("-Djava.io.tmpdir=", tmpDir)
+    sparkRTestConfig <- list(spark.driver.extraJavaOptions = tmpArg,
+                             spark.executor.extraJavaOptions = tmpArg)
+  }
 
-if (identical(Sys.getenv("NOT_CRAN"), "true")) {
-  # set random seed for predictable results. mostly for base's sample() in tree and classification
-  set.seed(42)
-  # for testthat 1.0.2 later, change reporter from "summary" to default_reporter()
-  testthat:::run_tests("SparkR",
-                       file.path(sparkRDir, "pkg", "tests", "fulltests"),
-                       NULL,
-                       "summary")
-}
+  test_package("SparkR")
+
+  if (identical(Sys.getenv("NOT_CRAN"), "true")) {
+  # set random seed for predictable results. mostly for base's sample() in tree and classification
+    set.seed(42)
+  # for testthat 1.0.2 later, change reporter from "summary" to default_reporter()
+    testthat:::run_tests("SparkR",
+                         file.path(sparkRDir, "pkg", "tests", "fulltests"),
+                         NULL,
+                         "summary")
+  }
 
-SparkR:::uninstallDownloadedSpark()
+  SparkR:::uninstallDownloadedSpark()
+
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to