This is an automated email from the ASF dual-hosted git repository.

yamamuro pushed a commit to branch branch-2.3
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-2.3 by this push:
     new d397348  [SPARK-25572][SPARKR] test only if not cran
d397348 is described below

commit d397348b7bec20743f738694a135e4b67947fd99
Author: Felix Cheung <felixcheun...@hotmail.com>
AuthorDate: Sat Sep 29 14:48:32 2018 -0700

    [SPARK-25572][SPARKR] test only if not cran
    
    ## What changes were proposed in this pull request?
    
    CRAN doesn't seem to respect the system requirements when running tests - we
    have seen cases where SparkR is run on Java 10, which unfortunately Spark
    does not start on. For 2.4, let's attempt skipping all tests.
    
    ## How was this patch tested?
    
    manual, jenkins, appveyor
    
    Author: Felix Cheung <felixcheun...@hotmail.com>
    
    Closes #22589 from felixcheung/ralltests.
    
    (cherry picked from commit f4b138082ff91be74b0f5bbe19cdb90dd9e5f131)
    Signed-off-by: Takeshi Yamamuro <yamam...@apache.org>
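
For context, here is a minimal standalone sketch (not part of this commit) of
the gating pattern the change introduces in run-all.R: the whole test run is
skipped unless the NOT_CRAN environment variable is set to "true", which the
Jenkins and AppVeyor jobs do but CRAN check machines do not.

    # Illustrative sketch only; mirrors the gate added in run-all.R below.
    library(testthat)

    if (identical(Sys.getenv("NOT_CRAN"), "true")) {
      options(warn = 2)        # treat warnings as errors while testing
      test_package("SparkR")   # run the SparkR unit tests
    } else {
      message("NOT_CRAN is not 'true'; skipping SparkR tests")
    }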
---
 R/pkg/tests/run-all.R | 83 +++++++++++++++++++++++++++------------------------
 1 file changed, 44 insertions(+), 39 deletions(-)

diff --git a/R/pkg/tests/run-all.R b/R/pkg/tests/run-all.R
index 94d7518..1e96418 100644
--- a/R/pkg/tests/run-all.R
+++ b/R/pkg/tests/run-all.R
@@ -18,50 +18,55 @@
 library(testthat)
 library(SparkR)
 
-# Turn all warnings into errors
-options("warn" = 2)
+# SPARK-25572
+if (identical(Sys.getenv("NOT_CRAN"), "true")) {
 
-if (.Platform$OS.type == "windows") {
-  Sys.setenv(TZ = "GMT")
-}
+  # Turn all warnings into errors
+  options("warn" = 2)
 
-# Setup global test environment
-# Install Spark first to set SPARK_HOME
+  if (.Platform$OS.type == "windows") {
+    Sys.setenv(TZ = "GMT")
+  }
 
-# NOTE(shivaram): We set overwrite to handle any old tar.gz files or directories left behind on
-# CRAN machines. For Jenkins we should already have SPARK_HOME set.
-install.spark(overwrite = TRUE)
+  # Setup global test environment
+  # Install Spark first to set SPARK_HOME
 
-sparkRDir <- file.path(Sys.getenv("SPARK_HOME"), "R")
-sparkRWhitelistSQLDirs <- c("spark-warehouse", "metastore_db")
-invisible(lapply(sparkRWhitelistSQLDirs,
-                 function(x) { unlink(file.path(sparkRDir, x), recursive = TRUE, force = TRUE)}))
-sparkRFilesBefore <- list.files(path = sparkRDir, all.files = TRUE)
+  # NOTE(shivaram): We set overwrite to handle any old tar.gz files or directories left behind on
+  # CRAN machines. For Jenkins we should already have SPARK_HOME set.
+  install.spark(overwrite = TRUE)
 
-sparkRTestMaster <- "local[1]"
-sparkRTestConfig <- list()
-if (identical(Sys.getenv("NOT_CRAN"), "true")) {
-  sparkRTestMaster <- ""
-} else {
-  # Disable hsperfdata on CRAN
-  old_java_opt <- Sys.getenv("_JAVA_OPTIONS")
-  Sys.setenv("_JAVA_OPTIONS" = paste("-XX:-UsePerfData", old_java_opt))
-  tmpDir <- tempdir()
-  tmpArg <- paste0("-Djava.io.tmpdir=", tmpDir)
-  sparkRTestConfig <- list(spark.driver.extraJavaOptions = tmpArg,
-                           spark.executor.extraJavaOptions = tmpArg)
-}
+  sparkRDir <- file.path(Sys.getenv("SPARK_HOME"), "R")
+  sparkRWhitelistSQLDirs <- c("spark-warehouse", "metastore_db")
+  invisible(lapply(sparkRWhitelistSQLDirs,
+                   function(x) { unlink(file.path(sparkRDir, x), recursive = TRUE, force = TRUE)}))
+  sparkRFilesBefore <- list.files(path = sparkRDir, all.files = TRUE)
 
-test_package("SparkR")
+  sparkRTestMaster <- "local[1]"
+  sparkRTestConfig <- list()
+  if (identical(Sys.getenv("NOT_CRAN"), "true")) {
+    sparkRTestMaster <- ""
+  } else {
+    # Disable hsperfdata on CRAN
+    old_java_opt <- Sys.getenv("_JAVA_OPTIONS")
+    Sys.setenv("_JAVA_OPTIONS" = paste("-XX:-UsePerfData", old_java_opt))
+    tmpDir <- tempdir()
+    tmpArg <- paste0("-Djava.io.tmpdir=", tmpDir)
+    sparkRTestConfig <- list(spark.driver.extraJavaOptions = tmpArg,
+                             spark.executor.extraJavaOptions = tmpArg)
+  }
 
-if (identical(Sys.getenv("NOT_CRAN"), "true")) {
-  # set random seed for predictable results. mostly for base's sample() in tree and classification
-  set.seed(42)
-  # for testthat 1.0.2 later, change reporter from "summary" to default_reporter()
-  testthat:::run_tests("SparkR",
-                       file.path(sparkRDir, "pkg", "tests", "fulltests"),
-                       NULL,
-                       "summary")
-}
+  test_package("SparkR")
+
+  if (identical(Sys.getenv("NOT_CRAN"), "true")) {
+    # set random seed for predictable results. mostly for base's sample() in tree and classification
+    set.seed(42)
+    # for testthat 1.0.2 later, change reporter from "summary" to default_reporter()
+    testthat:::run_tests("SparkR",
+                         file.path(sparkRDir, "pkg", "tests", "fulltests"),
+                         NULL,
+                         "summary")
+  }
 
-SparkR:::uninstallDownloadedSpark()
+  SparkR:::uninstallDownloadedSpark()
+
+}
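
As a usage note (not part of the commit), the gate above can be exercised by
hand roughly as follows; NOT_CRAN is normally set by the CI scripts, so
setting or unsetting it here is purely for illustration.

    # Hypothetical manual check, run from the Spark source root in R:
    # with NOT_CRAN unset the script should do nothing, with it set to
    # "true" it should install Spark and run the full test suite.
    Sys.unsetenv("NOT_CRAN")
    source("R/pkg/tests/run-all.R")   # expected: everything is skipped

    Sys.setenv(NOT_CRAN = "true")
    source("R/pkg/tests/run-all.R")   # expected: tests run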

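For completeness, the sparkRTestMaster and sparkRTestConfig values prepared in
run-all.R are what the individual test files use when starting a session. A
rough sketch of that call (assuming Spark is already installed and SPARK_HOME
is set; the exact helper usage in the test files may differ):

    library(SparkR)
    sparkRTestMaster <- "local[1]"   # assumption: local master for this sketch
    sparkRTestConfig <- list()
    # Start a session with the prepared master/config, then tear it down.
    sparkSession <- sparkR.session(master = sparkRTestMaster,
                                   sparkConfig = sparkRTestConfig,
                                   enableHiveSupport = FALSE)
    sparkR.session.stop()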

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
