This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.0 by this push:
new 70f3ce6 [SPARK-32020][SQL] Better error message when SPARK_HOME or spark.test.home is not set
70f3ce6 is described below
commit 70f3ce69ee6ae01393e853ccea16a259804b5d85
Author: Dilip Biswal <[email protected]>
AuthorDate: Thu Jun 18 22:45:55 2020 +0900
[SPARK-32020][SQL] Better error message when SPARK_HOME or spark.test.home is not set
### What changes were proposed in this pull request?
Better error message when SPARK_HOME or spark.test.home is not set.
### Why are the changes needed?
Currently the error message is not easy to consume: the real error is printed only after the current environment, which is rather long (see below).
**Old output**
`
time.name" -> "Java(TM) SE Runtime Environment", "sun.boot.library.path"
-> "/Library/Java/JavaVirtualMachines/jdk1.8.0_221.jdk/Contents/Home/jre/lib",
"java.vm.version" -> "25.221-b11",
. . .
. . .
. . .
) did not contain key "SPARK_HOME" spark.test.home or SPARK_HOME is not set.
at org.scalatest.Assertions.newAssertionFailedExceptio
`
**New output**
`
An exception or error caused a run to abort: spark.test.home or SPARK_HOME is not set.
org.scalatest.exceptions.TestFailedException: spark.test.home or SPARK_HOME is not set
`
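For context: ScalaTest's `assert` macro renders the evaluated operands on failure, so asserting directly on `sys.env.contains("SPARK_HOME")` echoes the entire environment map before the clue message, which is what produced the old output above. Checking the condition first and calling `fail(msg)` instead throws a `TestFailedException` carrying only the message. A minimal sketch of the pattern this patch adopts:
`
import org.scalatest.Assertions.fail

// Before: on failure the assert macro prints the whole evaluated map,
// burying the clue message at the very end.
// assert(sys.props.contains("spark.test.home") ||
//   sys.env.contains("SPARK_HOME"), "spark.test.home or SPARK_HOME is not set.")

// After: fail() reports only the message.
if (!(sys.props.contains("spark.test.home") || sys.env.contains("SPARK_HOME"))) {
  fail("spark.test.home or SPARK_HOME is not set.")
}
val sparkHome = sys.props.getOrElse("spark.test.home", sys.env("SPARK_HOME"))
`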
### Does this PR introduce any user-facing change?
No.
### How was this patch tested?
Ran the tests in IntelliJ manually to see the new error.
Closes #28825 from dilipbiswal/minor-spark-31950-followup.
Authored-by: Dilip Biswal <[email protected]>
Signed-off-by: HyukjinKwon <[email protected]>
(cherry picked from commit e4f503614625f81aa86455ad5f6bec3b61d525f1)
Signed-off-by: HyukjinKwon <[email protected]>
---
.../sql/catalyst/parser/TableIdentifierParserSuite.scala | 6 ------
.../org/apache/spark/sql/catalyst/plans/SQLHelper.scala | 9 +++++++++
.../scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala | 6 ------
.../scala/org/apache/spark/sql/IntegratedUDFTestUtils.scala | 9 +--------
.../test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala | 12 +++---------
5 files changed, 13 insertions(+), 29 deletions(-)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
index 04c427d..a721e17 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
@@ -18,7 +18,6 @@ package org.apache.spark.sql.catalyst.parser
import java.io.File
import java.nio.file.Files
-import java.util.Locale
import scala.collection.JavaConverters._
import scala.collection.mutable
@@ -295,11 +294,6 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper {
private val sqlSyntaxDefs = {
val sqlBasePath = {
- val sparkHome = {
- assert(sys.props.contains("spark.test.home") ||
- sys.env.contains("SPARK_HOME"), "spark.test.home or SPARK_HOME is not set.")
- sys.props.getOrElse("spark.test.home", sys.env("SPARK_HOME"))
- }
java.nio.file.Paths.get(sparkHome, "sql", "catalyst", "src", "main", "antlr4", "org",
"apache", "spark", "sql", "catalyst", "parser", "SqlBase.g4").toFile
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SQLHelper.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SQLHelper.scala
index d213743..5deab79 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SQLHelper.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SQLHelper.scala
@@ -21,6 +21,8 @@ import java.time.ZoneId
import scala.util.control.NonFatal
+import org.scalatest.Assertions.fail
+
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils
import org.apache.spark.sql.catalyst.util.DateTimeUtils.getZoneId
@@ -83,4 +85,11 @@ trait SQLHelper {
}
}
}
+
+ protected lazy val sparkHome: String = {
+ if (!(sys.props.contains("spark.test.home") || sys.env.contains("SPARK_HOME"))) {
+ fail("spark.test.home or SPARK_HOME is not set.")
+ }
+ sys.props.getOrElse("spark.test.home", sys.env("SPARK_HOME"))
+ }
}
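With the check centralized in `SQLHelper`, the suites below can drop their local copies and rely on the shared `sparkHome`. A hypothetical usage sketch (the suite name and resolved path are illustrative, not part of this patch):
`
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.plans.SQLHelper

class MyGrammarSuite extends SparkFunSuite with SQLHelper {
  // sparkHome now fails fast with the short message above when neither
  // spark.test.home nor SPARK_HOME is set.
  private val sqlBaseFile = java.nio.file.Paths.get(sparkHome,
    "sql", "catalyst", "src", "main", "antlr4", "org",
    "apache", "spark", "sql", "catalyst", "parser", "SqlBase.g4").toFile
}
`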
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala
index d69ecd7..81c09d1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala
@@ -72,12 +72,6 @@ class ExpressionsSchemaSuite extends QueryTest with SharedSparkSession {
// We use a path based on Spark home for 2 reasons:
// 1. Maven can't get correct resource directory when resources in other jars.
// 2. We test subclasses in the hive-thriftserver module.
- val sparkHome = {
- assert(sys.props.contains("spark.test.home") ||
- sys.env.contains("SPARK_HOME"), "spark.test.home or SPARK_HOME is not set.")
- sys.props.getOrElse("spark.test.home", sys.env("SPARK_HOME"))
- }
-
java.nio.file.Paths.get(sparkHome,
"sql", "core", "src", "test", "resources", "sql-functions").toFile
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/IntegratedUDFTestUtils.scala b/sql/core/src/test/scala/org/apache/spark/sql/IntegratedUDFTestUtils.scala
index 4a4504a..6391d56 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/IntegratedUDFTestUtils.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/IntegratedUDFTestUtils.scala
@@ -75,14 +75,7 @@ object IntegratedUDFTestUtils extends SQLHelper {
import scala.sys.process._
private lazy val pythonPath = sys.env.getOrElse("PYTHONPATH", "")
- private lazy val sparkHome = if (sys.props.contains(Tests.IS_TESTING.key)) {
- assert(sys.props.contains("spark.test.home") ||
- sys.env.contains("SPARK_HOME"), "spark.test.home or SPARK_HOME is not set.")
- sys.props.getOrElse("spark.test.home", sys.env("SPARK_HOME"))
- } else {
- assert(sys.env.contains("SPARK_HOME"), "SPARK_HOME is not set.")
- sys.env("SPARK_HOME")
- }
+
// Note that we will directly refer pyspark's source, not the zip from a regular build.
// It is possible the test is being ran without the build.
private lazy val sourcePath = Paths.get(sparkHome, "python").toAbsolutePath
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
index 005cac7..f43b838 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
@@ -19,13 +19,13 @@ package org.apache.spark.sql
import java.io.File
import java.util.Locale
-import java.util.regex.Pattern
-import scala.collection.mutable.{ArrayBuffer, HashMap}
+import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal
import org.apache.spark.{SparkConf, SparkException}
import org.apache.spark.sql.catalyst.planning.PhysicalOperation
+import org.apache.spark.sql.catalyst.plans.SQLHelper
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.RuleExecutor
import org.apache.spark.sql.catalyst.util.{fileToString, stringToFile}
@@ -122,7 +122,7 @@ import org.apache.spark.tags.ExtendedSQLTest
* different types of UDFs. See 'udf/udf-inner-join.sql' as an example.
*/
@ExtendedSQLTest
-class SQLQueryTestSuite extends QueryTest with SharedSparkSession {
+class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper {
import IntegratedUDFTestUtils._
@@ -132,12 +132,6 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession {
// We use a path based on Spark home for 2 reasons:
// 1. Maven can't get correct resource directory when resources in other jars.
// 2. We test subclasses in the hive-thriftserver module.
- val sparkHome = {
- assert(sys.props.contains("spark.test.home") ||
- sys.env.contains("SPARK_HOME"), "spark.test.home or SPARK_HOME is not set.")
- sys.props.getOrElse("spark.test.home", sys.env("SPARK_HOME"))
- }
-
java.nio.file.Paths.get(sparkHome,
"sql", "core", "src", "test", "resources", "sql-tests").toFile
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]