This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.3 by this push:
new 2e102b8bd23 [SPARK-38949][SQL][3.3] Wrap SQL statements by double
quotes in error messages
2e102b8bd23 is described below
commit 2e102b8bd233441bb2dd74e1870de5b8218d5331
Author: Max Gekk <[email protected]>
AuthorDate: Wed Apr 20 22:52:04 2022 +0300
[SPARK-38949][SQL][3.3] Wrap SQL statements by double quotes in error
messages
### What changes were proposed in this pull request?
In the PR, I propose to wrap any SQL statement in error messages by double
quotes "", and apply new implementation of `QueryErrorsBase.toSQLStmt()` to all
exceptions in `Query.*Errors` w/ error classes. Also this PR modifies all
affected tests, see the list in the section "How was this patch tested?".
### Why are the changes needed?
To improve user experience with Spark SQL by highlighting SQL statements in
error messages and making them more visible to users.
### Does this PR introduce _any_ user-facing change?
Yes. The changes might affect error messages that are visible to
users.
Before:
```sql
The operation DESC PARTITION is not allowed
```
After:
```sql
The operation "DESC PARTITION" is not allowed
```
### How was this patch tested?
By running affected test suites:
```
$ build/sbt "sql/testOnly *QueryExecutionErrorsSuite"
$ build/sbt "sql/testOnly *QueryParsingErrorsSuite"
$ build/sbt "sql/testOnly *QueryCompilationErrorsSuite"
$ build/sbt "test:testOnly *QueryCompilationErrorsDSv2Suite"
$ build/sbt "test:testOnly *ExtractPythonUDFFromJoinConditionSuite"
$ build/sbt "testOnly *PlanParserSuite"
$ build/sbt "sql/testOnly *SQLQueryTestSuite -- -z transform.sql"
$ build/sbt "sql/testOnly *SQLQueryTestSuite -- -z join-lateral.sql"
$ build/sbt "sql/testOnly *SQLQueryTestSuite -- -z describe.sql"
```
Authored-by: Max Gekk <max.gekk@gmail.com>
Signed-off-by: Max Gekk <max.gekk@gmail.com>
(cherry picked from commit 5aba2b38beae6e1baf6f0c6f9eb3b65cf607fe77)
Signed-off-by: Max Gekk <max.gekk@gmail.com>
Closes #36286 from MaxGekk/error-class-apply-toSQLStmt-3.3.
Authored-by: Max Gekk <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
python/pyspark/sql/tests/test_udf.py | 13 ++++---
.../spark/sql/catalyst/parser/AstBuilder.scala | 2 +-
.../spark/sql/errors/QueryCompilationErrors.scala | 7 +++-
.../apache/spark/sql/errors/QueryErrorsBase.scala | 7 ++++
.../spark/sql/errors/QueryExecutionErrors.scala | 4 +-
.../spark/sql/errors/QueryParsingErrors.scala | 44 ++++++++++++++++------
.../ExtractPythonUDFFromJoinConditionSuite.scala | 4 +-
.../spark/sql/catalyst/parser/DDLParserSuite.scala | 7 ++--
.../sql/catalyst/parser/PlanParserSuite.scala | 2 +-
.../sql-tests/results/join-lateral.sql.out | 4 +-
.../resources/sql-tests/results/transform.sql.out | 4 +-
.../errors/QueryCompilationErrorsDSv2Suite.scala | 2 +-
.../sql/errors/QueryCompilationErrorsSuite.scala | 2 +-
.../sql/errors/QueryExecutionErrorsSuite.scala | 4 +-
.../spark/sql/errors/QueryParsingErrorsSuite.scala | 31 +++++++--------
.../execution/SparkScriptTransformationSuite.scala | 2 +-
16 files changed, 83 insertions(+), 56 deletions(-)
diff --git a/python/pyspark/sql/tests/test_udf.py
b/python/pyspark/sql/tests/test_udf.py
index 805d5a8dfec..e40c3ba0d64 100644
--- a/python/pyspark/sql/tests/test_udf.py
+++ b/python/pyspark/sql/tests/test_udf.py
@@ -258,15 +258,16 @@ class UDFTests(ReusedSQLTestCase):
def runWithJoinType(join_type, type_string):
with self.assertRaisesRegex(
AnalysisException,
- "Using PythonUDF in join condition of join type %s is not
supported" % type_string,
+ """Using PythonUDF in join condition of join type "%s" is not
supported"""
+ % type_string,
):
left.join(right, [f("a", "b"), left.a1 == right.b1],
join_type).collect()
- runWithJoinType("full", "FullOuter")
- runWithJoinType("left", "LeftOuter")
- runWithJoinType("right", "RightOuter")
- runWithJoinType("leftanti", "LeftAnti")
- runWithJoinType("leftsemi", "LeftSemi")
+ runWithJoinType("full", "FULL OUTER")
+ runWithJoinType("left", "LEFT OUTER")
+ runWithJoinType("right", "RIGHT OUTER")
+ runWithJoinType("leftanti", "LEFT ANTI")
+ runWithJoinType("leftsemi", "LEFT SEMI")
def test_udf_as_join_condition(self):
left = self.spark.createDataFrame([Row(a=1, a1=1, a2=1), Row(a=2,
a1=2, a2=2)])
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index e788368604f..60e691ba4ac 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -1161,7 +1161,7 @@ class AstBuilder extends SqlBaseParserBaseVisitor[AnyRef]
with SQLConfHelper wit
}
if (join.LATERAL != null) {
if (!Seq(Inner, Cross, LeftOuter).contains(joinType)) {
- throw QueryParsingErrors.unsupportedLateralJoinTypeError(ctx,
joinType.toString)
+ throw QueryParsingErrors.unsupportedLateralJoinTypeError(ctx,
joinType.sql)
}
LateralJoin(left, LateralSubquery(plan(join.right)), joinType,
condition)
} else {
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 65b59655be0..3a8cd689666 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -94,7 +94,9 @@ object QueryCompilationErrors extends QueryErrorsBase {
def unsupportedIfNotExistsError(tableName: String): Throwable = {
new AnalysisException(
errorClass = "UNSUPPORTED_FEATURE",
- messageParameters = Array(s"IF NOT EXISTS for the table '$tableName' by
INSERT INTO."))
+ messageParameters = Array(
+ s"${toSQLStmt("IF NOT EXISTS")} for the table '$tableName' " +
+ s"by ${toSQLStmt("INSERT INTO")}."))
}
def nonPartitionColError(partitionName: String): Throwable = {
@@ -1576,7 +1578,8 @@ object QueryCompilationErrors extends QueryErrorsBase {
new AnalysisException(
errorClass = "UNSUPPORTED_FEATURE",
messageParameters = Array(
- s"Using PythonUDF in join condition of join type $joinType is not
supported"))
+ "Using PythonUDF in join condition of join type " +
+ s"${toSQLStmt(joinType.sql)} is not supported."))
}
def conflictingAttributesInJoinConditionError(
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala
index 7002f19f9fc..b115891f370 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala
@@ -17,6 +17,8 @@
package org.apache.spark.sql.errors
+import java.util.Locale
+
import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.types.{DataType, DoubleType, FloatType}
@@ -45,6 +47,11 @@ trait QueryErrorsBase {
litToErrorValue(Literal.create(v, t))
}
+ // Quote sql statements in error messages.
+ def toSQLStmt(text: String): String = {
+ "\"" + text.toUpperCase(Locale.ROOT) + "\""
+ }
+
def toSQLType(t: DataType): String = {
t.sql
}
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index cbae61a66d5..86dcf4cfc3c 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1927,13 +1927,13 @@ object QueryExecutionErrors extends QueryErrorsBase {
def repeatedPivotsUnsupportedError(): Throwable = {
new SparkUnsupportedOperationException(
errorClass = "UNSUPPORTED_FEATURE",
- messageParameters = Array("Repeated pivots."))
+ messageParameters = Array(s"Repeated ${toSQLStmt("pivot")}s."))
}
def pivotNotAfterGroupByUnsupportedError(): Throwable = {
new SparkUnsupportedOperationException(
errorClass = "UNSUPPORTED_FEATURE",
- messageParameters = Array("Pivot not after a groupBy."))
+ messageParameters = Array(s"${toSQLStmt("pivot")} not after a
${toSQLStmt("group by")}."))
}
def invalidAesKeyLengthError(actualLength: Int): RuntimeException = {
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
index ad0973ccbb4..39c1944bbba 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
@@ -91,13 +91,19 @@ object QueryParsingErrors extends QueryErrorsBase {
}
def transformNotSupportQuantifierError(ctx: ParserRuleContext): Throwable = {
- new ParseException("UNSUPPORTED_FEATURE",
- Array("TRANSFORM does not support DISTINCT/ALL in inputs"), ctx)
+ new ParseException(
+ errorClass = "UNSUPPORTED_FEATURE",
+ messageParameters = Array(s"${toSQLStmt("TRANSFORM")} does not support" +
+ s" ${toSQLStmt("DISTINCT")}/${toSQLStmt("ALL")} in inputs"),
+ ctx)
}
def transformWithSerdeUnsupportedError(ctx: ParserRuleContext): Throwable = {
- new ParseException("UNSUPPORTED_FEATURE",
- Array("TRANSFORM with serde is only supported in hive mode"), ctx)
+ new ParseException(
+ errorClass = "UNSUPPORTED_FEATURE",
+ messageParameters = Array(
+ s"${toSQLStmt("TRANSFORM")} with serde is only supported in hive
mode"),
+ ctx)
}
def lateralWithPivotInFromClauseNotAllowedError(ctx: FromClauseContext):
Throwable = {
@@ -105,19 +111,31 @@ object QueryParsingErrors extends QueryErrorsBase {
}
def lateralJoinWithNaturalJoinUnsupportedError(ctx: ParserRuleContext):
Throwable = {
- new ParseException("UNSUPPORTED_FEATURE", Array("LATERAL join with NATURAL
join."), ctx)
+ new ParseException(
+ errorClass = "UNSUPPORTED_FEATURE",
+ messageParameters = Array(s"${toSQLStmt("LATERAL")} join with
${toSQLStmt("NATURAL")} join."),
+ ctx)
}
def lateralJoinWithUsingJoinUnsupportedError(ctx: ParserRuleContext):
Throwable = {
- new ParseException("UNSUPPORTED_FEATURE", Array("LATERAL join with USING
join."), ctx)
+ new ParseException(
+ errorClass = "UNSUPPORTED_FEATURE",
+ messageParameters = Array(s"${toSQLStmt("LATERAL")} join with
${toSQLStmt("USING")} join."),
+ ctx)
}
def unsupportedLateralJoinTypeError(ctx: ParserRuleContext, joinType:
String): Throwable = {
- new ParseException("UNSUPPORTED_FEATURE", Array(s"LATERAL join type
'$joinType'."), ctx)
+ new ParseException(
+ errorClass = "UNSUPPORTED_FEATURE",
+ messageParameters = Array(s"${toSQLStmt("LATERAL")} join type
${toSQLStmt(joinType)}."),
+ ctx)
}
def invalidLateralJoinRelationError(ctx: RelationPrimaryContext): Throwable
= {
- new ParseException("INVALID_SQL_SYNTAX", Array("LATERAL can only be used
with subquery."), ctx)
+ new ParseException(
+ errorClass = "INVALID_SQL_SYNTAX",
+ messageParameters = Array(s"${toSQLStmt("LATERAL")} can only be used
with subquery."),
+ ctx)
}
def repetitiveWindowDefinitionError(name: String, ctx: WindowClauseContext):
Throwable = {
@@ -136,7 +154,7 @@ object QueryParsingErrors extends QueryErrorsBase {
}
def naturalCrossJoinUnsupportedError(ctx: RelationContext): Throwable = {
- new ParseException("UNSUPPORTED_FEATURE", Array("NATURAL CROSS JOIN."),
ctx)
+ new ParseException("UNSUPPORTED_FEATURE", Array(toSQLStmt("NATURAL CROSS
JOIN") + "."), ctx)
}
def emptyInputForTableSampleError(ctx: ParserRuleContext): Throwable = {
@@ -298,14 +316,18 @@ object QueryParsingErrors extends QueryErrorsBase {
}
def showFunctionsUnsupportedError(identifier: String, ctx:
IdentifierContext): Throwable = {
- new ParseException(s"SHOW $identifier FUNCTIONS not supported", ctx)
+ new ParseException(
+ errorClass = "INVALID_SQL_SYNTAX",
+ messageParameters = Array(
+ s"${toSQLStmt("SHOW")} $identifier ${toSQLStmt("FUNCTIONS")} not
supported"),
+ ctx)
}
def showFunctionsInvalidPatternError(pattern: String, ctx:
ParserRuleContext): Throwable = {
new ParseException(
errorClass = "INVALID_SQL_SYNTAX",
messageParameters = Array(
- s"Invalid pattern in SHOW FUNCTIONS: $pattern. " +
+ s"Invalid pattern in ${toSQLStmt("SHOW FUNCTIONS")}: $pattern. " +
s"It must be a ${toSQLType(StringType)} literal."),
ctx)
}
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExtractPythonUDFFromJoinConditionSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExtractPythonUDFFromJoinConditionSuite.scala
index 65c8f5d300c..1e58f5c94b0 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExtractPythonUDFFromJoinConditionSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExtractPythonUDFFromJoinConditionSuite.scala
@@ -187,9 +187,9 @@ class ExtractPythonUDFFromJoinConditionSuite extends
PlanTest {
condition = Some(unevaluableJoinCond))
Optimize.execute(query.analyze)
}
- assert(e.message.contentEquals(
+ assert(e.message ==
"The feature is not supported: " +
- s"Using PythonUDF in join condition of join type $joinType is not
supported"))
+ s"""Using PythonUDF in join condition of join type "${joinType.sql}"
is not supported.""")
val query2 = testRelationLeft.join(
testRelationRight,
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index 472506fa907..bc5380e27f5 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -2048,12 +2048,11 @@ class DDLParserSuite extends AnalysisTest {
comparePlans(
parsePlan("SHOW FUNCTIONS IN db LIKE 'funct*'"),
ShowFunctions(UnresolvedNamespace(Seq("db")), true, true,
Some("funct*")))
- val sql = "SHOW other FUNCTIONS"
- intercept(sql, s"$sql not supported")
+ intercept("SHOW other FUNCTIONS", "\"SHOW\" other \"FUNCTIONS\" not
supported")
intercept("SHOW FUNCTIONS IN db f1",
- "Invalid pattern in SHOW FUNCTIONS: f1")
+ "Invalid pattern in \"SHOW FUNCTIONS\": f1")
intercept("SHOW FUNCTIONS IN db LIKE f1",
- "Invalid pattern in SHOW FUNCTIONS: f1")
+ "Invalid pattern in \"SHOW FUNCTIONS\": f1")
// The legacy syntax.
comparePlans(
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
index 688c0d12373..fb9fdfb8598 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
@@ -1254,7 +1254,7 @@ class PlanParserSuite extends AnalysisTest {
| "escapeChar" = "\\")
|FROM testData
""".stripMargin,
- "TRANSFORM with serde is only supported in hive mode")
+ "\"TRANSFORM\" with serde is only supported in hive mode")
}
diff --git a/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out
b/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out
index cc1619813dd..6e47579a9b0 100644
--- a/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out
@@ -153,7 +153,7 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
-The feature is not supported: LATERAL join with NATURAL join.(line 1, pos 14)
+The feature is not supported: "LATERAL" join with "NATURAL" join.(line 1, pos
14)
== SQL ==
SELECT * FROM t1 NATURAL JOIN LATERAL (SELECT c1 + c2 AS c2)
@@ -167,7 +167,7 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
-The feature is not supported: LATERAL join with USING join.(line 1, pos 14)
+The feature is not supported: "LATERAL" join with "USING" join.(line 1, pos 14)
== SQL ==
SELECT * FROM t1 JOIN LATERAL (SELECT c1 + c2 AS c2) USING (c2)
diff --git a/sql/core/src/test/resources/sql-tests/results/transform.sql.out
b/sql/core/src/test/resources/sql-tests/results/transform.sql.out
index be57390761b..69fe58e1343 100644
--- a/sql/core/src/test/resources/sql-tests/results/transform.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/transform.sql.out
@@ -719,7 +719,7 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
-The feature is not supported: TRANSFORM does not support DISTINCT/ALL in
inputs(line 1, pos 17)
+The feature is not supported: "TRANSFORM" does not support "DISTINCT"/"ALL" in
inputs(line 1, pos 17)
== SQL ==
SELECT TRANSFORM(DISTINCT b, a, c)
@@ -739,7 +739,7 @@ struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
-The feature is not supported: TRANSFORM does not support DISTINCT/ALL in
inputs(line 1, pos 17)
+The feature is not supported: "TRANSFORM" does not support "DISTINCT"/"ALL" in
inputs(line 1, pos 17)
== SQL ==
SELECT TRANSFORM(ALL b, a, c)
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsDSv2Suite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsDSv2Suite.scala
index bfea3f535dd..be8e6524920 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsDSv2Suite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsDSv2Suite.scala
@@ -43,7 +43,7 @@ class QueryCompilationErrorsDSv2Suite
checkAnswer(spark.table(tbl), spark.emptyDataFrame)
assert(e.getMessage === "The feature is not supported: " +
- s"IF NOT EXISTS for the table '$tbl' by INSERT INTO.")
+ s""""IF NOT EXISTS" for the table '$tbl' by "INSERT INTO".""")
assert(e.getErrorClass === "UNSUPPORTED_FEATURE")
assert(e.getSqlState === "0A000")
}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index cac1ef67fac..6a7da405fcc 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -149,7 +149,7 @@ class QueryCompilationErrorsSuite extends QueryTest with
SharedSparkSession {
assert(e.getSqlState === "0A000")
assert(e.message ===
"The feature is not supported: " +
- "Using PythonUDF in join condition of join type LeftOuter is not
supported")
+ "Using PythonUDF in join condition of join type \"LEFT OUTER\" is not
supported.")
}
test("UNSUPPORTED_FEATURE: Using pandas UDF aggregate expression with
pivot") {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index f73d1e1c3c5..9ff57859acb 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -156,7 +156,7 @@ class QueryExecutionErrorsSuite extends QueryTest
}
assert(e1.getErrorClass === "UNSUPPORTED_FEATURE")
assert(e1.getSqlState === "0A000")
- assert(e1.getMessage === "The feature is not supported: Repeated pivots.")
+ assert(e1.getMessage === """The feature is not supported: Repeated
"PIVOT"s.""")
val e2 = intercept[SparkUnsupportedOperationException] {
trainingSales
@@ -167,7 +167,7 @@ class QueryExecutionErrorsSuite extends QueryTest
}
assert(e2.getErrorClass === "UNSUPPORTED_FEATURE")
assert(e2.getSqlState === "0A000")
- assert(e2.getMessage === "The feature is not supported: Pivot not after a
groupBy.")
+ assert(e2.getMessage === """The feature is not supported: "PIVOT" not
after a "GROUP BY".""")
}
test("INCONSISTENT_BEHAVIOR_CROSS_VERSION: " +
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
index 5610c4d000b..5a47ce5ae73 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
@@ -21,6 +21,8 @@ import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.test.SharedSparkSession
+// Turn of the length check because most of the tests check entire error
messages
+// scalastyle:off line.size.limit
class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
def validateParsingError(
sqlText: String,
@@ -42,7 +44,7 @@ class QueryParsingErrorsSuite extends QueryTest with
SharedSparkSession {
sqlState = "0A000",
message =
"""
- |The feature is not supported: LATERAL join with NATURAL join.(line
1, pos 14)
+ |The feature is not supported: "LATERAL" join with "NATURAL"
join.(line 1, pos 14)
|
|== SQL ==
|SELECT * FROM t1 NATURAL JOIN LATERAL (SELECT c1 + c2 AS c2)
@@ -57,7 +59,7 @@ class QueryParsingErrorsSuite extends QueryTest with
SharedSparkSession {
sqlState = "0A000",
message =
"""
- |The feature is not supported: LATERAL join with USING join.(line 1,
pos 14)
+ |The feature is not supported: "LATERAL" join with "USING"
join.(line 1, pos 14)
|
|== SQL ==
|SELECT * FROM t1 JOIN LATERAL (SELECT c1 + c2 AS c2) USING (c2)
@@ -66,21 +68,17 @@ class QueryParsingErrorsSuite extends QueryTest with
SharedSparkSession {
}
test("UNSUPPORTED_FEATURE: Unsupported LATERAL join type") {
- Seq(
- ("RIGHT OUTER", "RightOuter"),
- ("FULL OUTER", "FullOuter"),
- ("LEFT SEMI", "LeftSemi"),
- ("LEFT ANTI", "LeftAnti")).foreach { pair =>
+ Seq("RIGHT OUTER", "FULL OUTER", "LEFT SEMI", "LEFT ANTI").foreach {
joinType =>
validateParsingError(
- sqlText = s"SELECT * FROM t1 ${pair._1} JOIN LATERAL (SELECT c1 + c2
AS c3) ON c2 = c3",
+ sqlText = s"SELECT * FROM t1 $joinType JOIN LATERAL (SELECT c1 + c2 AS
c3) ON c2 = c3",
errorClass = "UNSUPPORTED_FEATURE",
sqlState = "0A000",
message =
s"""
- |The feature is not supported: LATERAL join type
'${pair._2}'.(line 1, pos 14)
+ |The feature is not supported: "LATERAL" join type
"$joinType".(line 1, pos 14)
|
|== SQL ==
- |SELECT * FROM t1 ${pair._1} JOIN LATERAL (SELECT c1 + c2 AS c3)
ON c2 = c3
+ |SELECT * FROM t1 $joinType JOIN LATERAL (SELECT c1 + c2 AS c3) ON
c2 = c3
|--------------^^^
|""".stripMargin)
}
@@ -101,7 +99,7 @@ class QueryParsingErrorsSuite extends QueryTest with
SharedSparkSession {
sqlState = "42000",
message =
s"""
- |Invalid SQL syntax: LATERAL can only be used with subquery.(line
1, pos $pos)
+ |Invalid SQL syntax: "LATERAL" can only be used with
subquery.(line 1, pos $pos)
|
|== SQL ==
|$sqlText
@@ -117,7 +115,7 @@ class QueryParsingErrorsSuite extends QueryTest with
SharedSparkSession {
sqlState = "0A000",
message =
"""
- |The feature is not supported: NATURAL CROSS JOIN.(line 1, pos 14)
+ |The feature is not supported: "NATURAL CROSS JOIN".(line 1, pos 14)
|
|== SQL ==
|SELECT * FROM a NATURAL CROSS JOIN b
@@ -177,8 +175,7 @@ class QueryParsingErrorsSuite extends QueryTest with
SharedSparkSession {
sqlState = "0A000",
message =
"""
- |The feature is not supported: """.stripMargin +
- """TRANSFORM does not support DISTINCT/ALL in inputs(line 1, pos 17)
+ |The feature is not supported: "TRANSFORM" does not support
"DISTINCT"/"ALL" in inputs(line 1, pos 17)
|
|== SQL ==
|SELECT TRANSFORM(DISTINCT a) USING 'a' FROM t
@@ -194,12 +191,10 @@ class QueryParsingErrorsSuite extends QueryTest with
SharedSparkSession {
sqlState = "0A000",
message =
"""
- |The feature is not supported: """.stripMargin +
- """TRANSFORM with serde is only supported in hive mode(line 1, pos 0)
+ |The feature is not supported: "TRANSFORM" with serde is only
supported in hive mode(line 1, pos 0)
|
|== SQL ==
- |SELECT TRANSFORM(a) ROW FORMAT SERDE """.stripMargin +
- """'org.apache.hadoop.hive.serde2.OpenCSVSerde' USING 'a' FROM t
+ |SELECT TRANSFORM(a) ROW FORMAT SERDE
'org.apache.hadoop.hive.serde2.OpenCSVSerde' USING 'a' FROM t
|^^^
|""".stripMargin)
}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkScriptTransformationSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkScriptTransformationSuite.scala
index 5638743b763..1f431e173b3 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkScriptTransformationSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkScriptTransformationSuite.scala
@@ -56,7 +56,7 @@ class SparkScriptTransformationSuite extends
BaseScriptTransformationSuite with
|FROM v
""".stripMargin)
}.getMessage
- assert(e.contains("TRANSFORM with serde is only supported in hive mode"))
+ assert(e.contains("\"TRANSFORM\" with serde is only supported in hive
mode"))
}
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]