This is an automated email from the ASF dual-hosted git repository.
wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new f9e117e01672 [SPARK-50659][SQL] Move Union-related errors to
QueryCompilationErrors
f9e117e01672 is described below
commit f9e117e01672e65034ea96672e0f137d835ddcb0
Author: Vladimir Golubev <[email protected]>
AuthorDate: Wed Dec 25 11:13:41 2024 +0800
[SPARK-50659][SQL] Move Union-related errors to QueryCompilationErrors
### What changes were proposed in this pull request?
Move Union-related `NUM_COLUMNS_MISMATCH` and `INCOMPATIBLE_COLUMN_TYPE`
errors to `QueryCompilationErrors`.
### Why are the changes needed?
To improve code health and to reuse these errors in the single-pass Analyzer.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Existing tests.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #49284 from
vladimirg-db/vladimirg-db/move-union-related-errors-to-query-compilation-errors.
Authored-by: Vladimir Golubev <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
---
.../sql/catalyst/analysis/CheckAnalysis.scala | 32 ++++++++---------
.../spark/sql/errors/QueryCompilationErrors.scala | 40 ++++++++++++++++++++++
2 files changed, 56 insertions(+), 16 deletions(-)
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index 8f4431cb1ac1..b0d6a2a46baa 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -702,13 +702,13 @@ trait CheckAnalysis extends PredicateHelper with
LookupCatalog with QueryErrorsB
operator.children.tail.zipWithIndex.foreach { case (child, ti) =>
// Check the number of columns
if (child.output.length != ref.length) {
- e.failAnalysis(
- errorClass = "NUM_COLUMNS_MISMATCH",
- messageParameters = Map(
- "operator" -> toSQLStmt(operator.nodeName),
- "firstNumColumns" -> ref.length.toString,
- "invalidOrdinalNum" -> ordinalNumber(ti + 1),
- "invalidNumColumns" -> child.output.length.toString))
+ throw QueryCompilationErrors.numColumnsMismatch(
+ operator = operator.nodeName,
+ firstNumColumns = ref.length,
+ invalidOrdinalNum = ti + 1,
+ invalidNumColumns = child.output.length,
+ origin = operator.origin
+ )
}
val dataTypesAreCompatibleFn =
getDataTypesAreCompatibleFn(operator)
@@ -716,15 +716,15 @@ trait CheckAnalysis extends PredicateHelper with
LookupCatalog with QueryErrorsB
dataTypes(child).zip(ref).zipWithIndex.foreach { case ((dt1,
dt2), ci) =>
// SPARK-18058: we shall not care about the nullability of
columns
if (!dataTypesAreCompatibleFn(dt1, dt2)) {
- e.failAnalysis(
- errorClass = "INCOMPATIBLE_COLUMN_TYPE",
- messageParameters = Map(
- "operator" -> toSQLStmt(operator.nodeName),
- "columnOrdinalNumber" -> ordinalNumber(ci),
- "tableOrdinalNumber" -> ordinalNumber(ti + 1),
- "dataType1" -> toSQLType(dt1),
- "dataType2" -> toSQLType(dt2),
- "hint" -> extraHintForAnsiTypeCoercionPlan(operator)))
+ throw QueryCompilationErrors.incompatibleColumnTypeError(
+ operator = operator.nodeName,
+ columnOrdinalNumber = ci,
+ tableOrdinalNumber = ti + 1,
+ dataType1 = dt1,
+ dataType2 = dt2,
+ hint = extraHintForAnsiTypeCoercionPlan(operator),
+ origin = operator.origin
+ )
}
}
}
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index e55d4d1f9523..d38c7a01e1c4 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -4271,4 +4271,44 @@ private[sql] object QueryCompilationErrors extends
QueryErrorsBase with Compilat
)
)
}
+
+ def numColumnsMismatch(
+ operator: String,
+ firstNumColumns: Int,
+ invalidOrdinalNum: Int,
+ invalidNumColumns: Int,
+ origin: Origin): Throwable = {
+ new AnalysisException(
+ errorClass = "NUM_COLUMNS_MISMATCH",
+ messageParameters = Map(
+ "operator" -> toSQLStmt(operator),
+ "firstNumColumns" -> firstNumColumns.toString,
+ "invalidOrdinalNum" -> ordinalNumber(invalidOrdinalNum),
+ "invalidNumColumns" -> invalidNumColumns.toString
+ ),
+ origin = origin
+ )
+ }
+
+ def incompatibleColumnTypeError(
+ operator: String,
+ columnOrdinalNumber: Int,
+ tableOrdinalNumber: Int,
+ dataType1: DataType,
+ dataType2: DataType,
+ hint: String,
+ origin: Origin): Throwable = {
+ new AnalysisException(
+ errorClass = "INCOMPATIBLE_COLUMN_TYPE",
+ messageParameters = Map(
+ "operator" -> toSQLStmt(operator),
+ "columnOrdinalNumber" -> ordinalNumber(columnOrdinalNumber),
+ "tableOrdinalNumber" -> ordinalNumber(tableOrdinalNumber),
+ "dataType1" -> toSQLType(dataType1),
+ "dataType2" -> toSQLType(dataType2),
+ "hint" -> hint
+ ),
+ origin = origin
+ )
+ }
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]