This is an automated email from the ASF dual-hosted git repository.
wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 62ca7647ed5 [SPARK-41574][SQL] Update `_LEGACY_ERROR_TEMP_2009` as
`INTERNAL_ERROR`
62ca7647ed5 is described below
commit 62ca7647ed5a9b67ad0a873cd511367adf4ccb4b
Author: itholic <[email protected]>
AuthorDate: Wed Jan 18 17:20:40 2023 +0800
[SPARK-41574][SQL] Update `_LEGACY_ERROR_TEMP_2009` as `INTERNAL_ERROR`
### What changes were proposed in this pull request?
This PR proposes to update `_LEGACY_ERROR_TEMP_2009` as `INTERNAL_ERROR`.
### Why are the changes needed?
We should convert an error class into `INTERNAL_ERROR` when it is not triggered by
user space.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
`./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite*"`
Closes #39389 from itholic/SPARK-41574.
Lead-authored-by: itholic <[email protected]>
Co-authored-by: Haejoon Lee <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
---
core/src/main/resources/error/error-classes.json | 5 -----
.../apache/spark/sql/catalyst/expressions/windowExpressions.scala | 6 +++---
.../scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala | 5 +++++
.../scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala | 6 ------
4 files changed, 8 insertions(+), 14 deletions(-)
diff --git a/core/src/main/resources/error/error-classes.json
b/core/src/main/resources/error/error-classes.json
index 2570ffeba3b..465aa162981 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -3581,11 +3581,6 @@
"Find an invalid url string <url>. If necessary set <ansiConfig> to
false to bypass this error."
]
},
- "_LEGACY_ERROR_TEMP_2009" : {
- "message" : [
- "dataType"
- ]
- },
"_LEGACY_ERROR_TEMP_2010" : {
"message" : [
"Window Functions do not support merging."
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
index c32bf4d4d45..6fed9714002 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
@@ -27,7 +27,7 @@ import
org.apache.spark.sql.catalyst.expressions.Cast.{toSQLExpr, toSQLType}
import org.apache.spark.sql.catalyst.expressions.aggregate.{AggregateFunction,
DeclarativeAggregate, NoOp}
import org.apache.spark.sql.catalyst.trees.{BinaryLike, LeafLike, TernaryLike,
UnaryLike}
import org.apache.spark.sql.catalyst.trees.TreePattern.{TreePattern,
UNRESOLVED_WINDOW_EXPRESSION, WINDOW_EXPRESSION}
-import org.apache.spark.sql.errors.{QueryErrorsBase, QueryExecutionErrors}
+import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryErrorsBase,
QueryExecutionErrors}
import org.apache.spark.sql.types._
/**
@@ -62,7 +62,7 @@ case class WindowSpecDefinition(
checkInputDataTypes().isSuccess
override def nullable: Boolean = true
- override def dataType: DataType = throw
QueryExecutionErrors.dataTypeOperationUnsupportedError
+ override def dataType: DataType = throw
QueryCompilationErrors.dataTypeOperationUnsupportedError
override def checkInputDataTypes(): TypeCheckResult = {
frameSpecification match {
@@ -182,7 +182,7 @@ case object CurrentRow extends SpecialFrameBoundary {
* Represents a window frame.
*/
sealed trait WindowFrame extends Expression with Unevaluable {
- override def dataType: DataType = throw
QueryExecutionErrors.dataTypeOperationUnsupportedError
+ override def dataType: DataType = throw
QueryCompilationErrors.dataTypeOperationUnsupportedError
override def nullable: Boolean = false
}
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 83b69d700ac..91412b760bd 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -3401,4 +3401,9 @@ private[sql] object QueryCompilationErrors extends
QueryErrorsBase {
)
)
}
+
+ def dataTypeOperationUnsupportedError(): Throwable = {
+ SparkException.internalError(
+ "The operation `dataType` is not supported.")
+ }
}
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 0473736d018..8634f60e34e 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -378,12 +378,6 @@ private[sql] object QueryExecutionErrors extends
QueryErrorsBase {
)
}
- def dataTypeOperationUnsupportedError(): SparkUnsupportedOperationException
= {
- new SparkUnsupportedOperationException(
- errorClass = "_LEGACY_ERROR_TEMP_2009",
- messageParameters = Map.empty)
- }
-
def mergeUnsupportedByWindowFunctionError():
SparkUnsupportedOperationException = {
new SparkUnsupportedOperationException(
errorClass = "_LEGACY_ERROR_TEMP_2010",
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]