This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 737a65e0ec41 [SPARK-50096][SQL] Assign appropriate error condition for
`_LEGACY_ERROR_TEMP_2150`: `TUPLE_SIZE_EXCEEDS_LIMIT`
737a65e0ec41 is described below
commit 737a65e0ec41bb4ae83fc98c2b935f33184201b0
Author: Haejoon Lee <[email protected]>
AuthorDate: Wed Nov 6 09:03:45 2024 +0100
[SPARK-50096][SQL] Assign appropriate error condition for
`_LEGACY_ERROR_TEMP_2150`: `TUPLE_SIZE_EXCEEDS_LIMIT`
### What changes were proposed in this pull request?
This PR proposes to assign proper error condition & sqlstate for
`_LEGACY_ERROR_TEMP_2150`: `TUPLE_SIZE_EXCEEDS_LIMIT`
### Why are the changes needed?
To improve the error message by assigning a proper error condition and
SQLSTATE
### Does this PR introduce _any_ user-facing change?
No, only the user-facing error message is improved
### How was this patch tested?
Updated the existing tests
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #48631 from itholic/LEGACY_2150.
Lead-authored-by: Haejoon Lee <[email protected]>
Co-authored-by: Haejoon Lee <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
.../src/main/resources/error/error-conditions.json | 17 ++++++++++++-----
.../spark/sql/catalyst/encoders/AgnosticEncoder.scala | 4 +++-
.../org/apache/spark/sql/errors/ExecutionErrors.scala | 6 +++++-
.../sql/catalyst/encoders/ExpressionEncoderSuite.scala | 13 ++++++++++++-
4 files changed, 32 insertions(+), 8 deletions(-)
diff --git a/common/utils/src/main/resources/error/error-conditions.json
b/common/utils/src/main/resources/error/error-conditions.json
index b8722f26105d..31c10f9b9aac 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -4641,6 +4641,18 @@
],
"sqlState" : "42K09"
},
+ "TUPLE_IS_EMPTY" : {
+ "message" : [
+ "Due to Scala's limited support of tuples, an empty tuple is not supported."
+ ],
+ "sqlState" : "22004"
+ },
+ "TUPLE_SIZE_EXCEEDS_LIMIT" : {
+ "message" : [
+ "Due to Scala's limited support of tuples, tuples with more than 22
elements are not supported."
+ ],
+ "sqlState" : "54011"
+ },
"UDTF_ALIAS_NUMBER_MISMATCH" : {
"message" : [
"The number of aliases supplied in the AS clause does not match the
number of columns output by the UDTF.",
@@ -7249,11 +7261,6 @@
"null value found but field <name> is not nullable."
]
},
- "_LEGACY_ERROR_TEMP_2150" : {
- "message" : [
- "Due to Scala's limited support of tuple, tuple with more than 22
elements are not supported."
- ]
- },
"_LEGACY_ERROR_TEMP_2154" : {
"message" : [
"Failed to get outer pointer for <innerCls>."
diff --git
a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/encoders/AgnosticEncoder.scala
b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/encoders/AgnosticEncoder.scala
index 10f734b3f84e..9ae7de97abf5 100644
---
a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/encoders/AgnosticEncoder.scala
+++
b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/encoders/AgnosticEncoder.scala
@@ -139,7 +139,9 @@ object AgnosticEncoders {
encoders: Seq[AgnosticEncoder[_]],
elementsCanBeNull: Boolean = false): AgnosticEncoder[_] = {
val numElements = encoders.size
- if (numElements < 1 || numElements > MAX_TUPLE_ELEMENTS) {
+ if (numElements < 1) {
+ throw ExecutionErrors.emptyTupleNotSupportedError()
+ } else if (numElements > MAX_TUPLE_ELEMENTS) {
throw ExecutionErrors.elementsOfTupleExceedLimitError()
}
val fields = encoders.zipWithIndex.map { case (e, id) =>
diff --git
a/sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala
b/sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala
index 0ee1d7037d43..862067765447 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/ExecutionErrors.scala
@@ -228,7 +228,11 @@ private[sql] trait ExecutionErrors extends
DataTypeErrorsBase {
}
def elementsOfTupleExceedLimitError(): SparkUnsupportedOperationException = {
- new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_2150")
+ new SparkUnsupportedOperationException("TUPLE_SIZE_EXCEEDS_LIMIT")
+ }
+
+ def emptyTupleNotSupportedError(): SparkUnsupportedOperationException = {
+ new SparkUnsupportedOperationException("TUPLE_IS_EMPTY")
}
def invalidAgnosticEncoderError(encoder: AnyRef): Throwable = {
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
index 3b5cbed2cc52..77c9672fd957 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
@@ -518,7 +518,18 @@ class ExpressionEncoderSuite extends
CodegenInterpretedPlanTest with AnalysisTes
exception = intercept[SparkUnsupportedOperationException] {
Encoders.tupleEncoder(encoders: _*)
},
- condition = "_LEGACY_ERROR_TEMP_2150",
+ condition = "TUPLE_SIZE_EXCEEDS_LIMIT",
+ parameters = Map.empty)
+ }
+
+ test("throw exception for empty tuple") {
+ val encoders = Seq.empty[Encoder[Int]]
+
+ checkError(
+ exception = intercept[SparkUnsupportedOperationException] {
+ Encoders.tupleEncoder(encoders: _*)
+ },
+ condition = "TUPLE_IS_EMPTY",
parameters = Map.empty)
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]