This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 7823f84942a [SPARK-41578][SQL] Assign name to _LEGACY_ERROR_TEMP_2141
7823f84942a is described below
commit 7823f84942acd1a1a6abc5c1f9045317795d00fb
Author: itholic <[email protected]>
AuthorDate: Fri Dec 30 12:18:50 2022 +0500
[SPARK-41578][SQL] Assign name to _LEGACY_ERROR_TEMP_2141
### What changes were proposed in this pull request?
This PR proposes to assign the name "ENCODER_NOT_FOUND" to the error class
_LEGACY_ERROR_TEMP_2141.
### Why are the changes needed?
We should assign a proper name to each _LEGACY_ERROR_TEMP_* error class.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
`./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite*"`
Closes #39279 from itholic/LEGACY_2141.
Authored-by: itholic <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
core/src/main/resources/error/error-classes.json | 11 ++-
.../spark/sql/catalyst/ScalaReflection.scala | 2 +-
.../spark/sql/errors/QueryExecutionErrors.scala | 8 +--
.../encoders/EncoderErrorMessageSuite.scala | 80 ++++++++++------------
.../catalyst/encoders/ExpressionEncoderSuite.scala | 13 ++--
5 files changed, 52 insertions(+), 62 deletions(-)
diff --git a/core/src/main/resources/error/error-classes.json
b/core/src/main/resources/error/error-classes.json
index 21b7c467b64..67398a30180 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -459,6 +459,11 @@
"The index 0 is invalid. An index shall be either < 0 or > 0 (the first
element has index 1)."
]
},
+ "ENCODER_NOT_FOUND" : {
+ "message" : [
+ "Not found an encoder of the type <typeName> to Spark SQL internal
representation. Consider to change the input type to one of supported at
https://spark.apache.org/docs/latest/sql-ref-datatypes.html."
+ ]
+ },
"FAILED_EXECUTE_UDF" : {
"message" : [
"Failed to execute user defined function (<functionName>: (<signature>)
=> <result>)"
@@ -4116,12 +4121,6 @@
"<walkedTypePath>"
]
},
- "_LEGACY_ERROR_TEMP_2141" : {
- "message" : [
- "No Encoder found for <tpe>",
- "<walkedTypePath>"
- ]
- },
"_LEGACY_ERROR_TEMP_2142" : {
"message" : [
"Attributes for type <schema> is not supported"
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
index 0a8a823216f..e02e42cea1a 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
@@ -779,7 +779,7 @@ object ScalaReflection extends ScalaReflection {
}
ProductEncoder(ClassTag(getClassFromType(t)), params)
case _ =>
- throw QueryExecutionErrors.cannotFindEncoderForTypeError(tpe.toString,
path)
+ throw QueryExecutionErrors.cannotFindEncoderForTypeError(tpe.toString)
}
}
}
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index cef4acafe07..3e234cfee2c 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1483,13 +1483,11 @@ private[sql] object QueryExecutionErrors extends
QueryErrorsBase {
"walkedTypePath" -> walkedTypePath.toString()))
}
- def cannotFindEncoderForTypeError(
- tpe: String, walkedTypePath: WalkedTypePath):
SparkUnsupportedOperationException = {
+ def cannotFindEncoderForTypeError(typeName: String):
SparkUnsupportedOperationException = {
new SparkUnsupportedOperationException(
- errorClass = "_LEGACY_ERROR_TEMP_2141",
+ errorClass = "ENCODER_NOT_FOUND",
messageParameters = Map(
- "tpe" -> tpe,
- "walkedTypePath" -> walkedTypePath.toString()))
+ "typeName" -> typeName))
}
def attributesForTypeUnsupportedError(schema: Schema):
SparkUnsupportedOperationException = {
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderErrorMessageSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderErrorMessageSuite.scala
index 8c766ef8299..501dfa58305 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderErrorMessageSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderErrorMessageSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.encoders
import scala.reflect.ClassTag
-import org.apache.spark.SparkFunSuite
+import org.apache.spark.{SparkFunSuite, SparkUnsupportedOperationException}
import org.apache.spark.sql.Encoders
class NonEncodable(i: Int)
@@ -52,50 +52,40 @@ class EncoderErrorMessageSuite extends SparkFunSuite {
}
test("nice error message for missing encoder") {
- val errorMsg1 =
-
intercept[UnsupportedOperationException](ExpressionEncoder[ComplexNonEncodable1]).getMessage
- assert(errorMsg1.contains(
- s"""root class: "${clsName[ComplexNonEncodable1]}""""))
- assert(errorMsg1.contains(
- s"""field (class: "${clsName[NonEncodable]}", name: "name1")"""))
-
- val errorMsg2 =
-
intercept[UnsupportedOperationException](ExpressionEncoder[ComplexNonEncodable2]).getMessage
- assert(errorMsg2.contains(
- s"""root class: "${clsName[ComplexNonEncodable2]}""""))
- assert(errorMsg2.contains(
- s"""field (class: "${clsName[ComplexNonEncodable1]}", name: "name2")"""))
- assert(errorMsg1.contains(
- s"""field (class: "${clsName[NonEncodable]}", name: "name1")"""))
-
- val errorMsg3 =
-
intercept[UnsupportedOperationException](ExpressionEncoder[ComplexNonEncodable3]).getMessage
- assert(errorMsg3.contains(
- s"""root class: "${clsName[ComplexNonEncodable3]}""""))
- assert(errorMsg3.contains(
- s"""field (class: "scala.Option", name: "name3")"""))
- assert(errorMsg3.contains(
- s"""option value class: "${clsName[NonEncodable]}""""))
-
- val errorMsg4 =
-
intercept[UnsupportedOperationException](ExpressionEncoder[ComplexNonEncodable4]).getMessage
- assert(errorMsg4.contains(
- s"""root class: "${clsName[ComplexNonEncodable4]}""""))
- assert(errorMsg4.contains(
- s"""field (class: "scala.Array", name: "name4")"""))
- assert(errorMsg4.contains(
- s"""array element class: "${clsName[NonEncodable]}""""))
-
- val errorMsg5 =
-
intercept[UnsupportedOperationException](ExpressionEncoder[ComplexNonEncodable5]).getMessage
- assert(errorMsg5.contains(
- s"""root class: "${clsName[ComplexNonEncodable5]}""""))
- assert(errorMsg5.contains(
- s"""field (class: "scala.Option", name: "name5")"""))
- assert(errorMsg5.contains(
- s"""option value class: "scala.Array""""))
- assert(errorMsg5.contains(
- s"""array element class: "${clsName[NonEncodable]}""""))
+ checkError(
+ exception = intercept[
+
SparkUnsupportedOperationException](ExpressionEncoder[ComplexNonEncodable1]),
+ errorClass = "ENCODER_NOT_FOUND",
+ parameters = Map("typeName" ->
"org.apache.spark.sql.catalyst.encoders.NonEncodable")
+ )
+
+ checkError(
+ exception = intercept[
+
SparkUnsupportedOperationException](ExpressionEncoder[ComplexNonEncodable2]),
+ errorClass = "ENCODER_NOT_FOUND",
+ parameters = Map("typeName" ->
"org.apache.spark.sql.catalyst.encoders.NonEncodable")
+ )
+
+ checkError(
+ exception = intercept[
+
SparkUnsupportedOperationException](ExpressionEncoder[ComplexNonEncodable3]),
+ errorClass = "ENCODER_NOT_FOUND",
+ parameters = Map("typeName" ->
"org.apache.spark.sql.catalyst.encoders.NonEncodable")
+ )
+
+ checkError(
+ exception = intercept[
+
SparkUnsupportedOperationException](ExpressionEncoder[ComplexNonEncodable4]),
+ errorClass = "ENCODER_NOT_FOUND",
+ parameters = Map("typeName" ->
"org.apache.spark.sql.catalyst.encoders.NonEncodable")
+ )
+
+ checkError(
+ exception = intercept[
+
SparkUnsupportedOperationException](ExpressionEncoder[ComplexNonEncodable5]),
+ errorClass = "ENCODER_NOT_FOUND",
+ parameters = Map("typeName" ->
"org.apache.spark.sql.catalyst.encoders.NonEncodable")
+ )
}
private def clsName[T : ClassTag]: String =
implicitly[ClassTag[T]].runtimeClass.getName
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
index e9336405a53..3a0db1ca121 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
@@ -24,7 +24,7 @@ import java.util.Arrays
import scala.collection.mutable.ArrayBuffer
import scala.reflect.runtime.universe.TypeTag
-import org.apache.spark.{SparkArithmeticException, SparkRuntimeException}
+import org.apache.spark.{SparkArithmeticException, SparkRuntimeException,
SparkUnsupportedOperationException}
import org.apache.spark.sql.{Encoder, Encoders}
import org.apache.spark.sql.catalyst.{FooClassWithEnum, FooEnum, OptionalData,
PrimitiveData, ScroogeLikeExample}
import org.apache.spark.sql.catalyst.analysis.AnalysisTest
@@ -483,10 +483,13 @@ class ExpressionEncoderSuite extends
CodegenInterpretedPlanTest with AnalysisTes
productTest(("UDT", new ExamplePoint(0.1, 0.2)))
test("AnyVal class with Any fields") {
- val exception =
intercept[UnsupportedOperationException](implicitly[ExpressionEncoder[Foo]])
- val errorMsg = exception.getMessage
- assert(errorMsg.contains("root class:
\"org.apache.spark.sql.catalyst.encoders.Foo\""))
- assert(errorMsg.contains("No Encoder found for Any"))
+ val exception = intercept[SparkUnsupportedOperationException](
+ implicitly[ExpressionEncoder[Foo]])
+ checkError(
+ exception = exception,
+ errorClass = "ENCODER_NOT_FOUND",
+ parameters = Map("typeName" -> "Any")
+ )
}
test("nullable of encoder schema") {
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]