This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
     new e62596a09f3 [SPARK-41175][SQL] Assign a name to the error class _LEGACY_ERROR_TEMP_1078
e62596a09f3 is described below

commit e62596a09f323bfe0f8592ba7a3c45674ce04ac6
Author: panbingkun <pbk1...@gmail.com>
AuthorDate: Sat Nov 19 09:02:33 2022 +0300

    [SPARK-41175][SQL] Assign a name to the error class _LEGACY_ERROR_TEMP_1078

    ### What changes were proposed in this pull request?
    In the PR, I propose to assign the name `CANNOT_LOAD_FUNCTION_CLASS` to the error class `_LEGACY_ERROR_TEMP_1078`.

    ### Why are the changes needed?
    Proper names of error classes should improve user experience with Spark SQL.

    ### Does this PR introduce _any_ user-facing change?
    No.

    ### How was this patch tested?
    By running the affected test suites:
    > $ build/sbt "catalyst/testOnly *SessionCatalogSuite"

    Closes #38696 from panbingkun/SPARK-41175.

    Authored-by: panbingkun <pbk1...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json           | 10 +++++-----
 .../apache/spark/sql/errors/QueryCompilationErrors.scala   |  4 ++--
 .../spark/sql/catalyst/catalog/SessionCatalogSuite.scala   | 15 +++++++++++++++
 .../test/resources/sql-tests/results/udaf/udaf.sql.out     |  4 ++--
 .../test/resources/sql-tests/results/udf/udf-udaf.sql.out  |  4 ++--
 .../test/scala/org/apache/spark/sql/SQLQuerySuite.scala    | 14 ++++++++++----
 .../org/apache/spark/sql/execution/command/DDLSuite.scala  | 14 ++++++++++----
 7 files changed, 46 insertions(+), 19 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index a2d9fa071d0..fe340c517a2 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -48,6 +48,11 @@
     ],
     "sqlState" : "42000"
   },
+  "CANNOT_LOAD_FUNCTION_CLASS" : {
+    "message" : [
+      "Cannot load class <className> when registering the function <functionName>, please make sure it is on the classpath."
+    ]
+  },
   "CANNOT_LOAD_PROTOBUF_CLASS" : {
     "message" : [
       "Could not load Protobuf class with name <protobufClassName>. <explanation>."
@@ -2075,11 +2080,6 @@
       "Partition spec is invalid. <details>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1078" : {
-    "message" : [
-      "Can not load class '<className>' when registering the function '<func>', please make sure it is on the classpath."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1079" : {
     "message" : [
       "Resource Type '<resourceType>' is not supported."
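For readers skimming the diff, this is how the renamed error reaches users. A minimal sketch, assuming a running SparkSession named `spark`; the function and class names below are illustrative, not taken from the patch:

    // Hypothetical repro: registering a temporary function whose implementing
    // class is absent from the classpath fails immediately at registration.
    spark.sql("CREATE TEMPORARY FUNCTION my_udf AS 'com.example.NoSuchClass'")
    // Expected failure, per the message template added above:
    //   org.apache.spark.sql.AnalysisException: [CANNOT_LOAD_FUNCTION_CLASS]
    //   Cannot load class com.example.NoSuchClass when registering the function
    //   `my_udf`, please make sure it is on the classpath.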
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index e6ce12756ca..22b4cfdb3c6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -899,10 +899,10 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
   def cannotLoadClassWhenRegisteringFunctionError(
       className: String, func: FunctionIdentifier): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1078",
+      errorClass = "CANNOT_LOAD_FUNCTION_CLASS",
       messageParameters = Map(
         "className" -> className,
-        "func" -> func.toString))
+        "functionName" -> toSQLId(func.toString)))
   }

   def resourceTypeNotSupportedError(resourceType: String): Throwable = {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
index f86d12474d6..a7254865c1e 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
@@ -1477,6 +1477,21 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
       assert(
         catalog.lookupFunction(
           FunctionIdentifier("temp1"), arguments) === Literal(arguments.length))
+
+      checkError(
+        exception = intercept[AnalysisException] {
+          catalog.registerFunction(
+            CatalogFunction(FunctionIdentifier("temp2", None),
+              "function_class_cannot_load", Seq.empty[FunctionResource]),
+            overrideIfExists = false,
+            None)
+        },
+        errorClass = "CANNOT_LOAD_FUNCTION_CLASS",
+        parameters = Map(
+          "className" -> "function_class_cannot_load",
+          "functionName" -> "`temp2`"
+        )
+      )
     }
   }
diff --git a/sql/core/src/test/resources/sql-tests/results/udaf/udaf.sql.out b/sql/core/src/test/resources/sql-tests/results/udaf/udaf.sql.out
index 69221c731a9..90295e40edd 100644
--- a/sql/core/src/test/resources/sql-tests/results/udaf/udaf.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udaf/udaf.sql.out
@@ -63,10 +63,10 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1078",
+  "errorClass" : "CANNOT_LOAD_FUNCTION_CLASS",
   "messageParameters" : {
     "className" : "test.non.existent.udaf",
-    "func" : "spark_catalog.default.udaf1"
+    "functionName" : "`spark_catalog`.`default`.`udaf1`"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out
index 899905a41cd..6e13a62d4b9 100644
--- a/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out
@@ -63,10 +63,10 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1078",
+  "errorClass" : "CANNOT_LOAD_FUNCTION_CLASS",
   "messageParameters" : {
     "className" : "test.non.existent.udaf",
-    "func" : "spark_catalog.default.udaf1"
+    "functionName" : "`spark_catalog`.`default`.`udaf1`"
   },
   "queryContext" : [ {
     "objectType" : "",
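The golden-file updates above also pick up the switch from `func.toString` to `toSQLId(func.toString)`: each part of the function identifier is now rendered backtick-quoted. A rough sketch of that quoting, as an assumption about what `toSQLId` produces rather than Spark's actual implementation:

    // Illustrative approximation (not Spark's code) of the quoting seen in the
    // golden files: "spark_catalog.default.udaf1" -> "`spark_catalog`.`default`.`udaf1`"
    def quoteParts(id: String): String =
      id.split('.').map(part => s"`$part`").mkString(".")

    assert(quoteParts("spark_catalog.default.udaf1") == "`spark_catalog`.`default`.`udaf1`")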
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index bd8e123f33f..cc703035b1e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -3827,10 +3827,16 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
       s"default.$functionName" -> false,
       functionName -> true) {
       // create temporary function without class
-      val e = intercept[AnalysisException] {
-        sql(s"CREATE TEMPORARY FUNCTION $functionName AS '$sumFuncClass'")
-      }.getMessage
-      assert(e.contains("Can not load class 'org.apache.spark.examples.sql.Spark33084"))
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql(s"CREATE TEMPORARY FUNCTION $functionName AS '$sumFuncClass'")
+        },
+        errorClass = "CANNOT_LOAD_FUNCTION_CLASS",
+        parameters = Map(
+          "className" -> "org.apache.spark.examples.sql.Spark33084",
+          "functionName" -> "`test_udf`"
+        )
+      )
       sql("ADD JAR ivy://org.apache.spark:SPARK-33084:1.0")
       sql(s"CREATE TEMPORARY FUNCTION $functionName AS '$sumFuncClass'")
       // create a view using a function in 'default' database
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 5aa36471770..7245408629c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -2092,10 +2092,16 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
     val function = CatalogFunction(func, "test.non.exists.udf", Seq.empty)
     spark.sessionState.catalog.createFunction(function, false)
     assert(!spark.sessionState.catalog.isRegisteredFunction(func))
-    val err = intercept[AnalysisException] {
-      sql("REFRESH FUNCTION func1")
-    }.getMessage
-    assert(err.contains("Can not load class"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql("REFRESH FUNCTION func1")
+      },
+      errorClass = "CANNOT_LOAD_FUNCTION_CLASS",
+      parameters = Map(
+        "className" -> "test.non.exists.udf",
+        "functionName" -> "`spark_catalog`.`default`.`func1`"
+      )
+    )
     assert(!spark.sessionState.catalog.isRegisteredFunction(func))
   }
 }

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org