This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new cd832e546fc [SPARK-41565][SQL] Add the error class `UNRESOLVED_ROUTINE`
cd832e546fc is described below

commit cd832e546fc58c522d4afa90fc781c3be2cc527e
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Wed Dec 21 16:33:02 2022 +0300

    [SPARK-41565][SQL] Add the error class `UNRESOLVED_ROUTINE`
    
    ### What changes were proposed in this pull request?
    In the PR, I propose to remove the error classes `_LEGACY_ERROR_TEMP_1041`,
    `_LEGACY_ERROR_TEMP_1242` and `_LEGACY_ERROR_TEMP_1243`, and to use the new
    error class `UNRESOLVED_ROUTINE` instead.
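
    For reference (quoting `error-classes.json` from the diff below), the legacy
    classes rendered messages like:
    ```
    Undefined function: <rawName>. This function is neither a built-in/temporary function, nor a persistent function that is qualified as <fullName>.
    ```
    while the new class renders:
    ```
    Cannot resolve function <routineName> on search path <searchPath>.
    ```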
    
    Closes #38870
    
    ### Why are the changes needed?
    To improve the user experience with Spark SQL, and to unify the
    representation of error messages.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes, the PR changes a user-facing error message.
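
    For example (an illustrative sketch: the exact rendering and the assumed
    default search path `spark_catalog.default` may differ per session),
    `SELECT undefined_fn()` now fails with an error along the lines of:
    ```
    [UNRESOLVED_ROUTINE] Cannot resolve function `undefined_fn` on search path [`system`.`builtin`, `system`.`session`, `spark_catalog`.`default`].
    ```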
    
    ### How was this patch tested?
    By running the modified test suites:
    ```
    $ build/sbt "core/testOnly *SparkThrowableSuite"
    $ build/sbt "test:testOnly *SQLQuerySuite"
    $ build/sbt "test:testOnly *UDFSuite"
    $ build/sbt "test:testOnly *HiveUDFSuite"
    $ build/sbt "test:testOnly *HiveQuerySuite"
    $ build/sbt "test:testOnly *JDBCV2Suite"
    $ build/sbt "test:testOnly *DDLSuite"
    $ build/sbt "test:testOnly *DataSourceV2FunctionSuite"
    $ build/sbt "test:testOnly *LookupFunctionsSuite"
    $ PYSPARK_PYTHON=python3 build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite"
    ```
    
    Closes #39095 from MaxGekk/unresolved-routine-error-class.
    
    Lead-authored-by: Max Gekk <max.g...@gmail.com>
    Co-authored-by: Serge Rielau <se...@rielau.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json   | 21 ++++-------
 .../spark/sql/catalyst/analysis/Analyzer.scala     | 15 ++++++--
 .../sql/catalyst/analysis/CheckAnalysis.scala      |  7 ++--
 .../sql/catalyst/analysis/FunctionRegistry.scala   |  2 +-
 .../spark/sql/errors/QueryCompilationErrors.scala  | 41 ++++++++++------------
 .../catalyst/analysis/LookupFunctionsSuite.scala   | 10 +++---
 .../apache/spark/sql/internal/CatalogImpl.scala    |  5 ++-
 .../double-quoted-identifiers-disabled.sql.out     | 13 ++++---
 .../ansi/double-quoted-identifiers-enabled.sql.out | 26 ++++++++------
 .../sql-tests/results/ansi/interval.sql.out        | 28 ++++++++-------
 .../results/double-quoted-identifiers.sql.out      | 13 ++++---
 .../sql-tests/results/inline-table.sql.out         |  7 ++--
 .../resources/sql-tests/results/interval.sql.out   | 28 ++++++++-------
 .../results/postgreSQL/window_part3.sql.out        |  7 ++--
 .../sql-tests/results/udf/udf-inline-table.sql.out |  7 ++--
 .../scala/org/apache/spark/sql/SQLQuerySuite.scala | 14 ++++++--
 .../test/scala/org/apache/spark/sql/UDFSuite.scala | 19 ++++++----
 .../sql/connector/DataSourceV2FunctionSuite.scala  | 15 ++++++--
 .../spark/sql/execution/command/DDLSuite.scala     | 34 ++++++++++++------
 .../org/apache/spark/sql/jdbc/JDBCV2Suite.scala    | 32 ++++++++++++-----
 .../spark/sql/hive/execution/HiveQuerySuite.scala  | 16 ++++-----
 .../spark/sql/hive/execution/HiveUDFSuite.scala    | 17 ++++++---
 .../spark/sql/hive/execution/SQLQuerySuite.scala   | 13 +++++--
 23 files changed, 244 insertions(+), 146 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index e6ae5678993..989df84ed53 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -1266,6 +1266,12 @@
     },
     "sqlState" : "42000"
   },
+  "UNRESOLVED_ROUTINE" : {
+    "message" : [
+      "Cannot resolve function <routineName> on search path <searchPath>."
+    ],
+    "sqlState" : "42000"
+  },
   "UNSUPPORTED_DATATYPE" : {
     "message" : [
       "Unsupported data type <typeName>"
@@ -2060,11 +2066,6 @@
       "Gap duration expression used in session window must be 
CalendarIntervalType, but got <dt>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1041" : {
-    "message" : [
-      "Undefined function <name>."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1045" : {
     "message" : [
       "ALTER TABLE SET LOCATION does not support partition for v2 tables."
@@ -2920,16 +2921,6 @@
       "CREATE-TABLE-AS-SELECT cannot create table with location to a non-empty 
directory <tablePath>. To allow overwriting the existing non-empty directory, 
set '<config>' to true."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1242" : {
-    "message" : [
-      "Undefined function: <rawName>. This function is neither a 
built-in/temporary function, nor a persistent function that is qualified as 
<fullName>."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_1243" : {
-    "message" : [
-      "Undefined function: <rawName>"
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1244" : {
     "message" : [
       "Attempted to unset non-existent property '<property>' in table 
'<table>'."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index c21ff7bd90f..daeddd309d7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -2323,7 +2323,11 @@ class Analyzer(override val catalogManager: CatalogManager)
               externalFunctionNameSet.add(fullName)
               f
             } else {
-              throw QueryCompilationErrors.noSuchFunctionError(nameParts, f, Some(fullName))
+              val catalogPath = (catalog.name() +: catalogManager.currentNamespace).mkString(".")
+              throw QueryCompilationErrors.unresolvedRoutineError(
+                nameParts,
+                Seq("system.builtin", "system.session", catalogPath),
+                f.origin)
             }
           }
       }
@@ -2420,8 +2424,13 @@ class Analyzer(override val catalogManager: CatalogManager)
                 errorClass = "_LEGACY_ERROR_TEMP_2306",
                 messageParameters = Map(
                   "class" -> other.getClass.getCanonicalName))
-              // We don't support persistent high-order functions yet.
-            }.getOrElse(throw QueryCompilationErrors.noSuchFunctionError(nameParts, u))
+            }.getOrElse {
+              throw QueryCompilationErrors.unresolvedRoutineError(
+                nameParts,
+                // We don't support persistent high-order functions yet.
+                Seq("system.builtin", "system.session"),
+                u.origin)
+            }
           }
 
           case u if !u.childrenResolved => u // Skip until children are resolved.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index 2c57c2b9bab..d5f1daeb346 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -170,8 +170,11 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog with QueryErrorsB
         u.tableNotFound(u.multipartIdentifier)
 
       case u: UnresolvedFunc =>
-        throw QueryCompilationErrors.noSuchFunctionError(
-          u.multipartIdentifier, u, u.possibleQualifiedName)
+        val catalogPath = (currentCatalog.name +: catalogManager.currentNamespace).mkString(".")
+        throw QueryCompilationErrors.unresolvedRoutineError(
+          u.multipartIdentifier,
+          Seq("system.builtin", "system.session", catalogPath),
+          u.origin)
 
       case u: UnresolvedHint =>
         u.failAnalysis(
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index f9be7bbefbe..4b6603b6358 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -227,7 +227,7 @@ trait SimpleFunctionRegistryBase[T] extends FunctionRegistryBase[T] with Logging
   override def lookupFunction(name: FunctionIdentifier, children: Seq[Expression]): T = {
     val func = synchronized {
       functionBuilders.get(normalizeFuncName(name)).map(_._2).getOrElse {
-        throw QueryCompilationErrors.functionUndefinedError(name)
+        throw QueryCompilationErrors.unresolvedRoutineError(name, Seq("system.builtin"))
       }
     }
     func(children)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index d537c1685d6..621f3e1ca90 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -631,10 +631,25 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
       messageParameters = Map("dt" -> dt.toString))
   }
 
-  def functionUndefinedError(name: FunctionIdentifier): Throwable = {
-  def unresolvedRoutineError(name: FunctionIdentifier, searchPath: Seq[String]): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1041",
-      messageParameters = Map("name" -> name.toString))
+      errorClass = "UNRESOLVED_ROUTINE",
+      messageParameters = Map(
+        "routineName" -> toSQLId(name.funcName),
+        "searchPath" -> searchPath.map(toSQLId).mkString("[", ", ", "]")))
+  }
+
+  def unresolvedRoutineError(
+      nameParts: Seq[String],
+      searchPath: Seq[String],
+      context: Origin): Throwable = {
+    new AnalysisException(
+      errorClass = "UNRESOLVED_ROUTINE",
+      messageParameters = Map(
+        "routineName" -> toSQLId(nameParts),
+        "searchPath" -> searchPath.map(toSQLId).mkString("[", ", ", "]")
+      ),
+      origin = context)
   }
 
   def invalidFunctionArgumentsError(
@@ -2349,26 +2364,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
         "config" -> SQLConf.ALLOW_NON_EMPTY_LOCATION_IN_CTAS.key))
   }
 
-  def noSuchFunctionError(
-      rawName: Seq[String],
-      t: TreeNode[_],
-      fullName: Option[Seq[String]] = None): Throwable = {
-    if (rawName.length == 1 && fullName.isDefined) {
-      new AnalysisException(
-        errorClass = "_LEGACY_ERROR_TEMP_1242",
-        messageParameters = Map(
-          "rawName" -> rawName.head,
-          "fullName" -> fullName.get.quoted
-        ),
-        origin = t.origin)
-    } else {
-      new AnalysisException(
-        errorClass = "_LEGACY_ERROR_TEMP_1243",
-        messageParameters = Map("rawName" -> rawName.quoted),
-        origin = t.origin)
-    }
-  }
-
   def unsetNonExistentPropertyError(property: String, table: TableIdentifier): Throwable = {
     new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1244",
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/LookupFunctionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/LookupFunctionsSuite.scala
index de8c57f9fd4..ae32365e69b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/LookupFunctionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/LookupFunctionsSuite.scala
@@ -60,10 +60,12 @@ class LookupFunctionsSuite extends PlanTest {
               )
             )
           }
-
-          assert(cause.getMessage.contains("Undefined function: undefined_fn"))
-          // SPARK-21318: the error message should contains the current database name
-          assert(cause.getMessage.contains("db1"))
+          checkError(
+            exception = cause,
+            errorClass = "UNRESOLVED_ROUTINE",
+            parameters = Map(
+              "routineName" -> "`undefined_fn`",
+              "searchPath" -> "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`db1`]"))
         } finally {
           catalog.reset()
         }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
index 335e52fee18..81f7f8a8e73 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
@@ -274,7 +274,10 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
           className = className,
           isTemporary = true)
 
-      case _ => throw QueryCompilationErrors.noSuchFunctionError(ident, plan)
+      case _ =>
+        val catalogPath = (currentCatalog +:
+          sparkSession.sessionState.catalogManager.currentNamespace).mkString(".")
+        throw QueryCompilationErrors.unresolvedRoutineError(ident, Seq(catalogPath), plan.origin)
     }
   }
 
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-disabled.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-disabled.sql.out
index f34070baa69..d1e4699f727 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-disabled.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-disabled.sql.out
@@ -259,10 +259,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "fullName" : "spark_catalog.default.not_exist",
-    "rawName" : "not_exist"
+    "routineName" : "`not_exist`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -281,9 +282,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1243",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "rawName" : "not_exist.not_exist"
+    "routineName" : "`not_exist`.`not_exist`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-enabled.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-enabled.sql.out
index 426951377f8..3611538a1b4 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-enabled.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-enabled.sql.out
@@ -131,10 +131,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "fullName" : "spark_catalog.default.not_exist",
-    "rawName" : "not_exist"
+    "routineName" : "`not_exist`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -153,9 +154,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1243",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "rawName" : "not_exist.not_exist"
+    "routineName" : "`not_exist`.`not_exist`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -299,10 +302,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "fullName" : "spark_catalog.default.not_exist",
-    "rawName" : "not_exist"
+    "routineName" : "`not_exist`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -321,9 +325,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1243",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "rawName" : "not_exist.not_exist"
+    "routineName" : "`not_exist`.`not_exist`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index 493d8769ad4..b29ff88d3c2 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -1682,10 +1682,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "fullName" : "spark_catalog.default.interval",
-    "rawName" : "interval"
+    "routineName" : "`interval`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1704,10 +1705,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "fullName" : "spark_catalog.default.interval",
-    "rawName" : "interval"
+    "routineName" : "`interval`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1742,10 +1744,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "fullName" : "spark_catalog.default.interval",
-    "rawName" : "interval"
+    "routineName" : "`interval`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1764,10 +1767,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "fullName" : "spark_catalog.default.interval",
-    "rawName" : "interval"
+    "routineName" : "`interval`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out b/sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out
index f34070baa69..d1e4699f727 100644
--- a/sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out
@@ -259,10 +259,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "fullName" : "spark_catalog.default.not_exist",
-    "rawName" : "not_exist"
+    "routineName" : "`not_exist`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -281,9 +282,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1243",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "rawName" : "not_exist.not_exist"
+    "routineName" : "`not_exist`.`not_exist`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out b/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out
index 23e657a587a..a3f856bb9e9 100644
--- a/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out
@@ -199,10 +199,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "fullName" : "spark_catalog.default.random_not_exist_func",
-    "rawName" : "random_not_exist_func"
+    "routineName" : "`random_not_exist_func`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
index 690f3da0f9a..bfe7789c6b9 100644
--- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
@@ -1563,10 +1563,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "fullName" : "spark_catalog.default.interval",
-    "rawName" : "interval"
+    "routineName" : "`interval`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1585,10 +1586,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "fullName" : "spark_catalog.default.interval",
-    "rawName" : "interval"
+    "routineName" : "`interval`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1623,10 +1625,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "fullName" : "spark_catalog.default.interval",
-    "rawName" : "interval"
+    "routineName" : "`interval`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1645,10 +1648,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "fullName" : "spark_catalog.default.interval",
-    "rawName" : "interval"
+    "routineName" : "`interval`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out
index 38005e0c607..8dd52749d63 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out
@@ -436,10 +436,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "fullName" : "spark_catalog.default.range",
-    "rawName" : "range"
+    "routineName" : "`range`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out
index b7e8bb74a76..fe965b9653f 100644
--- a/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out
@@ -183,10 +183,11 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "errorClass" : "UNRESOLVED_ROUTINE",
+  "sqlState" : "42000",
   "messageParameters" : {
-    "fullName" : "spark_catalog.default.random_not_exist_func",
-    "rawName" : "random_not_exist_func"
+    "routineName" : "`random_not_exist_func`",
+    "searchPath" : "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index fe7baa52883..b1b3ec97d1e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -95,9 +95,17 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
 
     checkKeywordsNotExist(sql("describe functioN Upper"), "Extended Usage")
 
-    val e = intercept[AnalysisException](sql("describe functioN abcadf"))
-    assert(e.message.contains("Undefined function: abcadf. This function is neither a " +
-      "built-in/temporary function, nor a persistent function"))
+    val sqlText = "describe functioN abcadf"
+    checkError(
+      exception = intercept[AnalysisException](sql(sqlText)),
+      errorClass = "UNRESOLVED_ROUTINE",
+      parameters = Map(
+        "routineName" -> "`abcadf`",
+        "searchPath" -> "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"),
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = 23))
   }
 
   test("SPARK-34678: describe functions for table-valued functions") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
index e08e7cb789a..4e01677c171 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
@@ -139,12 +139,19 @@ class UDFSuite extends QueryTest with SharedSparkSession {
   }
 
   test("error reporting for undefined functions") {
-    val df = spark.emptyDataFrame
-    val e = intercept[AnalysisException] {
-      df.selectExpr("a_function_that_does_not_exist()")
-    }
-    assert(e.getMessage.contains("Undefined function"))
-    assert(e.getMessage.contains("a_function_that_does_not_exist"))
+    val sqlText = "a_function_that_does_not_exist()"
+    checkError(
+      exception = intercept[AnalysisException] {
+        spark.emptyDataFrame.selectExpr(sqlText)
+      },
+      errorClass = "UNRESOLVED_ROUTINE",
+      parameters = Map(
+        "routineName" -> "`a_function_that_does_not_exist`",
+        "searchPath" -> "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"),
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = 31))
   }
 
   test("Simple UDF") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala
index 8277a8e2979..9a31948889c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala
@@ -141,9 +141,18 @@ class DataSourceV2FunctionSuite extends DatasourceV2SQLBase {
   }
 
   test("undefined function") {
-    assert(intercept[AnalysisException](
-      sql("SELECT testcat.non_exist('abc')").collect()
-    ).getMessage.contains("Undefined function"))
+    checkError(
+      exception = intercept[AnalysisException](
+        sql("SELECT testcat.non_exist('abc')").collect()
+      ),
+      errorClass = "UNRESOLVED_ROUTINE",
+      parameters = Map(
+        "routineName" -> "`testcat`.`non_exist`",
+        "searchPath" -> "[`system`.`builtin`, `system`.`session`, 
`testcat`.`default`]"),
+      context = ExpectedContext(
+        fragment = "testcat.non_exist('abc')",
+        start = 7,
+        stop = 30))
   }
 
   test("non-function catalog") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 1e12340d983..6cc37a41210 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -2079,10 +2079,18 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
     }.getMessage
     assert(msg.contains(
       "md5 is a built-in/temporary function. 'REFRESH FUNCTION' expects a 
persistent function"))
-    val msg2 = intercept[AnalysisException] {
-      sql("REFRESH FUNCTION default.md5")
-    }.getMessage
-    assert(msg2.contains(s"Undefined function: default.md5"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql("REFRESH FUNCTION default.md5")
+      },
+      errorClass = "UNRESOLVED_ROUTINE",
+      parameters = Map(
+        "routineName" -> "`default`.`md5`",
+        "searchPath" -> "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"),
+      context = ExpectedContext(
+        fragment = "REFRESH FUNCTION default.md5",
+        start = 0,
+        stop = 27))
 
     withUserDefinedFunction("func1" -> true) {
       sql("CREATE TEMPORARY FUNCTION func1 AS 
'test.org.apache.spark.sql.MyDoubleAvg'")
@@ -2105,12 +2113,18 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
       assert(!spark.sessionState.catalog.isRegisteredFunction(func))
       sql("REFRESH FUNCTION func1")
       assert(spark.sessionState.catalog.isRegisteredFunction(func))
-      val msg = intercept[AnalysisException] {
-        sql("REFRESH FUNCTION func2")
-      }.getMessage
-      assert(msg.contains(s"Undefined function: func2. This function is 
neither a " +
-        "built-in/temporary function, nor a persistent function that is 
qualified as " +
-        "spark_catalog.default.func2"))
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("REFRESH FUNCTION func2")
+        },
+        errorClass = "UNRESOLVED_ROUTINE",
+        parameters = Map(
+          "routineName" -> "`func2`",
+          "searchPath" -> "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"),
+        context = ExpectedContext(
+          fragment = "REFRESH FUNCTION func2",
+          start = 0,
+          stop = 21))
       assert(spark.sessionState.catalog.isRegisteredFunction(func))
 
       spark.sessionState.catalog.externalCatalog.dropFunction("default", 
"func1")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
index 46281ee6644..98d61499b52 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
@@ -2549,14 +2549,30 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHel
       val df = sql("SELECT h2.my_avg(id) FROM h2.test.people")
       checkAggregateRemoved(df)
       checkAnswer(df, Row(1) :: Nil)
-      val e1 = intercept[AnalysisException] {
-        checkAnswer(sql("SELECT h2.test.my_avg2(id) FROM h2.test.people"), 
Seq.empty)
-      }
-      assert(e1.getMessage.contains("Undefined function: h2.test.my_avg2"))
-      val e2 = intercept[AnalysisException] {
-        checkAnswer(sql("SELECT h2.my_avg2(id) FROM h2.test.people"), 
Seq.empty)
-      }
-      assert(e2.getMessage.contains("Undefined function: h2.my_avg2"))
+      checkError(
+        exception = intercept[AnalysisException] {
+          checkAnswer(sql("SELECT h2.test.my_avg2(id) FROM h2.test.people"), 
Seq.empty)
+        },
+        errorClass = "UNRESOLVED_ROUTINE",
+        parameters = Map(
+          "routineName" -> "`h2`.`test`.`my_avg2`",
+          "searchPath" -> "[`system`.`builtin`, `system`.`session`, 
`h2`.`default`]"),
+        context = ExpectedContext(
+          fragment = "h2.test.my_avg2(id)",
+          start = 7,
+          stop = 25))
+      checkError(
+        exception = intercept[AnalysisException] {
+          checkAnswer(sql("SELECT h2.my_avg2(id) FROM h2.test.people"), 
Seq.empty)
+        },
+        errorClass = "UNRESOLVED_ROUTINE",
+        parameters = Map(
+          "routineName" -> "`h2`.`my_avg2`",
+          "searchPath" -> "[`system`.`builtin`, `system`.`session`, 
`h2`.`default`]"),
+        context = ExpectedContext(
+          fragment = "h2.my_avg2(id)",
+          start = 7,
+          stop = 20))
     } finally {
       JdbcDialects.unregisterDialect(testH2Dialect)
       JdbcDialects.registerDialect(H2Dialect)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index e0ecc51a5d5..9da94223dfb 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -1422,15 +1422,15 @@ class HiveQuerySuite extends HiveComparisonTest with SQLTestUtils with BeforeAnd
   }
 
   test("lookup hive UDF in another thread") {
-    checkErrorMatchPVals(
+    checkError(
       exception = intercept[AnalysisException] {
         range(1).selectExpr("not_a_udf()")
       },
-      errorClass = "_LEGACY_ERROR_TEMP_1242",
+      errorClass = "UNRESOLVED_ROUTINE",
       sqlState = None,
       parameters = Map(
-        "rawName" -> "not_a_udf",
-        "fullName" -> "spark_catalog.[a-z]+.not_a_udf"),
+        "routineName" -> "`not_a_udf`",
+        "searchPath" -> "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`a`]"),
       context = ExpectedContext(
         fragment = "not_a_udf()",
         start = 0,
@@ -1439,15 +1439,15 @@ class HiveQuerySuite extends HiveComparisonTest with SQLTestUtils with BeforeAnd
     var success = false
     val t = new Thread("test") {
       override def run(): Unit = {
-        checkErrorMatchPVals(
+        checkError(
           exception = intercept[AnalysisException] {
             range(1).selectExpr("not_a_udf()")
           },
-          errorClass = "_LEGACY_ERROR_TEMP_1242",
+          errorClass = "UNRESOLVED_ROUTINE",
           sqlState = None,
           parameters = Map(
-            "rawName" -> "not_a_udf",
-            "fullName" -> "spark_catalog.[a-z]+.not_a_udf"),
+            "routineName" -> "`not_a_udf`",
+            "searchPath" -> "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`a`]"),
           context = ExpectedContext(
             fragment = "not_a_udf()",
             start = 0,
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
index 084850e6f0c..f494232502f 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
@@ -570,10 +570,19 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
             sql(s"CREATE FUNCTION dAtABaSe1.test_avg AS 
'${classOf[GenericUDAFAverage].getName}'")
             checkAnswer(sql("SELECT dAtABaSe1.test_avg(1)"), Row(1.0))
           }
-          val message = intercept[AnalysisException] {
-            sql("SELECT dAtABaSe1.unknownFunc(1)")
-          }.getMessage
-          assert(message.contains("Undefined function: dAtABaSe1.unknownFunc"))
+          checkError(
+            exception = intercept[AnalysisException] {
+              sql("SELECT dAtABaSe1.unknownFunc(1)")
+            },
+            errorClass = "UNRESOLVED_ROUTINE",
+            parameters = Map(
+              "routineName" -> "`dAtABaSe1`.`unknownFunc`",
+              "searchPath" ->
+                "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"),
+            context = ExpectedContext(
+              fragment = "dAtABaSe1.unknownFunc(1)",
+              start = 7,
+              stop = 30))
         }
       }
     }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index a8745b2946b..7976dab3c44 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -216,8 +216,17 @@ abstract class SQLQuerySuiteBase extends QueryTest with SQLTestUtils with TestHi
     checkKeywordsNotExist(sql("describe functioN Upper"),
       "Extended Usage")
 
-    val e = intercept[AnalysisException](sql("describe functioN abcadf"))
-    assert(e.message.contains("Undefined function: abcadf"))
+    val sqlText = "describe functioN abcadf"
+    checkError(
+      exception = intercept[AnalysisException](sql(sqlText)),
+      errorClass = "UNRESOLVED_ROUTINE",
+      parameters = Map(
+        "routineName" -> "`abcadf`",
+        "searchPath" -> "[`system`.`builtin`, `system`.`session`, 
`spark_catalog`.`default`]"),
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = 23))
 
     checkKeywordsExist(sql("describe functioN  `~`"),
       "Function: ~",


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

