This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new add92d28605 [SPARK-39778][SQL] Improve error classes and messages
add92d28605 is described below

commit add92d2860583bfd972e9b9df05455bdac238fb6
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Fri Oct 28 08:09:22 2022 +0300

    [SPARK-39778][SQL] Improve error classes and messages
    
    ### What changes were proposed in this pull request?
    1. Remove unused error classes: INCONSISTENT_BEHAVIOR_CROSS_VERSION.FORMAT_DATETIME_BY_NEW_PARSER, NAMESPACE_ALREADY_EXISTS, NAMESPACE_NOT_EMPTY, NAMESPACE_NOT_FOUND.
    2. Rename the error class WRONG_NUM_PARAMS to WRONG_NUM_ARGS (see the sketch after this list).
    3. Use the correct error class INDEX_ALREADY_EXISTS in the exception `IndexAlreadyExistsException` instead of INDEX_NOT_FOUND.
    4. Quote regexp patterns with single quotes ('').
    5. Fix indentation in [QueryCompilationErrors.scala](https://github.com/apache/spark/pull/38398/files#diff-744ac13f6fe074fddeab09b407404bffa2386f54abc83c501e6e1fe618f6db56).
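
    For illustration, after the rename an arity error is reported under the new class. A sketch (exact exception formatting may vary):
    ```
    scala> spark.sql("SELECT elt(1)")
    org.apache.spark.sql.AnalysisException: [DATATYPE_MISMATCH.WRONG_NUM_ARGS]
    Cannot resolve "elt(1)" due to data type mismatch: The `elt` requires > 1
    parameters but the actual number is 1.
    ```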
    
    ### Why are the changes needed?
    To address technical debt.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes, it modifies user-facing error messages.
    
    ### How was this patch tested?
    By running the modified test suites:
    ```
    $ PYSPARK_PYTHON=python3 build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite"
    $ build/sbt "test:testOnly *SQLQuerySuite"
    $ build/sbt "test:testOnly *StringExpressionsSuite"
    $ build/sbt "test:testOnly *.RegexpExpressionsSuite"
    ```
    
    Closes #38398 from MaxGekk/remove-unused-error-classes.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json   | 36 +++-------------------
 .../catalyst/analysis/AlreadyExistException.scala  |  2 +-
 .../expressions/CallMethodViaReflection.scala      |  2 +-
 .../sql/catalyst/expressions/arithmetic.scala      |  4 +--
 .../spark/sql/catalyst/expressions/hash.scala      |  2 +-
 .../sql/catalyst/expressions/jsonExpressions.scala |  2 +-
 .../catalyst/expressions/stringExpressions.scala   |  2 +-
 .../spark/sql/errors/QueryCompilationErrors.scala  | 22 +++++++------
 .../spark/sql/errors/QueryExecutionErrors.scala    |  4 +--
 .../analysis/ExpressionTypeCheckingSuite.scala     |  6 ++--
 .../expressions/RegexpExpressionsSuite.scala       |  6 ++--
 .../expressions/StringExpressionsSuite.scala       |  2 +-
 .../sql-tests/results/postgreSQL/strings.sql.out   | 16 +++++-----
 .../sql-tests/results/regexp-functions.sql.out     |  6 ++--
 .../results/timestampNTZ/timestamp-ansi.sql.out    |  6 ++--
 .../results/timestampNTZ/timestamp.sql.out         |  6 ++--
 .../apache/spark/sql/DataFrameFunctionsSuite.scala | 16 +++++-----
 .../scala/org/apache/spark/sql/SQLQuerySuite.scala |  2 +-
 18 files changed, 58 insertions(+), 84 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 16347f89463..4797ee0d0d0 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -300,14 +300,14 @@
           "The <exprName> must be between <valueRange> (current value = 
<currentValue>)"
         ]
       },
-      "WRONG_NUM_ENDPOINTS" : {
+      "WRONG_NUM_ARGS" : {
         "message" : [
-          "The number of endpoints must be >= 2 to construct intervals but the 
actual number is <actualNumber>."
+          "The <functionName> requires <expectedNum> parameters but the actual 
number is <actualNum>."
         ]
       },
-      "WRONG_NUM_PARAMS" : {
+      "WRONG_NUM_ENDPOINTS" : {
         "message" : [
-          "The <functionName> requires <expectedNum> parameters but the actual 
number is <actualNum>."
+          "The number of endpoints must be >= 2 to construct intervals but the 
actual number is <actualNumber>."
         ]
       }
     }
@@ -422,12 +422,6 @@
           "Fail to recognize <pattern> pattern in the DateTimeFormatter. 1) 
You can set <config> to \"LEGACY\" to restore the behavior before Spark 3.0. 2) 
You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html";
         ]
       },
-      "FORMAT_DATETIME_BY_NEW_PARSER" : {
-        "message" : [
-          "Spark >= 3.0:",
-          "Fail to format it to <resultCandidate> in the new formatter. You 
can set <config> to \"LEGACY\" to restore the behavior before Spark 3.0, or set 
to \"CORRECTED\" and treat it as an invalid datetime string."
-        ]
-      },
       "PARSE_DATETIME_BY_NEW_PARSER" : {
         "message" : [
           "Spark >= 3.0:",
@@ -595,28 +589,6 @@
       "More than one row returned by a subquery used as an expression."
     ]
   },
-  "NAMESPACE_ALREADY_EXISTS" : {
-    "message" : [
-      "Cannot create namespace <nameSpaceName> because it already exists.",
-      "Choose a different name, drop the existing namespace, or add the IF NOT 
EXISTS clause to tolerate pre-existing namespace."
-    ],
-    "sqlState" : "42000"
-  },
-  "NAMESPACE_NOT_EMPTY" : {
-    "message" : [
-      "Cannot drop a namespace <nameSpaceNameName> because it contains 
objects.",
-      "Use DROP NAMESPACE ... CASCADE to drop the namespace and all its 
objects."
-    ],
-    "sqlState" : "42000"
-  },
-  "NAMESPACE_NOT_FOUND" : {
-    "message" : [
-      "The namespace <nameSpaceName> cannot be found. Verify the spelling and 
correctness of the namespace.",
-      "If you did not qualify the name with, verify the current_schema() 
output, or qualify the name with the correctly.",
-      "To tolerate the error on drop use DROP NAMESPACE IF EXISTS."
-    ],
-    "sqlState" : "42000"
-  },
   "NON_LITERAL_PIVOT_VALUES" : {
     "message" : [
       "Literal expressions required for pivot values, found <expression>."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala
index 50050d39159..a047b187dbf 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala
@@ -127,5 +127,5 @@ class FunctionAlreadyExistsException(errorClass: String, messageParameters: Map[
 }
 
 class IndexAlreadyExistsException(message: String, cause: Option[Throwable] = None)
-  extends AnalysisException(errorClass = "INDEX_NOT_FOUND",
+  extends AnalysisException(errorClass = "INDEX_ALREADY_EXISTS",
     Map("message" -> message), cause)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
index fa52e6cd851..3d01ae1b781 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
@@ -65,7 +65,7 @@ case class CallMethodViaReflection(children: Seq[Expression])
   override def checkInputDataTypes(): TypeCheckResult = {
     if (children.size < 2) {
       DataTypeMismatch(
-        errorSubClass = "WRONG_NUM_PARAMS",
+        errorSubClass = "WRONG_NUM_ARGS",
         messageParameters = Map(
           "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index 4d99c3b02a0..116227224fd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -1209,7 +1209,7 @@ case class Least(children: Seq[Expression]) extends ComplexTypeMergingExpression
   override def checkInputDataTypes(): TypeCheckResult = {
     if (children.length <= 1) {
       DataTypeMismatch(
-        errorSubClass = "WRONG_NUM_PARAMS",
+        errorSubClass = "WRONG_NUM_ARGS",
         messageParameters = Map(
           "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
@@ -1300,7 +1300,7 @@ case class Greatest(children: Seq[Expression]) extends ComplexTypeMergingExpress
   override def checkInputDataTypes(): TypeCheckResult = {
     if (children.length <= 1) {
       DataTypeMismatch(
-        errorSubClass = "WRONG_NUM_PARAMS",
+        errorSubClass = "WRONG_NUM_ARGS",
         messageParameters = Map(
           "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/hash.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/hash.scala
index 4f8ed1953f4..3cdf7b3b0d0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/hash.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/hash.scala
@@ -271,7 +271,7 @@ abstract class HashExpression[E] extends Expression {
   override def checkInputDataTypes(): TypeCheckResult = {
     if (children.length < 1) {
       DataTypeMismatch(
-        errorSubClass = "WRONG_NUM_PARAMS",
+        errorSubClass = "WRONG_NUM_ARGS",
         messageParameters = Map(
           "functionName" ->  toSQLId(prettyName),
           "expectedNum" -> "> 0",
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
index 3529644aeea..aab9b0a13c3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
@@ -396,7 +396,7 @@ case class JsonTuple(children: Seq[Expression])
   override def checkInputDataTypes(): TypeCheckResult = {
     if (children.length < 2) {
       DataTypeMismatch(
-        errorSubClass = "WRONG_NUM_PARAMS",
+        errorSubClass = "WRONG_NUM_ARGS",
         messageParameters = Map(
           "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
index 8ae4bb9c29c..cc47d739d71 100755
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
@@ -276,7 +276,7 @@ case class Elt(
   override def checkInputDataTypes(): TypeCheckResult = {
     if (children.size < 2) {
       DataTypeMismatch(
-        errorSubClass = "WRONG_NUM_PARAMS",
+        errorSubClass = "WRONG_NUM_ARGS",
         messageParameters = Map(
           "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index a006687c6dd..cf7e3524d5b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -978,7 +978,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
   }
 
   def corruptedViewReferredTempFunctionsInCatalogError(e: Exception): Throwable = {
-        new AnalysisException(
+    new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1088",
       messageParameters = Map.empty,
       cause = Some(e))
@@ -1309,19 +1309,19 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
   }
 
   def tableIsNotRowLevelOperationTableError(table: Table): Throwable = {
-        new AnalysisException(
+    new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1122",
       messageParameters = Map("table" -> table.name()))
   }
 
   def cannotRenameTableWithAlterViewError(): Throwable = {
-        new AnalysisException(
+    new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1123",
       messageParameters = Map.empty)
   }
 
   private def notSupportedForV2TablesError(cmd: String): Throwable = {
-        new AnalysisException(
+    new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1124",
       messageParameters = Map("cmd" -> cmd))
   }
@@ -1355,25 +1355,25 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
   }
 
   def databaseFromV1SessionCatalogNotSpecifiedError(): Throwable = {
-        new AnalysisException(
+    new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1125",
       messageParameters = Map.empty)
   }
 
   def nestedDatabaseUnsupportedByV1SessionCatalogError(catalog: String): Throwable = {
-        new AnalysisException(
+    new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1126",
       messageParameters = Map("catalog" -> catalog))
   }
 
   def invalidRepartitionExpressionsError(sortOrders: Seq[Any]): Throwable = {
-        new AnalysisException(
+    new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1127",
       messageParameters = Map("sortOrders" -> sortOrders.toString()))
   }
 
   def partitionColumnNotSpecifiedError(format: String, partitionColumn: String): Throwable = {
-        new AnalysisException(
+    new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1128",
       messageParameters = Map(
         "format" -> format,
@@ -2145,7 +2145,9 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
   def invalidPatternError(pattern: String, message: String): Throwable = {
     new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1216",
-      messageParameters = Map("pattern" -> pattern, "message" -> message))
+      messageParameters = Map(
+        "pattern" -> toSQLValue(pattern, StringType),
+        "message" -> message))
   }
 
   def tableIdentifierExistsError(tableIdentifier: TableIdentifier): Throwable = {
@@ -2305,7 +2307,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
   }
 
   def analyzeTableNotSupportedOnViewsError(): Throwable = {
-        new AnalysisException(
+    new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1236",
       messageParameters = Map.empty)
   }
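
The pattern-quoting changes above and below rely on toSQLValue from QueryErrorsBase, which renders a string value as a SQL literal, i.e. wrapped in single quotes. A rough Scala illustration (sketch; assumes the QueryErrorsBase mixin is in scope):

```
// Sketch of the quoting behavior applied to regexp/LIKE patterns.
import org.apache.spark.sql.types.StringType

toSQLValue("m%aca", StringType)  // "'m%aca'" -- patterns now appear quoted
```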
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index ba78858debc..41190d3f2f4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1395,7 +1395,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
   def failToRecognizePatternError(pattern: String, e: Throwable): SparkRuntimeException = {
     new SparkRuntimeException(
       errorClass = "_LEGACY_ERROR_TEMP_2130",
-      messageParameters = Map("pattern" -> pattern),
+      messageParameters = Map("pattern" -> toSQLValue(pattern, StringType)),
       cause = e)
   }
 
@@ -2686,7 +2686,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
       messageParameters = Map(
         "parameter" -> "regexp",
         "functionName" -> toSQLId(funcName),
-        "expected" -> pattern))
+        "expected" -> toSQLValue(pattern, StringType)))
   }
 
   def tooManyArrayElementsError(
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
index e3829311e2d..a7cdd589606 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
@@ -90,7 +90,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       exception = intercept[AnalysisException] {
         assertSuccess(expr)
       },
-      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS",
+      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS",
       parameters = messageParameters)
   }
 
@@ -447,7 +447,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       exception = intercept[AnalysisException] {
         assertSuccess(murmur3Hash)
       },
-      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS",
+      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS",
       parameters = Map(
         "sqlExpr" -> "\"hash()\"",
         "functionName" -> toSQLId(murmur3Hash.prettyName),
@@ -459,7 +459,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       exception = intercept[AnalysisException] {
         assertSuccess(xxHash64)
       },
-      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS",
+      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS",
       parameters = Map(
         "sqlExpr" -> "\"xxhash64()\"",
         "functionName" -> toSQLId(xxHash64.prettyName),
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/RegexpExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/RegexpExpressionsSuite.scala
index 98a6a9bc19c..095c2736ae0 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/RegexpExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/RegexpExpressionsSuite.scala
@@ -523,15 +523,15 @@ class RegexpExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     checkExceptionInExpression[SparkRuntimeException](
       RegExpExtract(s, p, r),
       create_row("1a 2b 14m", "(?l)", 0),
-      s"$prefix `regexp_extract` is invalid: (?l)")
+      s"$prefix `regexp_extract` is invalid: '(?l)'")
     checkExceptionInExpression[SparkRuntimeException](
       RegExpExtractAll(s, p, r),
       create_row("abc", "] [", 0),
-      s"$prefix `regexp_extract_all` is invalid: ] [")
+      s"$prefix `regexp_extract_all` is invalid: '] ['")
     checkExceptionInExpression[SparkRuntimeException](
       RegExpInStr(s, p, r),
       create_row("abc", ", (", 0),
-      s"$prefix `regexp_instr` is invalid: , (")
+      s"$prefix `regexp_instr` is invalid: ', ('")
   }
 
   test("RegExpReplace: fails analysis if pos is not a constant") {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
index 15513037fe1..f9726c4a6dd 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
@@ -1593,7 +1593,7 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     val expr1 = Elt(Seq(indexExpr1))
     assert(expr1.checkInputDataTypes() ==
       DataTypeMismatch(
-        errorSubClass = "WRONG_NUM_PARAMS",
+        errorSubClass = "WRONG_NUM_ARGS",
         messageParameters = Map(
           "functionName" -> "`elt`",
           "expectedNum" -> "> 1",
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out
index b3c1e94314d..27ec604cb45 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out
@@ -447,7 +447,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "_LEGACY_ERROR_TEMP_1216",
   "messageParameters" : {
     "message" : "the escape character is not allowed to precede 'a'",
-    "pattern" : "m%aca"
+    "pattern" : "'m%aca'"
   }
 }
 
@@ -462,7 +462,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "_LEGACY_ERROR_TEMP_1216",
   "messageParameters" : {
     "message" : "the escape character is not allowed to precede 'a'",
-    "pattern" : "m%aca"
+    "pattern" : "'m%aca'"
   }
 }
 
@@ -477,7 +477,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "_LEGACY_ERROR_TEMP_1216",
   "messageParameters" : {
     "message" : "the escape character is not allowed to precede 'a'",
-    "pattern" : "m%a%%a"
+    "pattern" : "'m%a%%a'"
   }
 }
 
@@ -492,7 +492,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "_LEGACY_ERROR_TEMP_1216",
   "messageParameters" : {
     "message" : "the escape character is not allowed to precede 'a'",
-    "pattern" : "m%a%%a"
+    "pattern" : "'m%a%%a'"
   }
 }
 
@@ -507,7 +507,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "_LEGACY_ERROR_TEMP_1216",
   "messageParameters" : {
     "message" : "the escape character is not allowed to precede 'e'",
-    "pattern" : "b_ear"
+    "pattern" : "'b_ear'"
   }
 }
 
@@ -522,7 +522,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "_LEGACY_ERROR_TEMP_1216",
   "messageParameters" : {
     "message" : "the escape character is not allowed to precede 'e'",
-    "pattern" : "b_ear"
+    "pattern" : "'b_ear'"
   }
 }
 
@@ -537,7 +537,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "_LEGACY_ERROR_TEMP_1216",
   "messageParameters" : {
     "message" : "the escape character is not allowed to precede 'e'",
-    "pattern" : "b_e__r"
+    "pattern" : "'b_e__r'"
   }
 }
 
@@ -552,7 +552,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "_LEGACY_ERROR_TEMP_1216",
   "messageParameters" : {
     "message" : "the escape character is not allowed to precede 'e'",
-    "pattern" : "b_e__r"
+    "pattern" : "'b_e__r'"
   }
 }
 
diff --git a/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out
index 60094af7a99..3474aba3911 100644
--- a/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out
@@ -163,7 +163,7 @@ org.apache.spark.SparkRuntimeException
   "errorClass" : "INVALID_PARAMETER_VALUE",
   "sqlState" : "22023",
   "messageParameters" : {
-    "expected" : "(?l)",
+    "expected" : "'(?l)'",
     "functionName" : "`regexp_extract`",
     "parameter" : "regexp"
   }
@@ -334,7 +334,7 @@ org.apache.spark.SparkRuntimeException
   "errorClass" : "INVALID_PARAMETER_VALUE",
   "sqlState" : "22023",
   "messageParameters" : {
-    "expected" : "], [",
+    "expected" : "'], ['",
     "functionName" : "`regexp_extract_all`",
     "parameter" : "regexp"
   }
@@ -671,7 +671,7 @@ org.apache.spark.SparkRuntimeException
   "errorClass" : "INVALID_PARAMETER_VALUE",
   "sqlState" : "22023",
   "messageParameters" : {
-    "expected" : ") ?",
+    "expected" : "') ?'",
     "functionName" : "`regexp_instr`",
     "parameter" : "regexp"
   }
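
The golden-file changes above correspond to queries along these lines (illustrative; the actual statements live in the regexp-functions.sql input):

```
// Scala sketch: an invalid regexp now surfaces the pattern quoted.
spark.sql("SELECT regexp_extract('1a 2b 14m', '(?l)')").show()
// => SparkRuntimeException [INVALID_PARAMETER_VALUE], "expected" : "'(?l)'"
```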
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index 5e04562a648..a326e009af4 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -894,7 +894,7 @@ org.apache.spark.SparkRuntimeException
 {
   "errorClass" : "_LEGACY_ERROR_TEMP_2130",
   "messageParameters" : {
-    "pattern" : "yyyy-MM-dd GGGGG"
+    "pattern" : "'yyyy-MM-dd GGGGG'"
   }
 }
 
@@ -908,7 +908,7 @@ org.apache.spark.SparkRuntimeException
 {
   "errorClass" : "_LEGACY_ERROR_TEMP_2130",
   "messageParameters" : {
-    "pattern" : "dd MM yyyy EEEEEE"
+    "pattern" : "'dd MM yyyy EEEEEE'"
   }
 }
 
@@ -922,7 +922,7 @@ org.apache.spark.SparkRuntimeException
 {
   "errorClass" : "_LEGACY_ERROR_TEMP_2130",
   "messageParameters" : {
-    "pattern" : "dd MM yyyy EEEEE"
+    "pattern" : "'dd MM yyyy EEEEE'"
   }
 }
 
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
index 8d98209e625..24273560001 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
@@ -871,7 +871,7 @@ org.apache.spark.SparkRuntimeException
 {
   "errorClass" : "_LEGACY_ERROR_TEMP_2130",
   "messageParameters" : {
-    "pattern" : "yyyy-MM-dd GGGGG"
+    "pattern" : "'yyyy-MM-dd GGGGG'"
   }
 }
 
@@ -885,7 +885,7 @@ org.apache.spark.SparkRuntimeException
 {
   "errorClass" : "_LEGACY_ERROR_TEMP_2130",
   "messageParameters" : {
-    "pattern" : "dd MM yyyy EEEEEE"
+    "pattern" : "'dd MM yyyy EEEEEE'"
   }
 }
 
@@ -899,7 +899,7 @@ org.apache.spark.SparkRuntimeException
 {
   "errorClass" : "_LEGACY_ERROR_TEMP_2130",
   "messageParameters" : {
-    "pattern" : "dd MM yyyy EEEEE"
+    "pattern" : "'dd MM yyyy EEEEE'"
   }
 }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
index 3f02429fe62..3adf1751818 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
@@ -4282,7 +4282,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       exception = intercept[AnalysisException] {
         df.select(hash())
       },
-      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS",
+      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS",
       sqlState = None,
       parameters = Map(
         "sqlExpr" -> "\"hash()\"",
@@ -4294,7 +4294,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       exception = intercept[AnalysisException] {
         df.selectExpr("hash()")
       },
-      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS",
+      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS",
       sqlState = None,
       parameters = Map(
         "sqlExpr" -> "\"hash()\"",
@@ -4310,7 +4310,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       exception = intercept[AnalysisException] {
         df.select(xxhash64())
       },
-      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS",
+      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS",
       sqlState = None,
       parameters = Map(
         "sqlExpr" -> "\"xxhash64()\"",
@@ -4322,7 +4322,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       exception = intercept[AnalysisException] {
         df.selectExpr("xxhash64()")
       },
-      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS",
+      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS",
       sqlState = None,
       parameters = Map(
         "sqlExpr" -> "\"xxhash64()\"",
@@ -4338,7 +4338,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       exception = intercept[AnalysisException] {
         df.select(greatest())
       },
-      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS",
+      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS",
       sqlState = None,
       parameters = Map(
         "sqlExpr" -> "\"greatest()\"",
@@ -4351,7 +4351,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       exception = intercept[AnalysisException] {
         df.selectExpr("greatest()")
       },
-      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS",
+      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS",
       sqlState = None,
       parameters = Map(
         "sqlExpr" -> "\"greatest()\"",
@@ -4368,7 +4368,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       exception = intercept[AnalysisException] {
         df.select(least())
       },
-      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS",
+      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS",
       sqlState = None,
       parameters = Map(
         "sqlExpr" -> "\"least()\"",
@@ -4381,7 +4381,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       exception = intercept[AnalysisException] {
         df.selectExpr("least()")
       },
-      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS",
+      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS",
       sqlState = None,
       parameters = Map(
         "sqlExpr" -> "\"least()\"",
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index dd3ad0f4d6b..e9aeba9c820 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -3709,7 +3709,7 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
         },
         errorClass = "_LEGACY_ERROR_TEMP_1216",
         parameters = Map(
-          "pattern" -> "m%@ca",
+          "pattern" -> "'m%@ca'",
           "message" -> "the escape character is not allowed to precede '@'"))
 
       checkAnswer(sql("SELECT s LIKE 'm@@ca' ESCAPE '@' FROM df"), Row(true))

