This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f5411ca2238 [SPARK-40663][SQL][FOLLOWUP] Revert LEGACY_ERROR_TEMP_2002
f5411ca2238 is described below

commit f5411ca2238d286b1f2f06283f57094c6ea81d95
Author: itholic <haejoon....@databricks.com>
AuthorDate: Mon Oct 31 19:01:53 2022 +0300

    [SPARK-40663][SQL][FOLLOWUP] Revert LEGACY_ERROR_TEMP_2002
    
    ### What changes were proposed in this pull request?
    
    This PR fixes the regression flagged in the follow-up review comment https://github.com/apache/spark/pull/38104/files#r1008997881 by reverting the `_LEGACY_ERROR_TEMP_2002` error class.
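
    A minimal sketch (not part of this diff) of the call path after the revert, assuming the pre-existing `String`-based `ansiIllegalArgumentError` in `QueryExecutionErrors` raises `_LEGACY_ERROR_TEMP_2000`, as the golden-file output below indicates:

    ```scala
    // Sketch under the assumptions above; names mirror QueryExecutionErrors,
    // where toSQLConf and SQLConf are already in scope.
    def ansiIllegalArgumentError(message: String): SparkIllegalArgumentException = {
      new SparkIllegalArgumentException(
        errorClass = "_LEGACY_ERROR_TEMP_2000",
        messageParameters = Map(
          "message" -> message,
          "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)))
    }

    // The reverted overload now delegates to the String overload instead of
    // raising _LEGACY_ERROR_TEMP_2002 with only the "message" parameter.
    def ansiIllegalArgumentError(e: IllegalArgumentException): IllegalArgumentException = {
      ansiIllegalArgumentError(e.getMessage)
    }
    ```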
    
    ### Why are the changes needed?
    
    Fixes the regression so that the previous error class and message are raised again.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes. The error message is restored to the previous one: the error now uses error class `_LEGACY_ERROR_TEMP_2000` (with the `ansiConfig` hint) instead of `_LEGACY_ERROR_TEMP_2002`.
    
    ### How was this patch tested?
    
    ```
    ./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite -- -z ansi/date.sql"
    ```
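
    ScalaTest's `-z` filter runs only the tests whose names contain the given substring, so this re-checks just the ansi/date.sql golden file affected by this change.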
    
    Closes #38450 from itholic/revert-legacy-2002.
    
    Authored-by: itholic <haejoon....@databricks.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json                   | 7 +------
 .../scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala   | 6 ++----
 sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out    | 3 ++-
 3 files changed, 5 insertions(+), 11 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 000eb92f1cb..15f11455d46 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -3177,11 +3177,6 @@
       "<message>. If necessary set <ansiConfig> to false to bypass this error."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2002" : {
-    "message" : [
-      "<message>"
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2003" : {
     "message" : [
       "Unsuccessful try to zip maps with <size> unique keys due to exceeding 
the array size limit <maxRoundedArrayLength>"
@@ -4505,4 +4500,4 @@
       "<msg>"
     ]
   }
-}
\ No newline at end of file
+}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 41190d3f2f4..c4983332220 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -301,10 +301,8 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
         "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)))
   }
 
-  def ansiIllegalArgumentError(e: Exception): SparkIllegalArgumentException = {
-    new SparkIllegalArgumentException(
-      errorClass = "_LEGACY_ERROR_TEMP_2002",
-      messageParameters = Map("message" -> e.getMessage))
+  def ansiIllegalArgumentError(e: IllegalArgumentException): IllegalArgumentException = {
+    ansiIllegalArgumentError(e.getMessage)
   }
 
   def overflowInSumOfDecimalError(context: SQLQueryContext): ArithmeticException = {
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out 
b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index e25614213eb..9ddbaec4f99 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -240,8 +240,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkIllegalArgumentException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2002",
+  "errorClass" : "_LEGACY_ERROR_TEMP_2000",
   "messageParameters" : {
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
     "message" : "Illegal input for day of week: xx"
   }
 }

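For context, a hedged reproduction of the user-facing change (the exact query lives in sql-tests/inputs/ansi/date.sql; `next_day` with an invalid day-of-week string is an assumption based on the "Illegal input for day of week" message):

```scala
// Assumed reproduction, not taken from this commit.
spark.conf.set("spark.sql.ansi.enabled", "true")
spark.sql("SELECT next_day('2015-07-23', 'xx')").show()
// Before this commit: _LEGACY_ERROR_TEMP_2002 with only the "message" parameter.
// After this commit:  _LEGACY_ERROR_TEMP_2000 with "message" and "ansiConfig",
// i.e. the pre-regression error text.
```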
