This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 8dd478f90a4d [SPARK-54534][SQL] Migrate Hive-related legacy error 
codes to proper error conditions
8dd478f90a4d is described below

commit 8dd478f90a4da88b8312c6adf323524d7f407584
Author: Ganesha S <[email protected]>
AuthorDate: Tue Dec 2 13:30:42 2025 -0800

    [SPARK-54534][SQL] Migrate Hive-related legacy error codes to proper error 
conditions
    
    ### What changes were proposed in this pull request?
    
    This PR migrates three Hive-related legacy error codes to proper error 
conditions with the `INTERNAL_ERROR_` prefix:
    
    1. **_LEGACY_ERROR_TEMP_2186** → 
**INTERNAL_ERROR_SERDE_INTERFACE_NOT_FOUND**
    
    2. **_LEGACY_ERROR_TEMP_2187** → **INTERNAL_ERROR_INVALID_HIVE_COLUMN_TYPE**
    
    3. **_LEGACY_ERROR_TEMP_2192** → 
**INTERNAL_ERROR_INVALID_PARTITION_FILTER_VALUE**
    
    ### Why are the changes needed?
    
    - Improves error message clarity and consistency
    - Removes legacy error codes as part of ongoing cleanup effort
    - Provides better user experience with more descriptive error messages
    - Follows Spark's error condition naming conventions
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, users will see improved error messages for these three Hive-related 
error scenarios.
    
    ### How was this patch tested?
    
    - Existing tests continue to pass
    - Error message format follows Spark's error condition guidelines
    - All error parameters are properly defined in error-conditions.json
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    Generated-by: Claude Sonnet 4.5
    
    Closes #53244 from ganeshashree/SPARK-54534.
    
    Authored-by: Ganesha S <[email protected]>
    Signed-off-by: Wenchen Fan <[email protected]>
---
 .../src/main/resources/error/error-conditions.json | 37 +++++++++++++---------
 .../spark/sql/errors/QueryExecutionErrors.scala    | 10 +++---
 2 files changed, 26 insertions(+), 21 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 492a33c57461..27d4552758f2 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -2444,6 +2444,20 @@
     ],
     "sqlState" : "XX000"
   },
+  "INTERNAL_ERROR_INVALID_HIVE_COLUMN_TYPE" : {
+    "message" : [
+      "Failed to convert Hive table to Spark catalog table.",
+      "Database: <dbName>",
+      "Table: <tableName>"
+    ],
+    "sqlState" : "XX000"
+  },
+  "INTERNAL_ERROR_INVALID_PARTITION_FILTER_VALUE" : {
+    "message" : [
+      "Partition filter value cannot contain both double quotes (\") and single quotes (')."
+    ],
+    "sqlState" : "XX000"
+  },
   "INTERNAL_ERROR_MEMORY" : {
     "message" : [
       "<message>"
@@ -2494,6 +2508,14 @@
     ],
     "sqlState" : "XX000"
   },
+  "INTERNAL_ERROR_SERDE_INTERFACE_NOT_FOUND" : {
+    "message" : [
+      "The SerDe interface was removed since Hive 2.3 (HIVE-15167).",
+      "Please migrate your custom SerDes to Hive 2.3 or later.",
+      "For more details, see: https://issues.apache.org/jira/browse/HIVE-15167"
+    ],
+    "sqlState" : "XX000"
+  },
   "INTERNAL_ERROR_SHUFFLE" : {
     "message" : [
       "<message>"
@@ -9021,21 +9043,6 @@
       "Cannot create staging directory: <message>"
     ]
   },
-  "_LEGACY_ERROR_TEMP_2186" : {
-    "message" : [
-      "The SerDe interface removed since Hive 2.3(HIVE-15167). Please migrate your custom SerDes to Hive 2.3. See HIVE-15167 for more details."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2187" : {
-    "message" : [
-      "<message>, db: <dbName>, table: <tableName>."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2192" : {
-    "message" : [
-      "Partition filter cannot have both `\"` and `'` characters."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2194" : {
     "message" : [
      "Unsupported Hive Metastore version <version>. Please set <key> with a valid version."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index cb3c063f434e..3f14eaa45cfc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1629,17 +1629,16 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
 
  def serDeInterfaceNotFoundError(e: NoClassDefFoundError): SparkClassNotFoundException = {
     new SparkClassNotFoundException(
-      errorClass = "_LEGACY_ERROR_TEMP_2186",
-      messageParameters = Map.empty,
+      errorClass = "INTERNAL_ERROR_SERDE_INTERFACE_NOT_FOUND",
+      messageParameters = Map.empty[String, String],
       cause = e)
   }
 
   def convertHiveTableToCatalogTableError(
       e: SparkException, dbName: String, tableName: String): Throwable = {
     new SparkException(
-      errorClass = "_LEGACY_ERROR_TEMP_2187",
+      errorClass = "INTERNAL_ERROR_INVALID_HIVE_COLUMN_TYPE",
       messageParameters = Map(
-        "message" -> e.getMessage,
         "dbName" -> dbName,
         "tableName" -> tableName),
       cause = e)
@@ -1661,8 +1660,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
   }
 
   def invalidPartitionFilterError(): SparkUnsupportedOperationException = {
-    new SparkUnsupportedOperationException(
-      errorClass = "_LEGACY_ERROR_TEMP_2192")
+    new SparkUnsupportedOperationException("INTERNAL_ERROR_INVALID_PARTITION_FILTER_VALUE")
   }
 
  def getPartitionMetadataByFilterError(e: Exception): SparkRuntimeException = {


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to