This is an automated email from the ASF dual-hosted git repository.

hvanhovell pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 1aadbc439c05 [SPARK-54887] Add previously removed legacy error class back in
1aadbc439c05 is described below

commit 1aadbc439c05157cf0c806128b7d30caf07b0402
Author: Garland Zhang <[email protected]>
AuthorDate: Thu Jan 29 19:53:35 2026 +0100

    [SPARK-54887] Add previously removed legacy error class back in
    
    ### What changes were proposed in this pull request?
    This legacy class was removed in a recent commit https://github.com/apache/spark/commit/8acdc7a83803145cd0a40f631223ef6055513ec4#diff-cda1fdf98b32b5dcf475ca37f06ca8f621ac2234ad19f473f5b9121ce714f2b9
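    
    For illustration, here is a minimal, self-contained sketch of the restored fallback. The `ErrorParams` shape and helper name below are simplified stand-ins for the real code in `GrpcExceptionConverter` (see the diff further down): when the server does not supply an error class, the client falls back to the legacy condition and carries the raw message as the `message` parameter.
    
    ```scala
    // Simplified sketch (illustrative names, not the actual converter API).
    object LegacyFallbackSketch {
      final case class ErrorParams(
          message: String,
          errorClass: Option[String],
          messageParameters: Map[String, String])

      // Mirrors getParamsWithLegacyErrorClass in the diff below.
      def withLegacyFallback(params: ErrorParams, fallback: String): ErrorParams =
        if (params.errorClass.isDefined) params
        else params.copy(
          errorClass = Some(fallback),
          messageParameters = Map("message" -> params.message))

      def main(args: Array[String]): Unit = {
        val converted = withLegacyFallback(
          ErrorParams("Test error message", errorClass = None, messageParameters = Map.empty),
          "_LEGACY_ERROR_TEMP_3100")
        assert(converted.errorClass.contains("_LEGACY_ERROR_TEMP_3100"))
        assert(converted.messageParameters("message") == "Test error message")
      }
    }
    ```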
    
    ### Why are the changes needed?
    To restore parity with the legacy error-class fallback behavior that existed before the commit above removed it
    
    ### Does this PR introduce _any_ user-facing change?
    
    ### How was this patch tested?
    Added regression (and previously missing) tests on all these legacy error classes
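    
    In essence, the new assertions check that even when no error class arrives from the server, `getCondition` and `getSqlState` are still populated on the converted exception. A hedged, caller-side sketch of reading them (assuming a caught `SparkThrowable` of one of the types covered by the new test):
    
    ```scala
    object ErrorConditionSketch {
      import org.apache.spark.SparkThrowable

      // Illustrative helper: with the fallback restored, converted exceptions of
      // the types listed in the new test always expose a condition and a SQLSTATE.
      def describe(t: Throwable): String = t match {
        case st: SparkThrowable => s"${st.getCondition} (SQLSTATE ${st.getSqlState})"
        case other => s"no condition: ${other.getMessage}"
      }
    }
    ```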
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    Closes #54008 from garlandz-db/readd_old_class.
    
    Authored-by: Garland Zhang <[email protected]>
    Signed-off-by: Herman van Hövell <[email protected]>
---
 .../org/apache/spark/sql/AnalysisException.scala   | 13 ++-
 .../connect/client/SparkConnectClientSuite.scala   | 27 ++++++
 .../connect/client/GrpcExceptionConverter.scala    | 99 ++++++++++++++--------
 3 files changed, 94 insertions(+), 45 deletions(-)

diff --git a/sql/api/src/main/scala/org/apache/spark/sql/AnalysisException.scala b/sql/api/src/main/scala/org/apache/spark/sql/AnalysisException.scala
index 12eebf866a1f..0aeb7ee653d6 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/AnalysisException.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/AnalysisException.scala
@@ -138,19 +138,16 @@ class AnalysisException protected (
       context = origin.getQueryContext,
       cause = cause)
 
-  def this(
-      message: String,
-      cause: Option[Throwable],
-      errorClass: Option[String],
+  private[sql] def this(
+      errorClass: String,
       messageParameters: Map[String, String],
       context: Array[QueryContext],
+      cause: Option[Throwable],
       sqlState: Option[String]) =
     this(
-      message = message,
-      line = None,
-      startPosition = None,
+      message = SparkThrowableHelper.getMessage(errorClass, messageParameters),
       cause = cause,
-      errorClass = errorClass,
+      errorClass = Some(errorClass),
       messageParameters = messageParameters,
       context = context,
       sqlState = sqlState)
diff --git a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala
index fc4a590716b4..1f0e7b89ddc7 100644
--- a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala
+++ b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala
@@ -257,6 +257,33 @@ class SparkConnectClientSuite extends ConnectFunSuite {
     }
   }
 
+  test("Legacy error class is set as default") {
+    Seq(
+      ("org.apache.spark.sql.AnalysisException", "_LEGACY_ERROR_TEMP_3100"),
+      ("java.lang.NumberFormatException", "_LEGACY_ERROR_TEMP_3104"),
+      ("java.lang.IllegalArgumentException", "_LEGACY_ERROR_TEMP_3105"),
+      ("java.lang.ArithmeticException", "_LEGACY_ERROR_TEMP_3106"),
+      ("java.lang.UnsupportedOperationException", "_LEGACY_ERROR_TEMP_3107"),
+      ("java.lang.ArrayIndexOutOfBoundsException", "_LEGACY_ERROR_TEMP_3108"),
+      ("java.time.DateTimeException", "_LEGACY_ERROR_TEMP_3109")).foreach {
+      case (className, legacyErrorClass) =>
+        val baseParams = GrpcExceptionConverter.ErrorParams(
+          message = "Test error message",
+          cause = None,
+          errorClass = None,
+          messageParameters = Map.empty,
+          queryContext = Array.empty,
+          sqlState = None)
+
+        val error = GrpcExceptionConverter
+          .errorFactory(className)(baseParams)
+          .asInstanceOf[SparkThrowable]
+        assert(error.asInstanceOf[Exception].getMessage.contains("Test error message"))
+        assert(error.getCondition == legacyErrorClass)
+        assert(error.getSqlState == "XXKCM")
+    }
+  }
+
   private case class TestPackURI(
       connectionString: String,
       isCorrect: Boolean,
diff --git a/sql/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcExceptionConverter.scala b/sql/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcExceptionConverter.scala
index 5bcf4c9acd40..7b57f75d55ce 100644
--- a/sql/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcExceptionConverter.scala
+++ b/sql/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcExceptionConverter.scala
@@ -268,14 +268,15 @@ private[client] object GrpcExceptionConverter {
         errorClass = getErrorClassOrFallback(params),
         messageParameters = errorParamsToMessageParameters(params),
         queryContext = params.queryContext)),
-    errorConstructor(params =>
+    errorConstructor(params => {
+      val updatedParams = getParamsWithLegacyErrorClass(params, "_LEGACY_ERROR_TEMP_3100")
       new AnalysisException(
-        message = params.message,
-        cause = params.cause,
-        errorClass = resolveParams(params).errorClass,
-        messageParameters = errorParamsToMessageParameters(params),
-        context = params.queryContext,
-        sqlState = getSqlStateOrFallback(params))),
+        errorClass = updatedParams.errorClass.get,
+        messageParameters = updatedParams.messageParameters,
+        cause = updatedParams.cause,
+        context = updatedParams.queryContext,
+        sqlState = getSqlStateOrFallback(updatedParams))
+    }),
     errorConstructor(params =>
       new NamespaceAlreadyExistsException(
         getErrorClassOrFallback(params),
@@ -304,45 +305,57 @@ private[client] object GrpcExceptionConverter {
         getErrorClassOrFallback(params),
         errorParamsToMessageParameters(params),
         params.cause)),
-    errorConstructor[NumberFormatException](params =>
+    errorConstructor[NumberFormatException](params => {
+      val updatedParams = getParamsWithLegacyErrorClass(params, "_LEGACY_ERROR_TEMP_3104")
       new SparkNumberFormatException(
-        errorClass = params.errorClass.getOrElse("_LEGACY_ERROR_TEMP_3104"),
-        messageParameters = errorParamsToMessageParameters(params),
-        params.queryContext,
-        getSqlStateOrFallback(params))),
-    errorConstructor[IllegalArgumentException](params =>
+        errorClass = updatedParams.errorClass.get,
+        messageParameters = updatedParams.messageParameters,
+        updatedParams.queryContext,
+        getSqlStateOrFallback(updatedParams))
+    }),
+    errorConstructor[IllegalArgumentException](params => {
+      val updatedParams = getParamsWithLegacyErrorClass(params, "_LEGACY_ERROR_TEMP_3105")
       new SparkIllegalArgumentException(
-        errorClass = params.errorClass.getOrElse("_LEGACY_ERROR_TEMP_3105"),
-        messageParameters = errorParamsToMessageParameters(params),
-        params.queryContext,
+        errorClass = updatedParams.errorClass.get,
+        messageParameters = updatedParams.messageParameters,
+        updatedParams.queryContext,
         summary = "",
-        cause = params.cause.orNull,
-        getSqlStateOrFallback(params))),
-    errorConstructor[ArithmeticException](params =>
+        cause = updatedParams.cause.orNull,
+        getSqlStateOrFallback(updatedParams))
+    }),
+    errorConstructor[ArithmeticException](params => {
+      val updatedParams = getParamsWithLegacyErrorClass(params, "_LEGACY_ERROR_TEMP_3106")
       new SparkArithmeticException(
-        errorClass = params.errorClass.getOrElse("_LEGACY_ERROR_TEMP_3106"),
-        messageParameters = errorParamsToMessageParameters(params),
-        params.queryContext,
-        getSqlStateOrFallback(params))),
-    errorConstructor[UnsupportedOperationException](params =>
+        errorClass = updatedParams.errorClass.get,
+        messageParameters = updatedParams.messageParameters,
+        updatedParams.queryContext,
+        getSqlStateOrFallback(updatedParams))
+    }),
+    errorConstructor[UnsupportedOperationException](params => {
+      val updatedParams = getParamsWithLegacyErrorClass(params, "_LEGACY_ERROR_TEMP_3107")
       new SparkUnsupportedOperationException(
-        errorClass = params.errorClass.getOrElse("_LEGACY_ERROR_TEMP_3107"),
-        messageParameters = errorParamsToMessageParameters(params),
-        getSqlStateOrFallback(params))),
-    errorConstructor[ArrayIndexOutOfBoundsException](params =>
+        errorClass = updatedParams.errorClass.get,
+        messageParameters = updatedParams.messageParameters,
+        getSqlStateOrFallback(updatedParams))
+    }),
+    errorConstructor[ArrayIndexOutOfBoundsException](params => {
+      val updatedParams = getParamsWithLegacyErrorClass(params, "_LEGACY_ERROR_TEMP_3108")
       new SparkArrayIndexOutOfBoundsException(
-        errorClass = params.errorClass.getOrElse("_LEGACY_ERROR_TEMP_3108"),
-        messageParameters = errorParamsToMessageParameters(params),
-        params.queryContext,
-        getSqlStateOrFallback(params))),
-    errorConstructor[DateTimeException](params =>
+        errorClass = updatedParams.errorClass.get,
+        messageParameters = updatedParams.messageParameters,
+        updatedParams.queryContext,
+        getSqlStateOrFallback(updatedParams))
+    }),
+    errorConstructor[DateTimeException](params => {
+      val updatedParams = getParamsWithLegacyErrorClass(params, "_LEGACY_ERROR_TEMP_3109")
       new SparkDateTimeException(
-        errorClass = params.errorClass.getOrElse("_LEGACY_ERROR_TEMP_3109"),
-        messageParameters = errorParamsToMessageParameters(params),
-        params.queryContext,
+        errorClass = updatedParams.errorClass.get,
+        messageParameters = updatedParams.messageParameters,
+        updatedParams.queryContext,
         summary = "",
         cause = None,
-        getSqlStateOrFallback(params))),
+        getSqlStateOrFallback(updatedParams))
+    }),
     errorConstructor(params =>
       new SparkRuntimeException(
         getErrorClassOrFallback(params),
@@ -365,6 +378,18 @@ private[client] object GrpcExceptionConverter {
         context = params.queryContext,
         sqlState = getSqlStateOrFallback(params))))
 
+  // Explicitly deal with cases where there are fallback legacy error classes
+  private def getParamsWithLegacyErrorClass(
+      params: ErrorParams,
+      fallbackErrorClass: String): ErrorParams = {
+    if (params.errorClass.isDefined) {
+      return params
+    }
+    params.copy(
+      errorClass = Some(fallbackErrorClass),
+      messageParameters = Map("message" -> params.message))
+  }
+
   /**
   * errorsToThrowable reconstructs the exception based on a list of protobuf messages
   * FetchErrorDetailsResponse.Error with un-truncated error messages and server-side stacktrace


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
