This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new fa8f4b1edd6 [SPARK-45609][CONNECT] Include SqlState in SparkThrowable proto message
fa8f4b1edd6 is described below

commit fa8f4b1edd6715cbfcc075178ba0a673c3d919f0
Author: Yihong He <heyihong...@gmail.com>
AuthorDate: Fri Oct 20 17:24:15 2023 +0900

    [SPARK-45609][CONNECT] Include SqlState in SparkThrowable proto message
    
    ### What changes were proposed in this pull request?
    
    - Include SqlState in SparkThrowable proto message
    
    ### Why are the changes needed?
    
    - Better integration with the error framework
    
    ### Does this PR introduce _any_ user-facing change?
    
    - No
    
    ### How was this patch tested?
    
    `build/sbt "connect/testOnly *FetchErrorDetailsHandlerSuite"`
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    Closes #43457 from heyihong/SPARK-45609.
    
    Authored-by: Yihong He <heyihong...@gmail.com>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
---
 .../src/main/protobuf/spark/connect/base.proto     |  4 ++++
 .../spark/sql/connect/utils/ErrorUtils.scala       |  3 +++
 .../service/FetchErrorDetailsHandlerSuite.scala    | 21 +++++++++++++++++++
 python/pyspark/sql/connect/proto/base_pb2.py       | 18 ++++++++--------
 python/pyspark/sql/connect/proto/base_pb2.pyi      | 24 +++++++++++++++++++++-
 5 files changed, 60 insertions(+), 10 deletions(-)

diff --git a/connector/connect/common/src/main/protobuf/spark/connect/base.proto b/connector/connect/common/src/main/protobuf/spark/connect/base.proto
index 27351227222..e2532cfc66d 100644
--- a/connector/connect/common/src/main/protobuf/spark/connect/base.proto
+++ b/connector/connect/common/src/main/protobuf/spark/connect/base.proto
@@ -852,6 +852,10 @@ message FetchErrorDetailsResponse {
 
     // The query context of a SparkThrowable.
     repeated QueryContext query_contexts = 3;
+
+    // Portable error identifier across SQL engines.
+    // Unset if the error class or SQLSTATE is not set.
+    optional string sql_state = 4;
   }
 
  // Error defines the schema for representing an exception.
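
Because `sql_state` is declared with the proto3 `optional` keyword, the generated message tracks explicit field presence, so an unset SQLSTATE is distinguishable from an empty string. A minimal sketch of building and inspecting the message from Scala, assuming the standard protobuf-java codegen for this file (the error class and SQLSTATE values are illustrative):

```scala
import org.apache.spark.connect.proto.FetchErrorDetailsResponse

// proto3 `optional` generates a presence accessor (hasSqlState)
// alongside the usual getter and builder setter.
val throwable = FetchErrorDetailsResponse.SparkThrowable
  .newBuilder()
  .setErrorClass("UNRESOLVED_COLUMN.WITHOUT_SUGGESTION") // illustrative
  .setSqlState("42703") // illustrative SQLSTATE
  .build()

assert(throwable.hasSqlState)
assert(throwable.getSqlState == "42703")

// An unset field reports absent presence and the proto3 default value.
val unset = FetchErrorDetailsResponse.SparkThrowable.newBuilder().build()
assert(!unset.hasSqlState)
assert(unset.getSqlState == "")
```
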
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/utils/ErrorUtils.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/utils/ErrorUtils.scala
index 875b5bd5b9c..f49b81dda8d 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/utils/ErrorUtils.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/utils/ErrorUtils.scala
@@ -126,6 +126,9 @@ private[connect] object ErrorUtils extends Logging {
                 .setFragment(queryCtx.fragment())
                 .build())
           }
+          if (sparkThrowable.getSqlState != null) {
+            sparkThrowableBuilder.setSqlState(sparkThrowable.getSqlState)
+          }
           sparkThrowableBuilder.putAllMessageParameters(sparkThrowable.getMessageParameters)
           builder.setSparkThrowable(sparkThrowableBuilder.build())
         case _ =>
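
The null guard above matters because `SparkThrowable.getSqlState` returns null for errors that carry no SQLSTATE, and protobuf builder setters throw `NullPointerException` on null input. A hedged sketch of the pattern in isolation, with `sqlState` standing in for `sparkThrowable.getSqlState`:

```scala
import org.apache.spark.connect.proto.FetchErrorDetailsResponse

val builder = FetchErrorDetailsResponse.SparkThrowable.newBuilder()

// Stand-in for sparkThrowable.getSqlState, which may return null.
val sqlState: String = null

// Protobuf setters reject null, so copy the value only when present;
// otherwise the field is simply left unset.
if (sqlState != null) {
  builder.setSqlState(sqlState)
}
assert(!builder.build().hasSqlState)
```
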
diff --git a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/service/FetchErrorDetailsHandlerSuite.scala b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/service/FetchErrorDetailsHandlerSuite.scala
index 7633fa7df5d..1b908cab567 100644
--- a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/service/FetchErrorDetailsHandlerSuite.scala
+++ b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/service/FetchErrorDetailsHandlerSuite.scala
@@ -20,11 +20,13 @@ import java.util.UUID
 
 import scala.concurrent.Promise
 import scala.concurrent.duration._
+import scala.util.Try
 
 import io.grpc.stub.StreamObserver
 
 import org.apache.spark.connect.proto
 import org.apache.spark.connect.proto.FetchErrorDetailsResponse
+import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.connect.ResourceHelper
 import org.apache.spark.sql.connect.config.Connect
 import org.apache.spark.sql.connect.utils.ErrorUtils
@@ -188,4 +190,23 @@ class FetchErrorDetailsHandlerSuite extends SharedSparkSession with ResourceHelp
     assert(response.getErrors(0).getStackTraceCount > 0)
     assert(!response.getErrors(0).getStackTrace(0).hasFileName)
   }
+
+  test("error framework parameters are set") {
+    val testError = Try(spark.sql("select x")).failed.get.asInstanceOf[AnalysisException]
+    val errorId = UUID.randomUUID().toString()
+
+    SparkConnectService
+      .getOrCreateIsolatedSession(userId, sessionId)
+      .errorIdToError
+      .put(errorId, testError)
+
+    val response = fetchErrorDetails(userId, sessionId, errorId)
+    assert(response.hasRootErrorIdx)
+    assert(response.getRootErrorIdx == 0)
+
+    val sparkThrowableProto = response.getErrors(0).getSparkThrowable
+    assert(sparkThrowableProto.getErrorClass == testError.errorClass.get)
+    assert(sparkThrowableProto.getMessageParametersMap == testError.getMessageParameters)
+    assert(sparkThrowableProto.getSqlState == testError.getSqlState)
+  }
 }
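
End to end, a client that fetches error details can now surface the SQLSTATE next to the error class. A minimal sketch of reading the new field defensively from a response, using a hypothetical helper that is not part of this patch:

```scala
import org.apache.spark.connect.proto.FetchErrorDetailsResponse

// Hypothetical helper: return the root error's SQLSTATE, if any.
def rootSqlState(response: FetchErrorDetailsResponse): Option[String] = {
  if (!response.hasRootErrorIdx) {
    None
  } else {
    val error = response.getErrors(response.getRootErrorIdx)
    if (error.hasSparkThrowable && error.getSparkThrowable.hasSqlState) {
      Some(error.getSparkThrowable.getSqlState)
    } else {
      None
    }
  }
}
```
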
diff --git a/python/pyspark/sql/connect/proto/base_pb2.py b/python/pyspark/sql/connect/proto/base_pb2.py
index 77c660c36c8..bc9272772a8 100644
--- a/python/pyspark/sql/connect/proto/base_pb2.py
+++ b/python/pyspark/sql/connect/proto/base_pb2.py
@@ -37,7 +37,7 @@ from pyspark.sql.connect.proto import types_pb2 as spark_dot_connect_dot_types__
 
 
 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
-    b'\n\x18spark/connect/base.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x1cspark/connect/commands.proto\x1a\x1aspark/connect/common.proto\x1a\x1fspark/connect/expressions.proto\x1a\x1dspark/connect/relations.proto\x1a\x19spark/connect/types.proto"t\n\x04Plan\x12-\n\x04root\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationH\x00R\x04root\x12\x32\n\x07\x63ommand\x18\x02 \x01(\x0b\x32\x16.spark.connect.CommandH\x00R\x07\x63ommandB\t\n\x07op_type"z\n\x0bUserContext\x12\x17 [...]
+    b'\n\x18spark/connect/base.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x1cspark/connect/commands.proto\x1a\x1aspark/connect/common.proto\x1a\x1fspark/connect/expressions.proto\x1a\x1dspark/connect/relations.proto\x1a\x19spark/connect/types.proto"t\n\x04Plan\x12-\n\x04root\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationH\x00R\x04root\x12\x32\n\x07\x63ommand\x18\x02 \x01(\x0b\x32\x16.spark.connect.CommandH\x00R\x07\x63ommandB\t\n\x07op_type"z\n\x0bUserContext\x12\x17 [...]
 )
 
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
@@ -202,17 +202,17 @@ if _descriptor._USE_C_DESCRIPTORS == False:
     _FETCHERRORDETAILSREQUEST._serialized_start = 11358
     _FETCHERRORDETAILSREQUEST._serialized_end = 11559
     _FETCHERRORDETAILSRESPONSE._serialized_start = 11562
-    _FETCHERRORDETAILSRESPONSE._serialized_end = 12789
+    _FETCHERRORDETAILSRESPONSE._serialized_end = 12837
     _FETCHERRORDETAILSRESPONSE_STACKTRACEELEMENT._serialized_start = 11707
     _FETCHERRORDETAILSRESPONSE_STACKTRACEELEMENT._serialized_end = 11881
     _FETCHERRORDETAILSRESPONSE_QUERYCONTEXT._serialized_start = 11884
     _FETCHERRORDETAILSRESPONSE_QUERYCONTEXT._serialized_end = 12056
     _FETCHERRORDETAILSRESPONSE_SPARKTHROWABLE._serialized_start = 12059
-    _FETCHERRORDETAILSRESPONSE_SPARKTHROWABLE._serialized_end = 12420
-    _FETCHERRORDETAILSRESPONSE_SPARKTHROWABLE_MESSAGEPARAMETERSENTRY._serialized_start = 12336
-    _FETCHERRORDETAILSRESPONSE_SPARKTHROWABLE_MESSAGEPARAMETERSENTRY._serialized_end = 12404
-    _FETCHERRORDETAILSRESPONSE_ERROR._serialized_start = 12423
-    _FETCHERRORDETAILSRESPONSE_ERROR._serialized_end = 12770
-    _SPARKCONNECTSERVICE._serialized_start = 12792
-    _SPARKCONNECTSERVICE._serialized_end = 13641
+    _FETCHERRORDETAILSRESPONSE_SPARKTHROWABLE._serialized_end = 12468
+    _FETCHERRORDETAILSRESPONSE_SPARKTHROWABLE_MESSAGEPARAMETERSENTRY._serialized_start = 12370
+    _FETCHERRORDETAILSRESPONSE_SPARKTHROWABLE_MESSAGEPARAMETERSENTRY._serialized_end = 12438
+    _FETCHERRORDETAILSRESPONSE_ERROR._serialized_start = 12471
+    _FETCHERRORDETAILSRESPONSE_ERROR._serialized_end = 12818
+    _SPARKCONNECTSERVICE._serialized_start = 12840
+    _SPARKCONNECTSERVICE._serialized_end = 13689
 # @@protoc_insertion_point(module_scope)
diff --git a/python/pyspark/sql/connect/proto/base_pb2.pyi b/python/pyspark/sql/connect/proto/base_pb2.pyi
index b1b09b1a2c2..0ad295dbe08 100644
--- a/python/pyspark/sql/connect/proto/base_pb2.pyi
+++ b/python/pyspark/sql/connect/proto/base_pb2.pyi
@@ -2947,6 +2947,7 @@ class FetchErrorDetailsResponse(google.protobuf.message.Message):
         ERROR_CLASS_FIELD_NUMBER: builtins.int
         MESSAGE_PARAMETERS_FIELD_NUMBER: builtins.int
         QUERY_CONTEXTS_FIELD_NUMBER: builtins.int
+        SQL_STATE_FIELD_NUMBER: builtins.int
         error_class: builtins.str
         """Succinct, human-readable, unique, and consistent representation of 
the error category."""
         @property
@@ -2961,6 +2962,10 @@ class FetchErrorDetailsResponse(google.protobuf.message.Message):
             global___FetchErrorDetailsResponse.QueryContext
         ]:
             """The query context of a SparkThrowable."""
+        sql_state: builtins.str
+        """Portable error identifier across SQL engines.
+        Unset if the error class or SQLSTATE is not set.
+        """
         def __init__(
             self,
             *,
@@ -2970,11 +2975,19 @@ class FetchErrorDetailsResponse(google.protobuf.message.Message):
                 global___FetchErrorDetailsResponse.QueryContext
             ]
             | None = ...,
+            sql_state: builtins.str | None = ...,
         ) -> None: ...
         def HasField(
             self,
             field_name: typing_extensions.Literal[
-                "_error_class", b"_error_class", "error_class", b"error_class"
+                "_error_class",
+                b"_error_class",
+                "_sql_state",
+                b"_sql_state",
+                "error_class",
+                b"error_class",
+                "sql_state",
+                b"sql_state",
             ],
         ) -> builtins.bool: ...
         def ClearField(
@@ -2982,17 +2995,26 @@ class FetchErrorDetailsResponse(google.protobuf.message.Message):
             field_name: typing_extensions.Literal[
                 "_error_class",
                 b"_error_class",
+                "_sql_state",
+                b"_sql_state",
                 "error_class",
                 b"error_class",
                 "message_parameters",
                 b"message_parameters",
                 "query_contexts",
                 b"query_contexts",
+                "sql_state",
+                b"sql_state",
             ],
         ) -> None: ...
+        @typing.overload
         def WhichOneof(
            self, oneof_group: typing_extensions.Literal["_error_class", b"_error_class"]
         ) -> typing_extensions.Literal["error_class"] | None: ...
+        @typing.overload
+        def WhichOneof(
+            self, oneof_group: typing_extensions.Literal["_sql_state", b"_sql_state"]
+        ) -> typing_extensions.Literal["sql_state"] | None: ...
 
     class Error(google.protobuf.message.Message):
         """Error defines the schema for the representing exception."""


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
