This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 9df43ac6af88 [SPARK-52216][SQL][CONNECT] Make InvalidCommandInput a user-facing error
9df43ac6af88 is described below

commit 9df43ac6af8819938757b55e22799e55424d1792
Author: Yihong He <[email protected]>
AuthorDate: Thu May 22 08:11:01 2025 +0900

    [SPARK-52216][SQL][CONNECT] Make InvalidCommandInput a user-facing error
    
    ### What changes were proposed in this pull request?
    
    - Specify a standardized SQLSTATE, an error condition, and message parameters for each InvalidCommandInput error
    
    ### Why are the changes needed?
    
    - Support new error framework
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    `build/sbt "connect/testOnly *SparkConnectProtoSuite"`
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #50934 from heyihong/SPARK-52216.
    
    Authored-by: Yihong He <[email protected]>
    Signed-off-by: Hyukjin Kwon <[email protected]>
---
 .../utils/src/main/resources/error/error-conditions.json  |  6 ++++++
 .../spark/sql/connect/common/InvalidCommandInput.scala    | 15 +++++++++++++--
 .../spark/sql/connect/planner/InvalidInputErrors.scala    |  2 +-
 .../sql/connect/planner/SparkConnectProtoSuite.scala      |  9 ++++++---
 4 files changed, 26 insertions(+), 6 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 68234c440235..79e8ab4567f0 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -2386,6 +2386,12 @@
     ],
     "sqlState" : "42601"
   },
+  "INVALID_BUCKET_COUNT" : {
+    "message" : [
+      "BucketBy must specify a bucket count > 0, received <numBuckets> instead."
+    ],
+    "sqlState" : "22003"
+  },
   "INVALID_BUCKET_FILE" : {
     "message" : [
       "Invalid bucket file: <path>."
diff --git a/sql/connect/common/src/main/scala/org/apache/spark/sql/connect/common/InvalidCommandInput.scala b/sql/connect/common/src/main/scala/org/apache/spark/sql/connect/common/InvalidCommandInput.scala
index 313fe7262a10..38efa547e9dc 100644
--- a/sql/connect/common/src/main/scala/org/apache/spark/sql/connect/common/InvalidCommandInput.scala
+++ b/sql/connect/common/src/main/scala/org/apache/spark/sql/connect/common/InvalidCommandInput.scala
@@ -16,10 +16,21 @@
  */
 package org.apache.spark.sql.connect.common
 
+import scala.jdk.CollectionConverters._
+
+import org.apache.spark.{SparkThrowable, SparkThrowableHelper}
+
 /**
  * Error thrown when a connect command is not valid.
  */
 final case class InvalidCommandInput(
-    private val message: String = "",
+    private val errorCondition: String,
+    private val messageParameters: Map[String, String] = Map.empty,
     private val cause: Throwable = null)
-    extends Exception(message, cause)
+    extends Exception(SparkThrowableHelper.getMessage(errorCondition, messageParameters), cause)
+    with SparkThrowable {
+
+  override def getCondition: String = errorCondition
+
+  override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava
+}
diff --git a/sql/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/InvalidInputErrors.scala b/sql/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/InvalidInputErrors.scala
index 38f762af2438..dc39e0c7a6a9 100644
--- a/sql/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/InvalidInputErrors.scala
+++ b/sql/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/InvalidInputErrors.scala
@@ -211,7 +211,7 @@ object InvalidInputErrors {
     InvalidPlanInput("UnionByName `allowMissingCol` can be true only if `byName` is true.")
 
   def invalidBucketCount(numBuckets: Int): InvalidCommandInput =
-    InvalidCommandInput("BucketBy must specify a bucket count > 0, received $numBuckets instead.")
+    InvalidCommandInput("INVALID_BUCKET_COUNT", Map("numBuckets" -> numBuckets.toString))
 
   def invalidPythonUdtfReturnType(actualType: String): InvalidPlanInput =
     InvalidPlanInput(
diff --git a/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala b/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala
index 494aceb2fb58..e5f19e714895 100644
--- a/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala
+++ b/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala
@@ -658,9 +658,12 @@ class SparkConnectProtoSuite extends PlanTest with SparkConnectPlanTest {
 
   test("Write with invalid bucketBy configuration") {
     val cmd = localRelation.write(bucketByCols = Seq("id"), numBuckets = Some(0))
-    assertThrows[InvalidCommandInput] {
-      transform(cmd)
-    }
+    checkError(
+      exception = intercept[InvalidCommandInput] {
+        transform(cmd)
+      },
+      condition = "INVALID_BUCKET_COUNT",
+      parameters = Map("numBuckets" -> "0"))
   }
 
   test("Write to Path") {


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to