This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new dbb1437d8734 [SPARK-55202] Fix UNEXPECTED_USE_OF_PARAMETER_MARKER when 
using param…
dbb1437d8734 is described below

commit dbb1437d873470fd7ec1efa1fa9c2423a72397a2
Author: Serge Rielau <[email protected]>
AuthorDate: Tue Jan 27 11:37:09 2026 -0800

    [SPARK-55202] Fix UNEXPECTED_USE_OF_PARAMETER_MARKER when using param…
    
    …eter marker in spark-sql
    
    ### What changes were proposed in this pull request?
    
    Ensure pre-parser for parameter markers is called by spark-sql CLI
    
    ### Why are the changes needed?
    
    Without invoking the pre-parser, the presence of a parameter marker results 
in an internal error (UNEXPECTED_USE_OF_PARAMETER_MARKER).
    By calling the pre-parser, this case is correctly detected and reported as 
UNBOUND_SQL_PARAMETER, which is the expected behavior.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    New test added to CLI suite
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    Claude Sonnet
    
    Closes #53982 from srielau/SPARK-55202-UNEXPECTED_USE_OF_PARAMETER_MARKER.
    
    Lead-authored-by: Serge Rielau <[email protected]>
    Co-authored-by: Serge Rielau <[email protected]>
    Signed-off-by: Gengliang Wang <[email protected]>
---
 .../apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala |  8 +++++++-
 .../org/apache/spark/sql/hive/thriftserver/CliSuite.scala   | 13 +++++++++++++
 2 files changed, 20 insertions(+), 1 deletion(-)

diff --git 
a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
 
b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
index baa8f8cc07cf..2040f8f565a2 100644
--- 
a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
+++ 
b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala
@@ -29,6 +29,7 @@ import org.apache.spark.SparkThrowable
 import org.apache.spark.internal.Logging
 import org.apache.spark.internal.LogKeys.COMMAND
 import org.apache.spark.sql.SparkSession
+import org.apache.spark.sql.catalyst.parser.NamedParameterContext
 import org.apache.spark.sql.catalyst.plans.logical.CommandResult
 import org.apache.spark.sql.execution.{QueryExecution, 
QueryExecutionException, SQLExecution}
 import org.apache.spark.sql.execution.HiveResult.hiveResultString
@@ -67,7 +68,12 @@ private[hive] class SparkSQLDriver(val sparkSession: 
SparkSession = SparkSQLEnv.
       }
       sparkSession.sparkContext.setJobDescription(substitutorCommand)
 
-      val logicalPlan = 
sparkSession.sessionState.sqlParser.parsePlan(substitutorCommand)
+      // Parse with an empty parameter context to enable pre-parsing phase 
that scans for
+      // parameter markers. If any parameter markers (:name or ?) are found in 
the SQL,
+      // the pre-parser will throw UNBOUND_SQL_PARAMETER with proper position 
information.
+      val emptyParamContext = NamedParameterContext(Map.empty)
+      val logicalPlan = 
sparkSession.sessionState.sqlParser.parsePlanWithParameters(
+        substitutorCommand, emptyParamContext)
       val conf = sparkSession.sessionState.conf
 
       val shuffleCleanupMode =
diff --git 
a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
 
b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
index 6a00eae0e478..8278ab14dd68 100644
--- 
a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
+++ 
b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
@@ -863,4 +863,17 @@ class CliSuite extends SparkFunSuite {
       extraArgs = "--conf" :: 
s"spark.plugins=${classOf[RedirectConsolePlugin].getName}" :: Nil)(
       "SELECT 1;" -> "1")
   }
+
+  test("unbound parameter markers in CLI are detected and reported") {
+    // Test that parameter markers without parameters are properly detected in 
spark-sql CLI
+    // and throw UNBOUND_SQL_PARAMETER error instead of internal errors.
+    // This guards against regression where SparkSQLDriver wasn't using 
pre-parser.
+    runCliWithin(
+      2.minute,
+      errorResponses = Seq("UNBOUND_SQL_PARAMETER"))(
+      "SELECT :param;" -> "param",
+      "SELECT 'hello' :parm;" -> "parm",
+      "SELECT ?;" -> ""
+    )
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to