miland-db commented on code in PR #52334:
URL: https://github.com/apache/spark/pull/52334#discussion_r2407254114


##########
sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala:
##########
@@ -49,10 +52,110 @@ class SparkSqlParser extends AbstractSqlParser {
   val astBuilder = new SparkSqlAstBuilder()
 
   private val substitutor = new VariableSubstitution()
+  private[execution] val parameterHandler = new ParameterHandler()
+
+
+  // Thread-local flag to track whether we're in a top-level parse operation
+  // This is used to prevent parameter substitution during identifier/data type parsing
+  private val isTopLevelParse = new ThreadLocal[Boolean] {
+    override def initialValue(): Boolean = true
+  }
+
 
   protected override def parse[T](command: String)(toResult: SqlBaseParser => T): T = {
-    super.parse(substitutor.substitute(command))(toResult)
+    val wasTopLevel = isTopLevelParse.get()
+
+    // Step 1: Check if parameter substitution should occur
+    val (paramSubstituted, substitutionOccurred) =
+      if (SQLConf.get.legacyParameterSubstitutionConstantsOnly) {
+        // Legacy mode: skip parameter substitution
+        (command, false)
+      } else {
+        // Modern mode: check if we have a parameterized query context
+        org.apache.spark.sql.catalyst.parser.ThreadLocalParameterContext.get() match {
+          case Some(context) =>
+            val substituted = substituteParametersIfNeeded(command, context)
+            // Position mapper is stored by the parameter handler for later retrieval
+            (substituted, substituted != command) // Track if substitution actually occurred
+          case None =>
+            (command, false)  // No parameters to substitute
+        }
+      }
+
+    // Step 2: Apply existing variable substitution
+    val variableSubstituted = substitutor.substitute(paramSubstituted)
+
+    // Step 3: Only set special origin if parameter substitution actually occurred
+    val currentOrigin = org.apache.spark.sql.catalyst.trees.CurrentOrigin.get
+    val originToUse = if (substitutionOccurred && wasTopLevel) {
+      // Parameter substitution occurred - set original SQL text for position mapping
+      currentOrigin.copy(
+        sqlText = Some(command), // Use original SQL text, not substituted
+        startIndex = Some(0),
+        stopIndex = Some(command.length - 1)
+      )
+    } else {
+      // No substitution or nested call - use existing origin unchanged
+      currentOrigin
+    }
+
+    org.apache.spark.sql.catalyst.trees.CurrentOrigin.withOrigin(originToUse) {

Review Comment:
   There are multiple places in the code where a fully qualified name is used instead of an import. Maybe we can improve that.
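
   For illustration, a rough sketch of the same block with the two classes imported instead of fully qualified (the import placement and the surrounding body are assumptions based on the diff above, not the actual patch):

   ```scala
   import org.apache.spark.sql.catalyst.parser.ThreadLocalParameterContext
   import org.apache.spark.sql.catalyst.trees.CurrentOrigin

   // Inside parse[T], the references then shrink to simple names:
   val (paramSubstituted, substitutionOccurred) =
     if (SQLConf.get.legacyParameterSubstitutionConstantsOnly) {
       // Legacy mode: skip parameter substitution
       (command, false)
     } else {
       // Modern mode: check if we have a parameterized query context
       ThreadLocalParameterContext.get() match {
         case Some(context) =>
           val substituted = substituteParametersIfNeeded(command, context)
           (substituted, substituted != command)
         case None =>
           (command, false)
       }
     }

   // ... steps 2 and 3 unchanged ...

   val currentOrigin = CurrentOrigin.get
   // ...
   CurrentOrigin.withOrigin(originToUse) {
     // ...
   }
   ```

   That would also match how `SQLConf` and `substitutor` are already referenced by simple name in this method.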



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

