This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 21b1d11ddd6a [SPARK-53385][SQL] Refactor Identifier evaluation out
21b1d11ddd6a is described below

commit 21b1d11ddd6a229714dfd0db2828a1f80ca11a70
Author: Vladimir Golubev <vladimir.golu...@databricks.com>
AuthorDate: Wed Aug 27 14:54:27 2025 +0800

    [SPARK-53385][SQL] Refactor Identifier evaluation out
    
    ### What changes were proposed in this pull request?
    
    Refactor the Identifier-expression evaluation logic (`evalIdentifierExpr`) out of the `ResolveIdentifierClause` rule into a new standalone `IdentifierResolution` object.
    
    ### Why are the changes needed?
    
    To keep the Identifier evaluation logic in one shared place, so that different Analyzer implementations can reuse it and remain compatible with each other.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Existing tests.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #52126 from 
vladimirg-db/vladimir-golubev_data/refactor-identifier-evaluation-out.
    
    Authored-by: Vladimir Golubev <vladimir.golu...@databricks.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../catalyst/analysis/IdentifierResolution.scala   | 51 ++++++++++++++++++++++
 .../analysis/ResolveIdentifierClause.scala         | 39 +++--------------
 2 files changed, 57 insertions(+), 33 deletions(-)

diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/IdentifierResolution.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/IdentifierResolution.scala
new file mode 100644
index 000000000000..e274c1e2162a
--- /dev/null
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/IdentifierResolution.scala
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.analysis
+
+import org.apache.spark.sql.catalyst.expressions.{AliasHelper, EvalHelper, 
Expression}
+import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
+import org.apache.spark.sql.types.StringType
+
+object IdentifierResolution extends AliasHelper with EvalHelper {
+  def evalIdentifierExpr(expr: Expression): Seq[String] = {
+    trimAliases(prepareForEval(expr)) match {
+      case e if !e.foldable =>
+        expr.failAnalysis(
+          errorClass = "NOT_A_CONSTANT_STRING.NOT_CONSTANT",
+          messageParameters = Map("name" -> "IDENTIFIER", "expr" -> expr.sql)
+        )
+      case e if e.dataType != StringType =>
+        expr.failAnalysis(
+          errorClass = "NOT_A_CONSTANT_STRING.WRONG_TYPE",
+          messageParameters =
+            Map("name" -> "IDENTIFIER", "expr" -> expr.sql, "dataType" -> 
e.dataType.catalogString)
+        )
+      case e =>
+        e.eval() match {
+          case null =>
+            expr.failAnalysis(
+              errorClass = "NOT_A_CONSTANT_STRING.NULL",
+              messageParameters = Map("name" -> "IDENTIFIER", "expr" -> 
expr.sql)
+            )
+          case other =>
+            // Parse the identifier string to name parts.
+            CatalystSqlParser.parseMultipartIdentifier(other.toString)
+        }
+    }
+  }
+}
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveIdentifierClause.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveIdentifierClause.scala
index 96ed8a63585d..7150c81ad64e 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveIdentifierClause.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveIdentifierClause.scala
@@ -19,20 +19,18 @@ package org.apache.spark.sql.catalyst.analysis
 
 import scala.collection.mutable
 
-import org.apache.spark.sql.catalyst.expressions.{AliasHelper, EvalHelper, 
Expression, SubqueryExpression, VariableReference}
-import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
+import org.apache.spark.sql.catalyst.expressions.{Expression, 
SubqueryExpression, VariableReference}
 import org.apache.spark.sql.catalyst.plans.logical.{CreateView, LogicalPlan}
 import org.apache.spark.sql.catalyst.rules.{Rule, RuleExecutor}
 import org.apache.spark.sql.catalyst.trees.TreePattern._
 import org.apache.spark.sql.errors.QueryCompilationErrors
 import org.apache.spark.sql.internal.SQLConf
-import org.apache.spark.sql.types.StringType
 
 /**
  * Resolves the identifier expressions and builds the original 
plans/expressions.
  */
 class ResolveIdentifierClause(earlyBatches: 
Seq[RuleExecutor[LogicalPlan]#Batch])
-  extends Rule[LogicalPlan] with AliasHelper with EvalHelper {
+  extends Rule[LogicalPlan] {
 
   private val executor = new RuleExecutor[LogicalPlan] {
     override def batches: Seq[Batch] = earlyBatches.asInstanceOf[Seq[Batch]]
@@ -70,7 +68,8 @@ class ResolveIdentifierClause(earlyBatches: 
Seq[RuleExecutor[LogicalPlan]#Batch]
           referredTempVars.get ++= collectTemporaryVariablesInLogicalPlan(p)
         }
 
-        
executor.execute(p.planBuilder.apply(evalIdentifierExpr(p.identifierExpr), 
p.children))
+        executor.execute(p.planBuilder.apply(
+          IdentifierResolution.evalIdentifierExpr(p.identifierExpr), 
p.children))
       case other =>
         
other.transformExpressionsWithPruning(_.containsAnyPattern(UNRESOLVED_IDENTIFIER))
 {
           case e: ExpressionWithUnresolvedIdentifier if 
e.identifierExpr.resolved =>
@@ -79,7 +78,8 @@ class ResolveIdentifierClause(earlyBatches: 
Seq[RuleExecutor[LogicalPlan]#Batch]
               referredTempVars.get ++= 
collectTemporaryVariablesInExpressionTree(e)
             }
 
-            e.exprBuilder.apply(evalIdentifierExpr(e.identifierExpr), 
e.otherExprs)
+            e.exprBuilder.apply(
+              IdentifierResolution.evalIdentifierExpr(e.identifierExpr), 
e.otherExprs)
         }
     }
 
@@ -104,31 +104,4 @@ class ResolveIdentifierClause(earlyBatches: 
Seq[RuleExecutor[LogicalPlan]#Batch]
     }
     collectTempVars(child)
   }
-
-  private def evalIdentifierExpr(expr: Expression): Seq[String] = {
-    trimAliases(prepareForEval(expr)) match {
-      case e if !e.foldable => expr.failAnalysis(
-        errorClass = "NOT_A_CONSTANT_STRING.NOT_CONSTANT",
-        messageParameters = Map(
-          "name" -> "IDENTIFIER",
-          "expr" -> expr.sql))
-      case e if e.dataType != StringType => expr.failAnalysis(
-        errorClass = "NOT_A_CONSTANT_STRING.WRONG_TYPE",
-        messageParameters = Map(
-          "name" -> "IDENTIFIER",
-          "expr" -> expr.sql,
-          "dataType" -> e.dataType.catalogString))
-      case e =>
-        e.eval() match {
-          case null => expr.failAnalysis(
-            errorClass = "NOT_A_CONSTANT_STRING.NULL",
-            messageParameters = Map(
-              "name" -> "IDENTIFIER",
-              "expr" -> expr.sql))
-          case other =>
-            // Parse the identifier string to name parts.
-            CatalystSqlParser.parseMultipartIdentifier(other.toString)
-        }
-    }
-  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to