gengliangwang commented on code in PR #41385:
URL: https://github.com/apache/spark/pull/41385#discussion_r1217278997


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveIdentifierClause.scala:
##########
@@ -18,39 +18,51 @@
 package org.apache.spark.sql.catalyst.analysis
 
 import org.apache.spark.sql.catalyst.expressions.Expression
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.catalyst.rules.Rule
+import org.apache.spark.sql.catalyst.trees.TreePattern.UNRESOLVED_IDENTIFIER
 import org.apache.spark.sql.types.StringType
 
 /**
- * Resolves the catalog of the name parts for table/view/function/namespace.
+ * Resolves the identifier expressions and builds the original plans/expressions.
  */
-object IdentifierClauseUtil {
-  private def getNotNullFoldableString(clauseName: String, expr: Expression): String = {
+object ResolveIdentifierClause extends Rule[LogicalPlan] {
+
+  override def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUpWithPruning(
+    _.containsAnyPattern(UNRESOLVED_IDENTIFIER)) {
+    case p: PlanWithUnresolvedIdentifier if p.identifierExpr.resolved =>
+      p.planBuilder.apply(evalIdentifierExpr(p.identifierExpr))
+    case other =>
+      other.transformExpressionsWithPruning(_.containsAnyPattern(UNRESOLVED_IDENTIFIER)) {
+        case e: ExpressionWithUnresolvedIdentifier if e.identifierExpr.resolved =>
+          e.exprBuilder.apply(evalIdentifierExpr(e.identifierExpr))
+      }
+  }
+
+  private def evalIdentifierExpr(expr: Expression): Seq[String] = {
     expr match {
       case e if !e.foldable => expr.failAnalysis(
         errorClass = "NOT_A_CONSTANT_STRING.NOT_CONSTANT",
         messageParameters = Map(
-          "name" -> clauseName,
+          "name" -> "IDENTIFIER",
           "expr" -> expr.sql))
       case e if e.dataType != StringType => expr.failAnalysis(
         errorClass = "NOT_A_CONSTANT_STRING.WRONG_TYPE",
         messageParameters = Map(
-          "name" -> clauseName,
+          "name" -> "IDENTIFIER",
           "expr" -> expr.sql,
           "dataType" -> e.dataType.catalogString))
       case e =>
         e.eval() match {
           case null => expr.failAnalysis(
             errorClass = "NOT_A_CONSTANT_STRING.NULL",
             messageParameters = Map(
-              "name" -> clauseName,
+              "name" -> "IDENTIFIER",
               "expr" -> expr.sql))
-          case other => other.toString // OK
+          case other =>
+            // Parse the identifier string to name parts.
+            UnresolvedAttribute(other.toString).nameParts

Review Comment:
   This one is a bit confusing. Shall we just use 
   ```
   CatalystSqlParser.parseMultipartIdentifier(name)
   ```
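
   For illustration only, a minimal sketch of the suggested alternative: parsing the evaluated identifier string into multipart name parts with `CatalystSqlParser.parseMultipartIdentifier` (the wrapping object and method name here are hypothetical, not the PR's code):
   ```
   import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

   // Sketch only: parse an identifier string into its name parts,
   // e.g. parse("cat.db.tbl") == Seq("cat", "db", "tbl").
   object IdentifierParseSketch {
     def parse(name: String): Seq[String] =
       CatalystSqlParser.parseMultipartIdentifier(name)
   }
   ```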


