cloud-fan commented on a change in pull request #29485:
URL: https://github.com/apache/spark/pull/29485#discussion_r475346278



##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
##########
@@ -328,27 +328,46 @@ object TypeCoercion {
    */
   object WidenSetOperationTypes extends Rule[LogicalPlan] {
 
-    def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperatorsUp {
-      case s @ Except(left, right, isAll) if s.childrenResolved &&
-        left.output.length == right.output.length && !s.resolved =>
-        val newChildren: Seq[LogicalPlan] = buildNewChildrenWithWiderTypes(left :: right :: Nil)
-        assert(newChildren.length == 2)
-        Except(newChildren.head, newChildren.last, isAll)
-
-      case s @ Intersect(left, right, isAll) if s.childrenResolved &&
-        left.output.length == right.output.length && !s.resolved =>
-        val newChildren: Seq[LogicalPlan] = buildNewChildrenWithWiderTypes(left :: right :: Nil)
-        assert(newChildren.length == 2)
-        Intersect(newChildren.head, newChildren.last, isAll)
-
-      case s: Union if s.childrenResolved && !s.byName &&
+    def apply(plan: LogicalPlan): LogicalPlan = {
+      val exprIdMapArray = mutable.ArrayBuffer[(ExprId, Attribute)]()
+      val newPlan = plan resolveOperatorsUp {
+        case s @ Except(left, right, isAll) if s.childrenResolved &&
+          left.output.length == right.output.length && !s.resolved =>
+          val (newChildren, newExprIds) = buildNewChildrenWithWiderTypes(left :: right :: Nil)
+          exprIdMapArray ++= newExprIds
+          assert(newChildren.length == 2)
+          Except(newChildren.head, newChildren.last, isAll)
+
+        case s @ Intersect(left, right, isAll) if s.childrenResolved &&
+          left.output.length == right.output.length && !s.resolved =>
+          val (newChildren, newExprIds) = buildNewChildrenWithWiderTypes(left :: right :: Nil)
+          exprIdMapArray ++= newExprIds
+          assert(newChildren.length == 2)
+          Intersect(newChildren.head, newChildren.last, isAll)
+
+        case s: Union if s.childrenResolved && !s.byName &&
          s.children.forall(_.output.length == s.children.head.output.length) && !s.resolved =>
-        val newChildren: Seq[LogicalPlan] = buildNewChildrenWithWiderTypes(s.children)
-        s.copy(children = newChildren)
+          val (newChildren, newExprIds) = buildNewChildrenWithWiderTypes(s.children)
+          exprIdMapArray ++= newExprIds
+          s.copy(children = newChildren)
+      }
+
+      // Re-maps existing references to the new ones (exprId and dataType)
+      // for aliases added when widening columns' data types.

Review comment:
       Another common way to solve this issue is to create an `Alias` with the existing exprId, so that we don't need to rewrite the parent nodes.
   
   I think it's safer than rewriting the parent nodes. We rewrite parent nodes in `ResolveReferences.dedupRight`, where we have hit bugs and ended up with a complicated solution.
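   For illustration, a rough sketch of what that could look like when widening a child's output (names such as `castChildOutput` and `widerTypes` are made up here, not code from this PR):
   
   ```scala
   import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, Cast, NamedExpression}
   import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project}
   import org.apache.spark.sql.types.DataType
   
   // Hypothetical helper: wrap a child in a Project that casts its columns to the
   // widened types. Reusing the original exprId in each Alias keeps references in
   // the parent operators valid, so the parents don't need to be rewritten.
   def castChildOutput(child: LogicalPlan, widerTypes: Seq[Option[DataType]]): LogicalPlan = {
     val projectList: Seq[NamedExpression] = child.output.zip(widerTypes).map {
       case (attr, Some(newType)) if attr.dataType != newType =>
         // Same name and exprId as the original attribute; only the data type changes.
         Alias(Cast(attr, newType), attr.name)(exprId = attr.exprId)
       case (attr, _) => attr
     }
     if (projectList.forall(_.isInstanceOf[Attribute])) child else Project(projectList, child)
   }
   ```
   
   The trade-off is that the same exprId is then associated with a different data type above the added Project, whereas the current patch keeps the new exprIds and re-maps references in the parent nodes.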



