maropu commented on a change in pull request #29485:
URL: https://github.com/apache/spark/pull/29485#discussion_r475371685



##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
##########
@@ -328,27 +328,46 @@ object TypeCoercion {
    */
   object WidenSetOperationTypes extends Rule[LogicalPlan] {
 
-    def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperatorsUp {
-      case s @ Except(left, right, isAll) if s.childrenResolved &&
-        left.output.length == right.output.length && !s.resolved =>
-        val newChildren: Seq[LogicalPlan] = 
buildNewChildrenWithWiderTypes(left :: right :: Nil)
-        assert(newChildren.length == 2)
-        Except(newChildren.head, newChildren.last, isAll)
-
-      case s @ Intersect(left, right, isAll) if s.childrenResolved &&
-        left.output.length == right.output.length && !s.resolved =>
-        val newChildren: Seq[LogicalPlan] = 
buildNewChildrenWithWiderTypes(left :: right :: Nil)
-        assert(newChildren.length == 2)
-        Intersect(newChildren.head, newChildren.last, isAll)
-
-      case s: Union if s.childrenResolved && !s.byName &&
+    def apply(plan: LogicalPlan): LogicalPlan = {
+      val exprIdMapArray = mutable.ArrayBuffer[(ExprId, Attribute)]()
+      val newPlan = plan resolveOperatorsUp {
+        case s @ Except(left, right, isAll) if s.childrenResolved &&
+          left.output.length == right.output.length && !s.resolved =>
+          val (newChildren, newExprIds) = buildNewChildrenWithWiderTypes(left 
:: right :: Nil)
+          exprIdMapArray ++= newExprIds
+          assert(newChildren.length == 2)
+          Except(newChildren.head, newChildren.last, isAll)
+
+        case s @ Intersect(left, right, isAll) if s.childrenResolved &&
+          left.output.length == right.output.length && !s.resolved =>
+          val (newChildren, newExprIds) = buildNewChildrenWithWiderTypes(left 
:: right :: Nil)
+          exprIdMapArray ++= newExprIds
+          assert(newChildren.length == 2)
+          Intersect(newChildren.head, newChildren.last, isAll)
+
+        case s: Union if s.childrenResolved && !s.byName &&
           s.children.forall(_.output.length == s.children.head.output.length) 
&& !s.resolved =>
-        val newChildren: Seq[LogicalPlan] = 
buildNewChildrenWithWiderTypes(s.children)
-        s.copy(children = newChildren)
+          val (newChildren, newExprIds) = 
buildNewChildrenWithWiderTypes(s.children)
+          exprIdMapArray ++= newExprIds
+          s.copy(children = newChildren)
+      }
+
+      // Re-maps existing references to the new ones (exprId and dataType)
+      // for aliases added when widening columns' data types.

Review comment:
      In the case of `TimeWindowing`, it seems `RemoveNoopOperators` cannot remove 
a `Project` even though it has the same exprIds, because the number of output 
attributes differs before/after the transformation (L3622).
   
https://github.com/apache/spark/blob/11c6a23c13745a61c1a1cfc82e4f1ac95eaaa04a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala#L3610-L3623
   
   I looked around the related code, but I couldn't find a way to avoid 
this case. Any suggestions?




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to