maropu commented on a change in pull request #29485:
URL: https://github.com/apache/spark/pull/29485#discussion_r475352241
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
##########
@@ -328,27 +328,46 @@ object TypeCoercion {
*/
object WidenSetOperationTypes extends Rule[LogicalPlan] {
- def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperatorsUp {
- case s @ Except(left, right, isAll) if s.childrenResolved &&
- left.output.length == right.output.length && !s.resolved =>
- val newChildren: Seq[LogicalPlan] =
buildNewChildrenWithWiderTypes(left :: right :: Nil)
- assert(newChildren.length == 2)
- Except(newChildren.head, newChildren.last, isAll)
-
- case s @ Intersect(left, right, isAll) if s.childrenResolved &&
- left.output.length == right.output.length && !s.resolved =>
- val newChildren: Seq[LogicalPlan] =
buildNewChildrenWithWiderTypes(left :: right :: Nil)
- assert(newChildren.length == 2)
- Intersect(newChildren.head, newChildren.last, isAll)
-
- case s: Union if s.childrenResolved && !s.byName &&
+ def apply(plan: LogicalPlan): LogicalPlan = {
+ val exprIdMapArray = mutable.ArrayBuffer[(ExprId, Attribute)]()
+ val newPlan = plan resolveOperatorsUp {
+ case s @ Except(left, right, isAll) if s.childrenResolved &&
+ left.output.length == right.output.length && !s.resolved =>
+ val (newChildren, newExprIds) = buildNewChildrenWithWiderTypes(left
:: right :: Nil)
+ exprIdMapArray ++= newExprIds
+ assert(newChildren.length == 2)
+ Except(newChildren.head, newChildren.last, isAll)
+
+ case s @ Intersect(left, right, isAll) if s.childrenResolved &&
+ left.output.length == right.output.length && !s.resolved =>
+ val (newChildren, newExprIds) = buildNewChildrenWithWiderTypes(left
:: right :: Nil)
+ exprIdMapArray ++= newExprIds
+ assert(newChildren.length == 2)
+ Intersect(newChildren.head, newChildren.last, isAll)
+
+ case s: Union if s.childrenResolved && !s.byName &&
s.children.forall(_.output.length == s.children.head.output.length)
&& !s.resolved =>
- val newChildren: Seq[LogicalPlan] =
buildNewChildrenWithWiderTypes(s.children)
- s.copy(children = newChildren)
+ val (newChildren, newExprIds) =
buildNewChildrenWithWiderTypes(s.children)
+ exprIdMapArray ++= newExprIds
+ s.copy(children = newChildren)
+ }
+
+ // Re-maps existing references to the new ones (exprId and dataType)
+ // for aliases added when widening columns' data types.
Review comment:
Yea, I tried it first, but `RemoveNoopOperators` will remove a `Project`
with a rewritten alias
https://github.com/apache/spark/blob/master/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala#L480
Because it assumes that projects having common exprIds produce semantically
equal output. There may be a way to avoid that case, and I'll check `TimeWindowing`.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]