Github user hvanhovell commented on a diff in the pull request: https://github.com/apache/spark/pull/11106#discussion_r52272371 --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala --- @@ -1059,19 +1059,24 @@ object ReplaceDistinctWithAggregate extends Rule[LogicalPlan] { * {{{ * SELECT a1, a2 FROM Tab1 INTERSECT SELECT b1, b2 FROM Tab2 * ==> SELECT DISTINCT a1, a2 FROM Tab1 LEFT SEMI JOIN Tab2 ON a1<=>b1 AND a2<=>b2 + * SELECT a1, a2 FROM Tab1 INTERSECT ALL SELECT b1, b2 FROM Tab2 + * ==> SELECT a1, a2 FROM Tab1 LEFT SEMI JOIN Tab2 ON a1<=>b1 AND a2<=>b2 * }}} * - * Note: - * 1. This rule is only applicable to INTERSECT DISTINCT. Do not use it for INTERSECT ALL. - * 2. This rule has to be done after de-duplicating the attributes; otherwise, the generated + * Note: This rule has to be done after de-duplicating the attributes; otherwise, the generated * join conditions will be incorrect. */ object ReplaceIntersectWithSemiJoin extends Rule[LogicalPlan] { def apply(plan: LogicalPlan): LogicalPlan = plan transform { - case Intersect(left, right) => + case Intersect(left, right, distinct) => assert(left.output.size == right.output.size) - val joinCond = left.output.zip(right.output).map { case (l, r) => EqualNullSafe(l, r) } - Distinct(Join(left, right, LeftSemi, joinCond.reduceLeftOption(And))) + val joinCond = left.output.zip(right.output).map(EqualNullSafe.tupled) + if (distinct) { + Distinct(Join(left, right, LeftSemi, joinCond.reduceLeftOption(And))) --- End diff -- Minor: you can move the `joinCond.reduceLeftOption(And)` into the construction of the `joinCond`.
--- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at infrastructure@apache.org or file a JIRA ticket with INFRA. --- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org