GitHub user gatorsmile commented on a diff in the pull request:

    https://github.com/apache/spark/pull/19451#discussion_r144456109
  
    --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala ---
    @@ -1242,6 +1244,53 @@ object ReplaceIntersectWithSemiJoin extends Rule[LogicalPlan] {
     }
     
     /**
    + * If one or both of the datasets in the logical [[Except]] operator are purely transformed
    + * using [[Filter]], this rule replaces the logical [[Except]] operator with a [[Filter]]
    + * operator by flipping the filter condition of the right child.
    + * {{{
    + *   SELECT a1, a2 FROM Tab1 WHERE a2 = 12 EXCEPT SELECT a1, a2 FROM Tab1 WHERE a1 = 5
    + *   ==>  SELECT DISTINCT a1, a2 FROM Tab1 WHERE a2 = 12 AND a1 <> 5
    + * }}}
    + *
    + * Note:
    + * 1. We should combine all the [[Filter]]s of the right node before flipping the condition
    + *    with a NOT operator.
    + */
    +object ReplaceExceptWithFilter extends Rule[LogicalPlan] {
    +
    +  def apply(plan: LogicalPlan): LogicalPlan = plan transform {
    +    case Except(left, right) if isEligible(left, right) =>
    +      val filterCondition = combineFilters(right).asInstanceOf[Filter].condition
    +      Distinct(
    +        Filter(Not(replaceAttributesIn(filterCondition, left)), left)
    +      )
    +  }
    +
    +  def isEligible(left: LogicalPlan, right: LogicalPlan): Boolean = (left, right) match {
    +    case (left, right: Filter) => nonFilterChild(left).sameResult(nonFilterChild(right))
    +    case _ => false
    +  }
    +
    +  def nonFilterChild(plan: LogicalPlan): LogicalPlan = plan.find(!_.isInstanceOf[Filter]).get
    --- End diff --
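    
    To make the rewrite concrete, here is a rough plan-level sketch in the style of the optimizer test suites; the Catalyst DSL imports and the `LocalRelation` standing in for `Tab1` are illustrative assumptions, not part of the diff:
    ```Scala
    import org.apache.spark.sql.catalyst.dsl.expressions._
    import org.apache.spark.sql.catalyst.dsl.plans._
    import org.apache.spark.sql.catalyst.expressions.Not
    import org.apache.spark.sql.catalyst.plans.logical.{Distinct, Except, LocalRelation}

    // Illustrative relation standing in for Tab1 from the Scaladoc example.
    val tab1 = LocalRelation('a1.int, 'a2.int)

    // Input: EXCEPT over two Filters on the same underlying relation.
    val query = Except(tab1.where('a2 === 12), tab1.where('a1 === 5))

    // Shape of the output: the right child's condition is negated, merged into
    // the left child's Filter, and the result is wrapped in Distinct to keep
    // EXCEPT's set semantics.
    val rewritten = Distinct(tab1.where('a2 === 12 && Not('a1 === 5)))
    ```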
    
    Although it is impossible, please use
    ```Scala
    plan.find(!_.isInstanceOf[Filter]).getOrElse {
      throw new AnalysisException("xyz")
    }
    ```
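    
    Applied to `nonFilterChild`, the suggestion would look roughly like the sketch below; the exception message is a placeholder, not final wording:
    ```Scala
    import org.apache.spark.sql.AnalysisException

    def nonFilterChild(plan: LogicalPlan): LogicalPlan = {
      plan.find(!_.isInstanceOf[Filter]).getOrElse {
        // Should be unreachable once isEligible has matched, but an explicit
        // AnalysisException beats the bare NoSuchElementException from Option.get.
        throw new AnalysisException(s"Non-Filter child not found in the plan: $plan")
      }
    }
    ```
    This keeps the failure self-describing if the eligibility check and the traversal ever drift apart.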

