maropu commented on a change in pull request #28733:
URL: https://github.com/apache/spark/pull/28733#discussion_r438150400



##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
##########
@@ -198,6 +200,97 @@ trait PredicateHelper {
     case e: Unevaluable => false
     case e => e.children.forall(canEvaluateWithinJoin)
   }
+
+  /**
+   * Convert an expression into conjunctive normal form.
+   * Definition and algorithm: 
https://en.wikipedia.org/wiki/Conjunctive_normal_form
+   * CNF can explode exponentially in the size of the input expression when 
converting Or clauses.
+   * Use a configuration MAX_CNF_NODE_COUNT to prevent such cases.
+   *
+   * @param condition to be conversed into CNF.

Review comment:
       nit: `conversed` -> `converted`?

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
##########
@@ -198,6 +200,97 @@ trait PredicateHelper {
     case e: Unevaluable => false
     case e => e.children.forall(canEvaluateWithinJoin)
   }
+
+  /**
+   * Convert an expression into conjunctive normal form.
+   * Definition and algorithm: 
https://en.wikipedia.org/wiki/Conjunctive_normal_form
+   * CNF can explode exponentially in the size of the input expression when 
converting Or clauses.

Review comment:
       nit: `Or` -> `[[Or]]` for code jump?

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
##########
@@ -198,6 +200,97 @@ trait PredicateHelper {
     case e: Unevaluable => false
     case e => e.children.forall(canEvaluateWithinJoin)
   }
+
+  /**
+   * Convert an expression into conjunctive normal form.
+   * Definition and algorithm: 
https://en.wikipedia.org/wiki/Conjunctive_normal_form
+   * CNF can explode exponentially in the size of the input expression when 
converting Or clauses.
+   * Use a configuration MAX_CNF_NODE_COUNT to prevent such cases.

Review comment:
       nit: `MAX_CNF_NODE_COUNT` -> `[[SQLConf.MAX_CNF_NODE_COUNT]]`

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
##########
@@ -198,6 +200,97 @@ trait PredicateHelper {
     case e: Unevaluable => false
     case e => e.children.forall(canEvaluateWithinJoin)
   }
+
+  /**
+   * Convert an expression into conjunctive normal form.
+   * Definition and algorithm: 
https://en.wikipedia.org/wiki/Conjunctive_normal_form
+   * CNF can explode exponentially in the size of the input expression when 
converting Or clauses.
+   * Use a configuration MAX_CNF_NODE_COUNT to prevent such cases.
+   *
+   * @param condition to be conversed into CNF.
+   * @return If the number of expressions exceeds threshold on converting Or, 
return Seq.empty.
+   *         If the conversion repeatedly expands nondeterministic 
expressions, return Seq.empty.
+   *         Otherwise, return the converted result as sequence of disjunctive 
expressions.
+   */
+  def conjunctiveNormalForm(condition: Expression): Seq[Expression] = {
+    val postOrderNodes = postOrderTraversal(condition)
+    val resultStack = new mutable.Stack[Seq[Expression]]
+    val maxCnfNodeCount = SQLConf.get.maxCnfNodeCount
+    // Bottom up approach to get CNF of sub-expressions
+    while (postOrderNodes.nonEmpty) {
+      val cnf = postOrderNodes.pop() match {
+        case _: And =>
+          val right: Seq[Expression] = resultStack.pop()
+          val left: Seq[Expression] = resultStack.pop()
+          left ++ right
+        case _: Or =>
+          // For each side, there is no need to expand predicates of the same 
references.
+          // So here we can aggregate predicates of the same references as one 
single predicate,
+          // for reducing the size of pushed down predicates and corresponding 
codegen.
+          val right = groupExpressionsByQualifier(resultStack.pop())
+          val left = groupExpressionsByQualifier(resultStack.pop())
+          // Stop the loop whenever the result exceeds the `maxCnfNodeCount`
+          if (left.size * right.size > maxCnfNodeCount) {
+            return Seq.empty
+          } else {
+            for {x <- left; y <- right} yield Or(x, y)
+          }
+        case other => other :: Nil
+      }
+      resultStack.push(cnf)
+    }
+    if (resultStack.length != 1) {
+      logWarning("The length of CNF conversion result stack is supposed to be 
1. There might " +
+        "be something wrong with CNF conversion.")
+      return Seq.empty
+    }
+    resultStack.top
+  }
+
+  private def groupExpressionsByQualifier(
+    expressions: Seq[Expression]): Seq[Expression] = {

Review comment:
       nit: it seems you don't need the line break;
   ```
     private def groupExpressionsByQualifier(expressions: Seq[Expression]): 
Seq[Expression] = {
   ```

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
##########
@@ -198,6 +200,97 @@ trait PredicateHelper {
     case e: Unevaluable => false
     case e => e.children.forall(canEvaluateWithinJoin)
   }
+
+  /**
+   * Convert an expression into conjunctive normal form.
+   * Definition and algorithm: 
https://en.wikipedia.org/wiki/Conjunctive_normal_form
+   * CNF can explode exponentially in the size of the input expression when 
converting Or clauses.
+   * Use a configuration MAX_CNF_NODE_COUNT to prevent such cases.
+   *
+   * @param condition to be conversed into CNF.
+   * @return If the number of expressions exceeds threshold on converting Or, 
return Seq.empty.
+   *         If the conversion repeatedly expands nondeterministic 
expressions, return Seq.empty.
+   *         Otherwise, return the converted result as sequence of disjunctive 
expressions.
+   */
+  def conjunctiveNormalForm(condition: Expression): Seq[Expression] = {
+    val postOrderNodes = postOrderTraversal(condition)
+    val resultStack = new mutable.Stack[Seq[Expression]]
+    val maxCnfNodeCount = SQLConf.get.maxCnfNodeCount
+    // Bottom up approach to get CNF of sub-expressions
+    while (postOrderNodes.nonEmpty) {
+      val cnf = postOrderNodes.pop() match {
+        case _: And =>
+          val right: Seq[Expression] = resultStack.pop()
+          val left: Seq[Expression] = resultStack.pop()
+          left ++ right
+        case _: Or =>
+          // For each side, there is no need to expand predicates of the same 
references.
+          // So here we can aggregate predicates of the same references as one 
single predicate,
+          // for reducing the size of pushed down predicates and corresponding 
codegen.
+          val right = groupExpressionsByQualifier(resultStack.pop())
+          val left = groupExpressionsByQualifier(resultStack.pop())
+          // Stop the loop whenever the result exceeds the `maxCnfNodeCount`
+          if (left.size * right.size > maxCnfNodeCount) {

Review comment:
       Could we print some messages here by `logInfo` just like 
`WholeStageCodegenExec`? 
https://github.com/apache/spark/blob/master/sql/core/src/main/scala/org/apache/spark/sql/execution/WholeStageCodegenExec.scala#L729-L733

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
##########
@@ -198,6 +200,97 @@ trait PredicateHelper {
     case e: Unevaluable => false
     case e => e.children.forall(canEvaluateWithinJoin)
   }
+
+  /**
+   * Convert an expression into conjunctive normal form.
+   * Definition and algorithm: 
https://en.wikipedia.org/wiki/Conjunctive_normal_form
+   * CNF can explode exponentially in the size of the input expression when 
converting Or clauses.
+   * Use a configuration MAX_CNF_NODE_COUNT to prevent such cases.
+   *
+   * @param condition to be conversed into CNF.
+   * @return If the number of expressions exceeds threshold on converting Or, 
return Seq.empty.
+   *         If the conversion repeatedly expands nondeterministic 
expressions, return Seq.empty.
+   *         Otherwise, return the converted result as sequence of disjunctive 
expressions.
+   */
+  def conjunctiveNormalForm(condition: Expression): Seq[Expression] = {
+    val postOrderNodes = postOrderTraversal(condition)
+    val resultStack = new mutable.Stack[Seq[Expression]]
+    val maxCnfNodeCount = SQLConf.get.maxCnfNodeCount
+    // Bottom up approach to get CNF of sub-expressions
+    while (postOrderNodes.nonEmpty) {
+      val cnf = postOrderNodes.pop() match {
+        case _: And =>
+          val right: Seq[Expression] = resultStack.pop()
+          val left: Seq[Expression] = resultStack.pop()
+          left ++ right
+        case _: Or =>
+          // For each side, there is no need to expand predicates of the same 
references.
+          // So here we can aggregate predicates of the same references as one 
single predicate,
+          // for reducing the size of pushed down predicates and corresponding 
codegen.
+          val right = groupExpressionsByQualifier(resultStack.pop())
+          val left = groupExpressionsByQualifier(resultStack.pop())
+          // Stop the loop whenever the result exceeds the `maxCnfNodeCount`
+          if (left.size * right.size > maxCnfNodeCount) {
+            return Seq.empty
+          } else {
+            for {x <- left; y <- right} yield Or(x, y)

Review comment:
       nit format: `{x <- left; y <- right}` -> `{ x <- left; y <- right }`?

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
##########
@@ -198,6 +200,97 @@ trait PredicateHelper {
     case e: Unevaluable => false
     case e => e.children.forall(canEvaluateWithinJoin)
   }
+
+  /**
+   * Convert an expression into conjunctive normal form.
+   * Definition and algorithm: 
https://en.wikipedia.org/wiki/Conjunctive_normal_form
+   * CNF can explode exponentially in the size of the input expression when 
converting Or clauses.
+   * Use a configuration MAX_CNF_NODE_COUNT to prevent such cases.
+   *
+   * @param condition to be conversed into CNF.
+   * @return If the number of expressions exceeds threshold on converting Or, 
return Seq.empty.
+   *         If the conversion repeatedly expands nondeterministic 
expressions, return Seq.empty.
+   *         Otherwise, return the converted result as sequence of disjunctive 
expressions.
+   */
+  def conjunctiveNormalForm(condition: Expression): Seq[Expression] = {

Review comment:
       Based on the `@return` statement above, how about 
`conjunctiveNormalForm` -> `convertToDisjunctiveExpressions`?

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
##########
@@ -198,6 +200,97 @@ trait PredicateHelper {
     case e: Unevaluable => false
     case e => e.children.forall(canEvaluateWithinJoin)
   }
+
+  /**
+   * Convert an expression into conjunctive normal form.
+   * Definition and algorithm: 
https://en.wikipedia.org/wiki/Conjunctive_normal_form
+   * CNF can explode exponentially in the size of the input expression when 
converting Or clauses.
+   * Use a configuration MAX_CNF_NODE_COUNT to prevent such cases.
+   *
+   * @param condition to be conversed into CNF.
+   * @return If the number of expressions exceeds threshold on converting Or, 
return Seq.empty.
+   *         If the conversion repeatedly expands nondeterministic 
expressions, return Seq.empty.
+   *         Otherwise, return the converted result as sequence of disjunctive 
expressions.

Review comment:
       nit: how about reordering statements (w/ some fixes) like this?
   ```
      * @return the CNF result as sequence of disjunctive expressions. If the 
number of expressions
      *         exceeds threshold on converting `Or` or the conversion 
repeatedly expands
      *         nondeterministic expressions, returns `Seq.empty`.
   ```

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
##########
@@ -198,6 +200,97 @@ trait PredicateHelper {
     case e: Unevaluable => false
     case e => e.children.forall(canEvaluateWithinJoin)
   }
+
+  /**
+   * Convert an expression into conjunctive normal form.
+   * Definition and algorithm: 
https://en.wikipedia.org/wiki/Conjunctive_normal_form
+   * CNF can explode exponentially in the size of the input expression when 
converting Or clauses.
+   * Use a configuration MAX_CNF_NODE_COUNT to prevent such cases.
+   *
+   * @param condition to be conversed into CNF.
+   * @return If the number of expressions exceeds threshold on converting Or, 
return Seq.empty.
+   *         If the conversion repeatedly expands nondeterministic 
expressions, return Seq.empty.
+   *         Otherwise, return the converted result as sequence of disjunctive 
expressions.
+   */
+  def conjunctiveNormalForm(condition: Expression): Seq[Expression] = {
+    val postOrderNodes = postOrderTraversal(condition)
+    val resultStack = new mutable.Stack[Seq[Expression]]
+    val maxCnfNodeCount = SQLConf.get.maxCnfNodeCount
+    // Bottom up approach to get CNF of sub-expressions
+    while (postOrderNodes.nonEmpty) {
+      val cnf = postOrderNodes.pop() match {
+        case _: And =>
+          val right: Seq[Expression] = resultStack.pop()
+          val left: Seq[Expression] = resultStack.pop()

Review comment:
       ```
             val right = resultStack.pop()
             val left = resultStack.pop()
   ```

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/PushCNFPredicateThroughJoin.scala
##########
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.optimizer
+
+import org.apache.spark.sql.catalyst.expressions.{And, PredicateHelper}
+import org.apache.spark.sql.catalyst.plans._
+import org.apache.spark.sql.catalyst.plans.logical.{Filter, Join, LogicalPlan}
+import org.apache.spark.sql.catalyst.rules.Rule
+
+/**
+ * Try converting join condition to conjunctive normal form expression so that 
more predicates may
+ * be able to be pushed down.
+ * To avoid expanding the join condition, the join condition will be kept in 
the original form even
+ * when predicate pushdown happens.
+ */
+object PushCNFPredicateThroughJoin extends Rule[LogicalPlan] with 
PredicateHelper {
+  def apply(plan: LogicalPlan): LogicalPlan = plan transform {
+    case j @ Join(left, right, joinType, Some(joinCondition), hint) =>
+      val predicates = conjunctiveNormalForm(joinCondition)
+      if (predicates.isEmpty) {
+        j
+      } else {
+        val pushDownCandidates = predicates.filter(_.deterministic)
+        lazy val leftFilterConditions =
+          pushDownCandidates.filter(_.references.subsetOf(left.outputSet))
+        lazy val rightFilterConditions =
+          pushDownCandidates.filter(_.references.subsetOf(right.outputSet))
+
+        lazy val newLeft =
+          leftFilterConditions.reduceLeftOption(And).map(Filter(_, 
left)).getOrElse(left)
+        lazy val newRight =
+          rightFilterConditions.reduceLeftOption(And).map(Filter(_, 
right)).getOrElse(right)
+
+        joinType match {
+          case _: InnerLike | LeftSemi =>
+            Join(newLeft, newRight, joinType, Some(joinCondition), hint)
+          case RightOuter =>
+            Join(newLeft, right, RightOuter, Some(joinCondition), hint)
+          case LeftOuter | LeftAnti | ExistenceJoin(_) =>
+            Join(left, newRight, joinType, Some(joinCondition), hint)
+          case FullOuter => j
+          case NaturalJoin(_) => sys.error("Untransformed NaturalJoin node")
+          case UsingJoin(_, _) => sys.error("Untransformed Using join node")

Review comment:
       ```
             case NaturalJoin(_) => sys.error("Untransformed NaturalJoin node")
             case UsingJoin(_, _) => sys.error("Untransformed Using join node")
   ```
   Do we need these error checks inside this rule?

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
##########
@@ -198,6 +200,97 @@ trait PredicateHelper {
     case e: Unevaluable => false
     case e => e.children.forall(canEvaluateWithinJoin)
   }
+
+  /**
+   * Convert an expression into conjunctive normal form.
+   * Definition and algorithm: 
https://en.wikipedia.org/wiki/Conjunctive_normal_form
+   * CNF can explode exponentially in the size of the input expression when 
converting Or clauses.
+   * Use a configuration MAX_CNF_NODE_COUNT to prevent such cases.
+   *
+   * @param condition to be conversed into CNF.
+   * @return If the number of expressions exceeds threshold on converting Or, 
return Seq.empty.
+   *         If the conversion repeatedly expands nondeterministic 
expressions, return Seq.empty.
+   *         Otherwise, return the converted result as sequence of disjunctive 
expressions.
+   */
+  def conjunctiveNormalForm(condition: Expression): Seq[Expression] = {
+    val postOrderNodes = postOrderTraversal(condition)
+    val resultStack = new mutable.Stack[Seq[Expression]]
+    val maxCnfNodeCount = SQLConf.get.maxCnfNodeCount
+    // Bottom up approach to get CNF of sub-expressions
+    while (postOrderNodes.nonEmpty) {
+      val cnf = postOrderNodes.pop() match {
+        case _: And =>
+          val right: Seq[Expression] = resultStack.pop()
+          val left: Seq[Expression] = resultStack.pop()
+          left ++ right
+        case _: Or =>
+          // For each side, there is no need to expand predicates of the same 
references.

Review comment:
       `the same references.` -> `the same table references.`?

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/PushCNFPredicateThroughJoin.scala
##########
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.optimizer
+
+import org.apache.spark.sql.catalyst.expressions.{And, PredicateHelper}
+import org.apache.spark.sql.catalyst.plans._
+import org.apache.spark.sql.catalyst.plans.logical.{Filter, Join, LogicalPlan}
+import org.apache.spark.sql.catalyst.rules.Rule
+
+/**
+ * Try converting join condition to conjunctive normal form expression so that 
more predicates may
+ * be able to be pushed down.
+ * To avoid expanding the join condition, the join condition will be kept in 
the original form even
+ * when predicate pushdown happens.
+ */
+object PushCNFPredicateThroughJoin extends Rule[LogicalPlan] with 
PredicateHelper {
+  def apply(plan: LogicalPlan): LogicalPlan = plan transform {
+    case j @ Join(left, right, joinType, Some(joinCondition), hint) =>
+      val predicates = conjunctiveNormalForm(joinCondition)
+      if (predicates.isEmpty) {
+        j
+      } else {
+        val pushDownCandidates = predicates.filter(_.deterministic)
+        lazy val leftFilterConditions =
+          pushDownCandidates.filter(_.references.subsetOf(left.outputSet))
+        lazy val rightFilterConditions =
+          pushDownCandidates.filter(_.references.subsetOf(right.outputSet))
+
+        lazy val newLeft =
+          leftFilterConditions.reduceLeftOption(And).map(Filter(_, 
left)).getOrElse(left)
+        lazy val newRight =
+          rightFilterConditions.reduceLeftOption(And).map(Filter(_, 
right)).getOrElse(right)
+
+        joinType match {
+          case _: InnerLike | LeftSemi =>
+            Join(newLeft, newRight, joinType, Some(joinCondition), hint)
+          case RightOuter =>
+            Join(newLeft, right, RightOuter, Some(joinCondition), hint)
+          case LeftOuter | LeftAnti | ExistenceJoin(_) =>
+            Join(left, newRight, joinType, Some(joinCondition), hint)
+          case FullOuter => j

Review comment:
       To avoid unnecessary computation, could we filter out this case in line 
33? 
https://github.com/apache/spark/pull/28733/files#diff-a1c24ad0e0caa15f53ff24287aeb09feR33




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to