cloud-fan commented on a change in pull request #24068: [SPARK-27105][SQL] Optimize away exponential complexity in ORC predicate conversion
URL: https://github.com/apache/spark/pull/24068#discussion_r282046361
 
 

 ##########
 File path: sql/core/v2.3.4/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcFilters.scala
 ##########
 @@ -153,92 +188,140 @@ private[sql] object OrcFilters extends OrcFiltersBase {
         // Pushing one side of AND down is only safe to do at the top level or in the child
         // AND before hitting NOT or OR conditions, and in this case, the unsupported predicate
         // can be safely removed.
-        val leftBuilderOption =
-          createBuilder(dataTypeMap, left, newBuilder, canPartialPushDownConjuncts)
-        val rightBuilderOption =
-          createBuilder(dataTypeMap, right, newBuilder, canPartialPushDownConjuncts)
-        (leftBuilderOption, rightBuilderOption) match {
-          case (Some(_), Some(_)) =>
-            for {
-              lhs <- createBuilder(dataTypeMap, left,
-                builder.startAnd(), canPartialPushDownConjuncts)
-              rhs <- createBuilder(dataTypeMap, right, lhs, canPartialPushDownConjuncts)
-            } yield rhs.end()
-
-          case (Some(_), None) if canPartialPushDownConjuncts =>
-            createBuilder(dataTypeMap, left, builder, canPartialPushDownConjuncts)
-
-          case (None, Some(_)) if canPartialPushDownConjuncts =>
-            createBuilder(dataTypeMap, right, builder, canPartialPushDownConjuncts)
-
+        val lhs =
+          trimNonConvertibleSubtreesImpl(dataTypeMap, left, canPartialPushDownConjuncts = true)
+        val rhs =
+          trimNonConvertibleSubtreesImpl(dataTypeMap, right, canPartialPushDownConjuncts = true)
+        (lhs, rhs) match {
+          case (Some(l), Some(r)) => Some(And(l, r))
+          case (Some(_), None) if canPartialPushDownConjuncts => lhs
+          case (None, Some(_)) if canPartialPushDownConjuncts => rhs
           case _ => None
         }
 
       case Or(left, right) =>
         for {
-          _ <- createBuilder(dataTypeMap, left, newBuilder, canPartialPushDownConjuncts = false)
-          _ <- createBuilder(dataTypeMap, right, newBuilder, canPartialPushDownConjuncts = false)
-          lhs <- createBuilder(dataTypeMap, left,
-            builder.startOr(), canPartialPushDownConjuncts = false)
-          rhs <- createBuilder(dataTypeMap, right, lhs, canPartialPushDownConjuncts = false)
-        } yield rhs.end()
+          lhs <-
+            trimNonConvertibleSubtreesImpl(dataTypeMap, left, canPartialPushDownConjuncts = false)
+          rhs <-
+            trimNonConvertibleSubtreesImpl(dataTypeMap, right, canPartialPushDownConjuncts = false)
+        } yield Or(lhs, rhs)
 
       case Not(child) =>
-        for {
-          _ <- createBuilder(dataTypeMap, child, newBuilder, canPartialPushDownConjuncts = false)
-          negate <- createBuilder(dataTypeMap,
-            child, builder.startNot(), canPartialPushDownConjuncts = false)
-        } yield negate.end()
+        trimNonConvertibleSubtreesImpl(dataTypeMap, child, canPartialPushDownConjuncts = false)
+            .map(Not)
 
       // NOTE: For all case branches dealing with leaf predicates below, the additional `startAnd()`
       // call is mandatory.  ORC `SearchArgument` builder requires that all leaf predicates must be
       // wrapped by a "parent" predicate (`And`, `Or`, or `Not`).
+      case EqualTo(attribute, value) if isSearchableType(dataTypeMap(attribute)) => Some(expression)
+      case EqualNullSafe(attribute, value) if isSearchableType(dataTypeMap(attribute)) =>
+        Some(expression)
+      case LessThan(attribute, value) if isSearchableType(dataTypeMap(attribute)) =>
+        Some(expression)
+      case LessThanOrEqual(attribute, value) if isSearchableType(dataTypeMap(attribute)) =>
+        Some(expression)
+      case GreaterThan(attribute, value) if isSearchableType(dataTypeMap(attribute)) =>
+        Some(expression)
+      case GreaterThanOrEqual(attribute, value) if isSearchableType(dataTypeMap(attribute)) =>
+        Some(expression)
+      case IsNull(attribute) if isSearchableType(dataTypeMap(attribute)) => Some(expression)
+      case IsNotNull(attribute) if isSearchableType(dataTypeMap(attribute)) => Some(expression)
+      case In(attribute, values) if isSearchableType(dataTypeMap(attribute)) => Some(expression)
+
+      case _ => None
+    }
+  }
+
+  /**
+   * Build a SearchArgument for a Filter that has already been trimmed so as to only contain
+   * expressions that are convertible to a `SearchArgument`. This allows for a more efficient and
+   * more readable implementation since there's no need to check every node before converting it.
+   *
+   * NOTE: If you change the set of supported `Filter` types here, you need to modify
+   * `trimNonConvertibleSubtreesImpl` accordingly!
+   *
+   * @param dataTypeMap a map from the attribute name to its data type.
+   * @param expression the trimmed input filter predicates.
+   * @param builder the builder so far.
+   * @return
+   */
+  private def createBuilder(
+      dataTypeMap: Map[String, DataType],
+      expression: TrimmedFilter,
+      builder: Builder): Builder = {
+    def getType(attribute: String): PredicateLeaf.Type =
+      getPredicateLeafType(dataTypeMap(attribute))
+
+    import org.apache.spark.sql.sources._
+    def updateBuilder(subexpression: Filter): Unit = subexpression match {
+      case And(left, right) =>
+        builder.startAnd()
+        updateBuilder(left)
+        updateBuilder(right)
+        builder.end()
+
+      case Or(left, right) =>
+        builder.startOr()
+        updateBuilder(left)
+        updateBuilder(right)
+        builder.end()
+
+      case Not(child) =>
+        builder.startNot()
+        updateBuilder(child)
+        builder.end()
 
       case EqualTo(attribute, value) if isSearchableType(dataTypeMap(attribute)) =>
 
 Review comment:
   we can remove the `if isSearchableType(dataTypeMap(attribute))` now?
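
  For context, a self-contained toy sketch of the two-phase scheme this diff introduces: first trim away subtrees that cannot be converted, then build the result from the trimmed tree without re-checking each node. The types and names below (`trim`, `build`, a string "builder", the `searchable` set) are simplified stand-ins, not the PR's actual code or the ORC API; the point is that every leaf surviving the trim phase has already passed the searchability check, which is why the `isSearchableType` guard in `createBuilder` looks redundant.

  ```scala
  object TrimThenBuildSketch {

    // Simplified stand-ins for org.apache.spark.sql.sources.Filter.
    sealed trait Filter
    case class And(left: Filter, right: Filter) extends Filter
    case class Or(left: Filter, right: Filter) extends Filter
    case class Not(child: Filter) extends Filter
    case class EqualTo(attribute: String, value: Any) extends Filter

    // Phase 1: keep only convertible subtrees. One side of an AND may be dropped, but only while
    // no OR or NOT has been crossed (canPartialPushDown), mirroring the logic in the diff.
    def trim(
        searchable: String => Boolean,
        filter: Filter,
        canPartialPushDown: Boolean): Option[Filter] = filter match {
      case And(l, r) =>
        (trim(searchable, l, canPartialPushDown = true),
          trim(searchable, r, canPartialPushDown = true)) match {
          case (Some(tl), Some(tr)) => Some(And(tl, tr))
          case (some @ Some(_), None) if canPartialPushDown => some
          case (None, some @ Some(_)) if canPartialPushDown => some
          case _ => None
        }
      case Or(l, r) =>
        for {
          tl <- trim(searchable, l, canPartialPushDown = false)
          tr <- trim(searchable, r, canPartialPushDown = false)
        } yield Or(tl, tr)
      case Not(c) =>
        trim(searchable, c, canPartialPushDown = false).map(Not)
      case EqualTo(attribute, _) if searchable(attribute) => Some(filter)
      case _ => None
    }

    // Phase 2: convert the already-trimmed tree. Every leaf reaching this point is known to be
    // searchable, so no per-leaf guard is needed here.
    def build(filter: Filter, out: StringBuilder): StringBuilder = filter match {
      case And(l, r) => out.append("and("); build(l, out); out.append(","); build(r, out); out.append(")")
      case Or(l, r)  => out.append("or(");  build(l, out); out.append(","); build(r, out); out.append(")")
      case Not(c)    => out.append("not("); build(c, out); out.append(")")
      case EqualTo(a, v) => out.append(s"eq($a,$v)")
    }

    def main(args: Array[String]): Unit = {
      val searchable = Set("a", "b")  // pretend only a and b have searchable types
      val pred = And(EqualTo("a", 1), Or(EqualTo("b", 2), EqualTo("c", 3)))
      val trimmed = trim(searchable, pred, canPartialPushDown = true)
      // The OR contains a non-searchable leaf, so the whole OR is dropped;
      // the AND keeps only its left side.
      println(trimmed.map(f => build(f, new StringBuilder).toString))  // Some(eq(a,1))
    }
  }
  ```

  Because each node is visited once per phase, this avoids the exponential blow-up of the old code, which re-converted both children of every `And`/`Or`/`Not` just to probe whether they were convertible.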

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
