cloud-fan commented on a change in pull request #24068: [SPARK-27105][SQL]
Optimize away exponential complexity in ORC predicate conversion
URL: https://github.com/apache/spark/pull/24068#discussion_r284113800
##########
File path:
sql/core/v1.2.1/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcFilters.scala
##########
@@ -153,92 +189,143 @@ private[sql] object OrcFilters extends OrcFiltersBase {
// Pushing one side of AND down is only safe to do at the top level or
in the child
// AND before hitting NOT or OR conditions, and in this case, the
unsupported predicate
// can be safely removed.
- val leftBuilderOption =
- createBuilder(dataTypeMap, left, newBuilder,
canPartialPushDownConjuncts)
- val rightBuilderOption =
- createBuilder(dataTypeMap, right, newBuilder,
canPartialPushDownConjuncts)
- (leftBuilderOption, rightBuilderOption) match {
- case (Some(_), Some(_)) =>
- for {
- lhs <- createBuilder(dataTypeMap, left,
- builder.startAnd(), canPartialPushDownConjuncts)
- rhs <- createBuilder(dataTypeMap, right, lhs,
canPartialPushDownConjuncts)
- } yield rhs.end()
-
- case (Some(_), None) if canPartialPushDownConjuncts =>
- createBuilder(dataTypeMap, left, builder,
canPartialPushDownConjuncts)
-
- case (None, Some(_)) if canPartialPushDownConjuncts =>
- createBuilder(dataTypeMap, right, builder,
canPartialPushDownConjuncts)
-
+ val lhs =
+ trimNonConvertibleSubtreesImpl(dataTypeMap, left,
canPartialPushDownConjuncts = true)
+ val rhs =
+ trimNonConvertibleSubtreesImpl(dataTypeMap, right,
canPartialPushDownConjuncts = true)
+ (lhs, rhs) match {
+ case (Some(l), Some(r)) => Some(And(l, r))
+ case (Some(_), None) if canPartialPushDownConjuncts => lhs
+ case (None, Some(_)) if canPartialPushDownConjuncts => rhs
case _ => None
}
case Or(left, right) =>
for {
- _ <- createBuilder(dataTypeMap, left, newBuilder,
canPartialPushDownConjuncts = false)
- _ <- createBuilder(dataTypeMap, right, newBuilder,
canPartialPushDownConjuncts = false)
- lhs <- createBuilder(dataTypeMap, left,
- builder.startOr(), canPartialPushDownConjuncts = false)
- rhs <- createBuilder(dataTypeMap, right, lhs,
canPartialPushDownConjuncts = false)
- } yield rhs.end()
+ lhs: Filter <-
+ trimNonConvertibleSubtreesImpl(dataTypeMap, left,
canPartialPushDownConjuncts = false)
+ rhs: Filter <-
+ trimNonConvertibleSubtreesImpl(dataTypeMap, right,
canPartialPushDownConjuncts = false)
+ } yield Or(lhs, rhs)
case Not(child) =>
- for {
- _ <- createBuilder(dataTypeMap, child, newBuilder,
canPartialPushDownConjuncts = false)
- negate <- createBuilder(dataTypeMap,
- child, builder.startNot(), canPartialPushDownConjuncts = false)
- } yield negate.end()
+ val filteredSubtree =
+ trimNonConvertibleSubtreesImpl(dataTypeMap, child,
canPartialPushDownConjuncts = false)
+ filteredSubtree.map(Not(_))
+
+ case EqualTo(attribute, value) if
isSearchableType(dataTypeMap(attribute)) => Some(expression)
+ case EqualNullSafe(attribute, value) if
isSearchableType(dataTypeMap(attribute)) =>
+ Some(expression)
+ case LessThan(attribute, value) if
isSearchableType(dataTypeMap(attribute)) =>
+ Some(expression)
+ case LessThanOrEqual(attribute, value) if
isSearchableType(dataTypeMap(attribute)) =>
+ Some(expression)
+ case GreaterThan(attribute, value) if
isSearchableType(dataTypeMap(attribute)) =>
+ Some(expression)
+ case GreaterThanOrEqual(attribute, value) if
isSearchableType(dataTypeMap(attribute)) =>
+ Some(expression)
+ case IsNull(attribute) if isSearchableType(dataTypeMap(attribute)) =>
Some(expression)
+ case IsNotNull(attribute) if isSearchableType(dataTypeMap(attribute)) =>
Some(expression)
+ case In(attribute, values) if isSearchableType(dataTypeMap(attribute))
=> Some(expression)
+
+ case _ => None
+ }
+ }
+
+ /**
+ * Build a SearchArgument for a Filter that has already been trimmed so as
to only contain
+ * expressions that are convertible to a `SearchArgument`. This allows for a
more efficient and
+ * more readable implementation since there's no need to check every node
before converting it.
+ *
+ * NOTE: If you change the set of supported `Filter` types here, you need to
modify
+ * `trimNonConvertibleSubtreesImpl` accordingly!
+ *
+ * @param dataTypeMap a map from the attribute name to its data type.
+ * @param expression the trimmed input filter predicates.
+ * @param builder the builder so far.
+ * @return
+ */
+ private def createBuilder(
+ dataTypeMap: Map[String, DataType],
+ expression: TrimmedFilter,
+ builder: Builder): Builder = {
+ def getType(attribute: String): PredicateLeaf.Type =
+ getPredicateLeafType(dataTypeMap(attribute))
+
+ import org.apache.spark.sql.sources._
+ def updateBuilder(subexpression: Filter): Unit = subexpression match {
+ case And(left, right) =>
+ builder.startAnd()
+ updateBuilder(left)
+ updateBuilder(right)
+ builder.end()
+
+ case Or(left, right) =>
+ builder.startOr()
+ updateBuilder(left)
+ updateBuilder(right)
+ builder.end()
+
+ case Not(child) =>
+ builder.startNot()
+ updateBuilder(child)
+ builder.end()
// NOTE: For all case branches dealing with leaf predicates below, the
additional `startAnd()`
// call is mandatory. ORC `SearchArgument` builder requires that all
leaf predicates must be
// wrapped by a "parent" predicate (`And`, `Or`, or `Not`).
- case EqualTo(attribute, value) if
isSearchableType(dataTypeMap(attribute)) =>
+ case EqualTo(attribute, value) =>
val quotedName = quoteAttributeNameIfNeeded(attribute)
Review comment:
> Add the assert to every single case. This seems a bit too verbose.
I think it's as verbose as adding an `if isSearchableType(...)` for every
case and I'm ok with it.
> The TrimmedFilter is a way to alert the developer at compile time that
they shouldn't be calling this method with anything that's not trimmed
Personally I think it's just a way to force the developers to look at the
doc and see what they should do before calling this method. I don't see a big
value of it, as I don't think there will be more callers of this method. It's
acceptable to fail some tests and alert the developers to change their code.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]