JulianJaffePinterest commented on a change in pull request #11823:
URL: https://github.com/apache/druid/pull/11823#discussion_r749909190
##########
File path: spark/src/main/scala/org/apache/druid/spark/utils/FilterUtils.scala
##########
@@ -0,0 +1,303 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.spark.utils
+
+import org.apache.druid.java.util.common.{ISE, JodaUtils}
+import org.apache.druid.query.filter.{AndDimFilter, BoundDimFilter, DimFilter, InDimFilter,
+  NotDimFilter, OrDimFilter, RegexDimFilter, SelectorDimFilter}
+import org.apache.druid.query.ordering.{StringComparator, StringComparators}
+import org.apache.spark.sql.sources.{And, EqualNullSafe, EqualTo, Filter, GreaterThan,
+  GreaterThanOrEqual, In, IsNotNull, IsNull, LessThan, LessThanOrEqual, Not, Or, StringContains,
+  StringEndsWith, StringStartsWith}
+import org.apache.spark.sql.types.{ArrayType, DataType, DoubleType, FloatType, IntegerType,
+  LongType, StringType, StructType, TimestampType}
+
+import scala.collection.JavaConverters.{seqAsJavaListConverter, setAsJavaSetConverter}
+
+/**
+ * Converters and utilities for working with Spark and Druid Filters.
+ */
+object FilterUtils {
+  /**
+   * Map an array of Spark filters FILTERS to a Druid dim filter, or None if FILTERS is empty.
+   *
+   * We return a DimFilter instead of a Filter and force callers to call .toFilter
+   * or .toOptimizedFilter to get a filter because callers can't convert back to a DimFilter
+   * from a Filter.
+   *
+   * @param filters The Spark filters to map to a Druid filter.
+   * @param schema The schema of the rows the filters will be applied to.
+   * @return A Druid filter corresponding to the conjunction of the filter conditions
+   *         enumerated in FILTERS.
+   */
+  def mapFilters(filters: Array[Filter], schema: StructType): Option[DimFilter] = {
+    if (filters.isEmpty) {
+      Option.empty[DimFilter]
+    } else {
+      Some(new AndDimFilter(filters.map(mapFilter(_, schema)).toList.asJava).optimize())
+    }
+  }
+
+  /**
+   * Convert a Spark-style filter FILTER to a Druid-style filter.
+   *
+   * @param filter The Spark filter to map to a Druid filter.
+   * @param schema The schema of the rows FILTER will be applied to.
+   * @return The Druid filter corresponding to the filter condition described by FILTER.
+   */
+  def mapFilter(filter: Filter, schema: StructType): DimFilter = { // scalastyle:ignore method.length

Review comment:
   There aren't. The main reason to push down some Spark filters to the readers is to be able to parse out bounds on `__time` that we could use to further reduce which segments we open. As a happy benefit, in some cases we can push down filters to the readers and filter out rows before returning them to Spark, but we're not aiming to run the Druid query execution engine in a Spark executor. Any operation that would be evaluated in a Druid `Expression` or `Javascript` filter should instead be handled by Spark. (As an aside, I wouldn't expect Spark to even attempt to push down any predicates that needed UDFs or custom code to be executed.
You can see the set of Spark Filters that can be pushed down in the `isSupportedFilter` function.)
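
   To make the two ideas in this comment concrete, here is a minimal sketch of (a) an allow-list in the spirit of `isSupportedFilter` and (b) parsing `__time` bounds out of pushed-down filters for segment pruning. Everything below is illustrative, not the PR's actual code: `PushDownSketch`, `getTimeBounds`, and the method bodies are hypothetical, and the real `isSupportedFilter` also consults the schema and handles more filter types.

   ```scala
   import org.apache.spark.sql.sources._

   object PushDownSketch {
     // Illustrative allow-list: only filters we know how to translate are accepted;
     // anything that would need a Druid Expression/Javascript filter (UDFs, custom
     // code) falls through to the default case and is left for Spark to evaluate.
     def isSupportedFilter(filter: Filter): Boolean = filter match {
       case _: EqualTo | _: In | _: IsNull | _: IsNotNull => true
       case _: GreaterThan | _: GreaterThanOrEqual | _: LessThan | _: LessThanOrEqual => true
       case And(left, right) => isSupportedFilter(left) && isSupportedFilter(right)
       case Or(left, right) => isSupportedFilter(left) && isSupportedFilter(right)
       case Not(child) => isSupportedFilter(child)
       case _ => false
     }

     // Hypothetical helper: fold pushed-down filters into (lower, upper) millisecond
     // bounds on __time, which a reader could use to skip segments entirely. A real
     // implementation would also convert timestamp-typed values to millis.
     def getTimeBounds(filters: Array[Filter]): (Option[Long], Option[Long]) =
       filters.foldLeft((Option.empty[Long], Option.empty[Long])) {
         case ((lower, upper), GreaterThanOrEqual("__time", value: Long)) =>
           (Some(lower.fold(value)(math.max(_, value))), upper)
         case ((lower, upper), LessThan("__time", value: Long)) =>
           (lower, Some(upper.fold(value)(math.min(_, value))))
         case (bounds, _) => bounds // other filters don't constrain __time
       }
   }
   ```

   Note that the fallback cases are what makes this safe: any filter that isn't matched is still evaluated by Spark after the rows are returned, so pushdown here is an optimization, never a correctness requirement.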
