dongjoon-hyun commented on a change in pull request #23383: [SPARK-23817][SQL]
Create file source V2 framework and migrate ORC read path
URL: https://github.com/apache/spark/pull/23383#discussion_r247023858
##########
File path:
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcFilters.scala
##########
@@ -82,22 +82,22 @@ private[sql] object OrcFilters {
*/
def createFilter(schema: StructType, filters: Seq[Filter]):
Option[SearchArgument] = {
val dataTypeMap = schema.map(f => f.name -> f.dataType).toMap
-
- // First, tries to convert each filter individually to see whether it's
convertible, and then
- // collect all convertible ones to build the final `SearchArgument`.
- val convertibleFilters = for {
- filter <- filters
- _ <- buildSearchArgument(dataTypeMap, filter, newBuilder)
- } yield filter
-
for {
// Combines all convertible filters using `And` to produce a single
conjunction
- conjunction <- buildTree(convertibleFilters)
+ conjunction <- buildTree(convertibleFilters(schema, filters))
// Then tries to build a single ORC `SearchArgument` for the conjunction
predicate
builder <- buildSearchArgument(dataTypeMap, conjunction, newBuilder)
} yield builder.build()
}
+ def convertibleFilters(schema: StructType, filters: Seq[Filter]):
Seq[Filter] = {
+ val dataTypeMap = schema.map(f => f.name -> f.dataType).toMap
Review comment:
Let's add a `dataTypeMap` parameter to `convertibleFilters`. Otherwise, this
will be a performance regression.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]