guykhazma commented on a change in pull request #27112: [SPARK-30428][SQL] File
source V2: support partition pruning
URL: https://github.com/apache/spark/pull/27112#discussion_r364669869
##########
File path:
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PruneFileSourcePartitions.scala
##########
@@ -39,31 +72,35 @@ private[sql] object PruneFileSourcePartitions extends Rule[LogicalPlan] {
_,
_))
if filters.nonEmpty && fsRelation.partitionSchemaOption.isDefined =>
- val normalizedFilters = DataSourceStrategy.normalizeExprs(
- filters.filterNot(SubqueryExpression.hasSubquery),
- logicalRelation.output)
-
- val sparkSession = fsRelation.sparkSession
- val partitionColumns =
- logicalRelation.resolve(
- partitionSchema, sparkSession.sessionState.analyzer.resolver)
- val partitionSet = AttributeSet(partitionColumns)
- val partitionKeyFilters = ExpressionSet(normalizedFilters.filter { f =>
- f.references.subsetOf(partitionSet)
- })
-
+ val partitionKeyFilters = getPartitionKeyFilters(
+ fsRelation.sparkSession, logicalRelation, partitionSchema, filters,
+ logicalRelation.output)
if (partitionKeyFilters.nonEmpty) {
val prunedFileIndex =
catalogFileIndex.filterPartitions(partitionKeyFilters.toSeq)
val prunedFsRelation =
- fsRelation.copy(location = prunedFileIndex)(sparkSession)
+ fsRelation.copy(location = prunedFileIndex)(fsRelation.sparkSession)
Review comment:
@gengliangwang this is useful for enabling data skipping on all file formats,
including formats that don't support filter pushdown (e.g. CSV, JSON), by
replacing the FileIndex implementation with one that also uses the
`dataFilters` to filter the file listing.
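
To illustrate the idea, here is a minimal sketch (not code from this PR) of a
FileIndex wrapper that delegates partition pruning and then uses the
`dataFilters` to drop individual files. `DataSkippingFileIndex` and
`FileStatsStore` are hypothetical names, and the statistics lookup is only a
placeholder:

```scala
import org.apache.hadoop.fs.{FileStatus, Path}
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.execution.datasources.{FileIndex, PartitionDirectory}
import org.apache.spark.sql.types.StructType

// Hypothetical interface answering "could this file contain rows matching
// these filters?", e.g. backed by externally collected min/max statistics.
// Not a Spark API.
trait FileStatsStore {
  def mayMatch(file: FileStatus, dataFilters: Seq[Expression]): Boolean
}

// A FileIndex wrapper: the delegate handles partition pruning, then files
// that the data filters prove irrelevant are dropped from the listing.
class DataSkippingFileIndex(delegate: FileIndex, stats: FileStatsStore)
  extends FileIndex {

  override def listFiles(
      partitionFilters: Seq[Expression],
      dataFilters: Seq[Expression]): Seq[PartitionDirectory] = {
    delegate.listFiles(partitionFilters, dataFilters).map { dir =>
      dir.copy(files = dir.files.filter(f => stats.mayMatch(f, dataFilters)))
    }
  }

  // Everything else is delegated unchanged.
  override def rootPaths: Seq[Path] = delegate.rootPaths
  override def inputFiles: Array[String] = delegate.inputFiles
  override def refresh(): Unit = delegate.refresh()
  override def sizeInBytes: Long = delegate.sizeInBytes
  override def partitionSchema: StructType = delegate.partitionSchema
}
```

Such a wrapper could then be plugged in wherever a FileIndex is built for
CSV/JSON tables, without any format-specific pushdown support.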