cloud-fan commented on a change in pull request #32049:
URL: https://github.com/apache/spark/pull/32049#discussion_r667104587
##########
File path: sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2ScanRelationPushDown.scala
##########
@@ -17,61 +17,179 @@
 package org.apache.spark.sql.execution.datasources.v2
 
-import org.apache.spark.sql.catalyst.expressions.{And, Expression, NamedExpression, ProjectionOverSchema, SubqueryExpression}
-import org.apache.spark.sql.catalyst.planning.ScanOperation
-import org.apache.spark.sql.catalyst.plans.logical.{Filter, LogicalPlan, Project}
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
+import org.apache.spark.sql.catalyst.planning.{OperationHelper, ScanOperation}
+import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, Filter, LeafNode, LogicalPlan, Project}
 import org.apache.spark.sql.catalyst.rules.Rule
-import org.apache.spark.sql.connector.read.{Scan, V1Scan}
+import org.apache.spark.sql.catalyst.util.toPrettySQL
+import org.apache.spark.sql.connector.expressions.Aggregation
+import org.apache.spark.sql.connector.read.{Scan, ScanBuilder, SupportsPushDownAggregates, SupportsPushDownFilters, V1Scan}
 import org.apache.spark.sql.execution.datasources.DataSourceStrategy
 import org.apache.spark.sql.sources
 import org.apache.spark.sql.types.StructType
 
-object V2ScanRelationPushDown extends Rule[LogicalPlan] {
+object V2ScanRelationPushDown extends Rule[LogicalPlan] with AliasHelper
+  with OperationHelper with PredicateHelper {
   import DataSourceV2Implicits._
 
-  override def apply(plan: LogicalPlan): LogicalPlan = plan transformDown {
-    case ScanOperation(project, filters, relation: DataSourceV2Relation) =>
-      val scanBuilder = relation.table.asReadable.newScanBuilder(relation.options)
+  def apply(plan: LogicalPlan): LogicalPlan = {
+    applyColumnPruning(pushdownAggregate(pushDownFilters(createScanBuilder(plan))))
+  }
+
+  private def createScanBuilder(plan: LogicalPlan) = plan.transform {
+    case r: DataSourceV2Relation =>
+      ScanBuilderHolder(r.output, r, r.table.asReadable.newScanBuilder(r.options))
+  }
 
-      val normalizedFilters = DataSourceStrategy.normalizeExprs(filters, relation.output)
+  private def pushDownFilters(plan: LogicalPlan) = plan.transform {
+    // update the scan builder with filter push down and return a new plan with filter pushed
+    case filter @ Filter(_, sHolder: ScanBuilderHolder) =>
+      val (filters, _, _) = collectFilters(filter).get
Review comment:
Why do we need `collectFilters`? The pattern match can only match a single `Filter` node anyway: `case filter @ Filter(_, sHolder: ScanBuilderHolder)`
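
For illustration, a minimal sketch of the simplification this question points at, not the PR's actual code. It assumes the case can bind the filter condition directly, and that `splitConjunctivePredicates` from `PredicateHelper` (already mixed into this object) covers what `collectFilters` was doing for a single node:

```scala
// Sketch only: the pattern already matches exactly one Filter node,
// so its condition can be taken apart directly instead of re-collecting
// filters from the plan.
case Filter(condition, sHolder: ScanBuilderHolder) =>
  // splitConjunctivePredicates (from PredicateHelper) breaks
  // `a AND b AND c` into Seq(a, b, c).
  val filters = splitConjunctivePredicates(condition)
  // ... continue with normalization and push-down as in the diff above ...
```

That would also avoid the unchecked `.get` on the `collectFilters` result.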
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]