Github user HyukjinKwon commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21388#discussion_r189756939
  
    --- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala ---
    @@ -474,8 +477,80 @@ abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
         }
       }
     
    -  // Can we automate these 'pass through' operations?
       object BasicOperators extends Strategy {
    +
    +    import universe._
    +
    +    // Enumerate the pairs of logical and physical plans that can be transformed via
    +    // 'pass-through', which is possible when the only difference between the primary
    +    // constructor parameters of the two plans is LogicalPlan vs. SparkPlan.
    +    // The map should exclude pairs for which 'pass-through' would need to rely on a
    +    // default value of a constructor parameter.
    +    val passThroughOperators: Map[Class[_ <: LogicalPlan], Class[_ <: SparkPlan]] = Map(
    +      (classOf[logical.DeserializeToObject], classOf[execution.DeserializeToObjectExec]),
    +      (classOf[logical.SerializeFromObject], classOf[execution.SerializeFromObjectExec]),
    +      (classOf[logical.MapPartitions], classOf[execution.MapPartitionsExec]),
    +      (classOf[logical.FlatMapGroupsInR], classOf[execution.FlatMapGroupsInRExec]),
    +      (classOf[logical.FlatMapGroupsInPandas], classOf[execution.python.FlatMapGroupsInPandasExec]),
    +      (classOf[logical.AppendColumnsWithObject], classOf[execution.AppendColumnsWithObjectExec]),
    +      (classOf[logical.MapGroups], classOf[execution.MapGroupsExec]),
    +      (classOf[logical.CoGroup], classOf[execution.CoGroupExec]),
    +      (classOf[logical.Project], classOf[execution.ProjectExec]),
    +      (classOf[logical.Filter], classOf[execution.FilterExec]),
    +      (classOf[logical.Window], classOf[execution.window.WindowExec]),
    +      (classOf[logical.Sample], classOf[execution.SampleExec])
    +    )
    +
    +    lazy val operatorToConstructorParameters: Map[Class[_ <: LogicalPlan], Seq[(String, Type)]] =
    +      passThroughOperators.map {
    +        case (srcOpCls, _) =>
    --- End diff --
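
    For context, a minimal editorial sketch (not the PR's actual code) of how the primary-constructor parameters could be enumerated with Scala runtime reflection; `primaryConstructorParams` is a hypothetical helper name:

    ```scala
    import scala.reflect.runtime.{universe => ru}

    // Sketch: collect (name, type) pairs of a class's primary constructor parameters.
    def primaryConstructorParams(cls: Class[_]): Seq[(String, ru.Type)] = {
      val mirror = ru.runtimeMirror(cls.getClassLoader)
      val ctor = mirror.classSymbol(cls).primaryConstructor.asMethod
      ctor.paramLists.flatten.map(p => (p.name.toString, p.typeSignature))
    }
    ```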
    
    nit:
    
    ```
    .map { case (srcOpCls, _) =>
    ```
    
    per https://github.com/databricks/scala-style-guide#pattern-matching
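
    Purely to illustrate the suggested style (the mapped body is elided in the diff above, so the right-hand side here is hypothetical), the expression could read as follows, reusing the `primaryConstructorParams` sketch from earlier:

    ```scala
    // Illustrative only: the closure opens on the `.map {` line and the `case`
    // pattern destructures each map entry in place of an explicit parameter.
    lazy val operatorToConstructorParameters: Map[Class[_ <: LogicalPlan], Seq[(String, ru.Type)]] =
      passThroughOperators.map { case (srcOpCls, _) =>
        srcOpCls -> primaryConstructorParams(srcOpCls)
      }
    ```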

