sunchao commented on code in PR #45267:
URL: https://github.com/apache/spark/pull/45267#discussion_r1538630610


##########
sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/functions/ReducibleFunction.java:
##########
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.connector.catalog.functions;
+
+import org.apache.spark.annotation.Evolving;
+
+/**
+ * Base class for user-defined functions that can be 'reduced' on another 
function.
+ *
+ * A function f_source(x) is 'reducible' on another function f_target(x) if
+ * <ul>
+ *   <li> There exists a reducer function r(x) such that r(f_source(x)) = 
f_target(x)
+ *        for all input x, or </li>
+ *   <li> More generally, there exist reducer functions r1(x) and r2(x) such 
that
+ *        r1(f_source(x)) = r2(f_target(x)) for all input x. </li>
+ * </ul>
+ * <p>
+ * Examples:
+ * <ul>
+ *    <li>Bucket functions where one side has reducer
+ *    <ul>
+ *        <li>f_source(x) = bucket(4, x)</li>
+ *        <li>f_target(x) = bucket(2, x)</li>
+ *        <li>r(x) = x % 2</li>
+ *    </ul>
+ *
+ *    <li>Bucket functions where both sides have reducer
+ *    <ul>
+ *        <li>f_source(x) = bucket(16, x)</li>
+ *        <li>f_target(x) = bucket(12, x)</li>
+ *        <li>r1(x) = x % 4</li>
+ *        <li>r2(x) = x % 4</li>
+ *    </ul>
+ *
+ *    <li>Date functions
+ *    <ul>
+ *        <li>f_source(x) = days(x)</li>
+ *        <li>f_target(x) = hours(x)</li>
+ *        <li>r(x) = x / 24</li>
+ *     </ul>
+ * </ul>
+ * @param <I> reducer function input type
+ * @param <O> reducer function output type
+ * @since 4.0.0
+ */
+@Evolving
+public interface ReducibleFunction<I, O> {
+
+  /**
+   * This method is for parameterized functions.
+   *
+   * If this parameterized function is 'reducible' on another bucket function,
+   * return the {@link Reducer} function.
+   * <p>
+   * Example to return reducer for reducing f_source = bucket(4, x) on 
f_target = bucket(2, x)
+   * <ul>
+   *     <li>thisBucketFunction = bucket</li>
+   *     <li>thisNumBuckets = 4</li>
+   *     <li>otherBucketFunction = bucket</li>
+   *     <li>otherNumBuckets = 2</li>
+   * </ul>
+   *
+   * @param thisNumBuckets parameter for this function
+   * @param otherBucketFunction the other parameterized function
+   * @param otherNumBuckets parameter for the other function
+   * @return a reduction function if it is reducible, null if not
+   */
+  default Reducer<I, O> bucketReducer(
+    int thisNumBuckets,
+    ReducibleFunction<?, ?> otherBucketFunction,
+    int otherNumBuckets) {
+    throw new UnsupportedOperationException();
+  }
+
+  /**
+   * This method is for all other functions.
+   *
+   * If this function is 'reducible' on another function, return the {@link 
Reducer} function.
+   * <p>
+   * Example of reducing f_source = days(x) on f_target = hours(x)
+   * <ul>
+   *     <li>thisFunction = days</li>
+   *     <li>otherFunction = hours</li>
+   * </ul>
+   *
+   * @param otherFunction the other function
+   * @return a reduction function if it is reducible, null if not.
+   */
+  default Reducer<I, O> bucketReducer(ReducibleFunction<?, ?> otherFunction) {

Review Comment:
   should this just be `reducer`?



##########
sql/core/src/main/scala/org/apache/spark/sql/execution/exchange/EnsureRequirements.scala:
##########
@@ -527,25 +545,38 @@ case class EnsureRequirements(
         joinType == LeftAnti || joinType == LeftOuter
   }
 
-  // Populate the common partition values down to the scan nodes
-  private def populatePartitionValues(
+  // Populate the common partition information down to the scan nodes
+  private def populateCommonPartitionInfo(
       plan: SparkPlan,
       values: Seq[(InternalRow, Int)],
       joinKeyPositions: Option[Seq[Int]],
+      reducers: Option[Seq[Option[Reducer[_, _]]]],
       applyPartialClustering: Boolean,
       replicatePartitions: Boolean): SparkPlan = plan match {
     case scan: BatchScanExec =>
       scan.copy(
         spjParams = scan.spjParams.copy(
           commonPartitionValues = Some(values),
           joinKeyPositions = joinKeyPositions,
+          reducers = reducers,
           applyPartialClustering = applyPartialClustering,
           replicatePartitions = replicatePartitions
         )
       )
     case node =>
-      node.mapChildren(child => populatePartitionValues(
-        child, values, joinKeyPositions, applyPartialClustering, 
replicatePartitions))
+      node.mapChildren(child => populateCommonPartitionInfo(
+        child, values, joinKeyPositions, reducers, applyPartialClustering, 
replicatePartitions))
+  }
+
+  private def reduceCommonPartValues(commonPartValues: Seq[(InternalRow, Int)],

Review Comment:
   ditto, style



##########
sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/functions/ReducibleFunction.java:
##########
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.connector.catalog.functions;
+
+import org.apache.spark.annotation.Evolving;
+
+/**
+ * Base class for user-defined functions that can be 'reduced' on another 
function.
+ *
+ * A function f_source(x) is 'reducible' on another function f_target(x) if
+ * <ul>
+ *   <li> There exists a reducer function r(x) such that r(f_source(x)) = 
f_target(x)
+ *        for all input x, or </li>
+ *   <li> More generally, there exist reducer functions r1(x) and r2(x) such 
that
+ *        r1(f_source(x)) = r2(f_target(x)) for all input x. </li>
+ * </ul>
+ * <p>
+ * Examples:
+ * <ul>
+ *    <li>Bucket functions where one side has reducer
+ *    <ul>
+ *        <li>f_source(x) = bucket(4, x)</li>
+ *        <li>f_target(x) = bucket(2, x)</li>
+ *        <li>r(x) = x % 2</li>
+ *    </ul>
+ *
+ *    <li>Bucket functions where both sides have reducer
+ *    <ul>
+ *        <li>f_source(x) = bucket(16, x)</li>
+ *        <li>f_target(x) = bucket(12, x)</li>
+ *        <li>r1(x) = x % 4</li>
+ *        <li>r2(x) = x % 4</li>
+ *    </ul>
+ *
+ *    <li>Date functions
+ *    <ul>
+ *        <li>f_source(x) = days(x)</li>
+ *        <li>f_target(x) = hours(x)</li>
+ *        <li>r(x) = x / 24</li>
+ *     </ul>
+ * </ul>
+ * @param <I> reducer function input type
+ * @param <O> reducer function output type
+ * @since 4.0.0
+ */
+@Evolving
+public interface ReducibleFunction<I, O> {
+
+  /**
+   * This method is for parameterized functions.
+   *
+   * If this parameterized function is 'reducible' on another bucket function,
+   * return the {@link Reducer} function.
+   * <p>
+   * Example to return reducer for reducing f_source = bucket(4, x) on 
f_target = bucket(2, x)
+   * <ul>
+   *     <li>thisBucketFunction = bucket</li>
+   *     <li>thisNumBuckets = 4</li>
+   *     <li>otherBucketFunction = bucket</li>
+   *     <li>otherNumBuckets = 2</li>
+   * </ul>
+   *
+   * @param thisNumBuckets parameter for this function
+   * @param otherBucketFunction the other parameterized function
+   * @param otherNumBuckets parameter for the other function
+   * @return a reduction function if it is reducible, null if not
+   */
+  default Reducer<I, O> bucketReducer(
+    int thisNumBuckets,

Review Comment:
   I think we may need 4 space indentation here too



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala:
##########
@@ -846,6 +879,21 @@ case class KeyGroupedShuffleSpec(
   }
 }
 
+object KeyGroupedShuffleSpec {
+  def reducePartitionValue(
+    row: InternalRow,

Review Comment:
   nit: indentation is off: should be:
   ```scala
     def reducePartitionValue(
         row: InternalRow,
         expressions: Seq[Expression],
         reducers: Seq[Option[Reducer[_, _]]]):
       InternalRowComparableWrapper = {
       val partitionVals = row.toSeq(expressions.map(_.dataType))
       val reducedRow = partitionVals.zip(reducers).map{
         case (v, Some(reducer: Reducer[Any, Any])) => reducer.reduce(v)
         case (v, _) => v
   ```
   
   4 space indentation for method parameters, see 
https://github.com/databricks/scala-style-guide?tab=readme-ov-file#indent



##########
sql/core/src/test/scala/org/apache/spark/sql/connector/catalog/functions/transformFunctions.scala:
##########
@@ -85,6 +85,34 @@ object BucketFunction extends ScalarFunction[Int] {
   override def produceResult(input: InternalRow): Int = {
     (input.getLong(1) % input.getInt(0)).toInt
   }
+
+  override def bucketReducer(

Review Comment:
   ditto



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to