zhidongqu-db commented on code in PR #54134: URL: https://github.com/apache/spark/pull/54134#discussion_r2772139747
########## sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/MaxMinByK.scala: ########## @@ -0,0 +1,316 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.catalyst.expressions.aggregate + +import org.apache.spark.sql.catalyst.InternalRow +import org.apache.spark.sql.catalyst.analysis.{ExpressionBuilder, TypeCheckResult} +import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch +import org.apache.spark.sql.catalyst.expressions._ +import org.apache.spark.sql.catalyst.trees.TernaryLike +import org.apache.spark.sql.catalyst.util.{GenericArrayData, TypeUtils} +import org.apache.spark.sql.errors.DataTypeErrors.toSQLId +import org.apache.spark.sql.errors.QueryCompilationErrors +import org.apache.spark.sql.types._ + +/** + * Returns top/bottom K values ordered by orderingExpr. + * Uses a heap (min-heap for max_by, max-heap for min_by) to efficiently maintain K elements. + * This is the internal implementation used by max_by(x, y, k) and min_by(x, y, k). 
+ */ +case class MaxMinByK( + valueExpr: Expression, + orderingExpr: Expression, + kExpr: Expression, + reverse: Boolean = false, + mutableAggBufferOffset: Int = 0, + inputAggBufferOffset: Int = 0) + extends ImperativeAggregate + with TernaryLike[Expression] + with ImplicitCastInputTypes { + + def this(valueExpr: Expression, orderingExpr: Expression, kExpr: Expression) = + this(valueExpr, orderingExpr, kExpr, false, 0, 0) + + def this(valueExpr: Expression, orderingExpr: Expression, kExpr: Expression, reverse: Boolean) = + this(valueExpr, orderingExpr, kExpr, reverse, 0, 0) + + final val MAX_K = 100000 + lazy val k: Int = { + if (!kExpr.foldable) { + throw new IllegalArgumentException( + s"The third argument of $prettyName must be a foldable expression, got: $kExpr") + } + val kValue = kExpr.eval() match { + case i: Int if i > 0 => i + case l: Long if l > 0 && l <= Int.MaxValue => l.toInt + case s: Short if s > 0 => s.toInt + case b: Byte if b > 0 => b.toInt + case other => + throw new IllegalArgumentException( + s"The third argument of $prettyName must be a positive integer, got: $other") + } + if (kValue > MAX_K) { + throw new IllegalArgumentException( + s"The third argument of $prettyName must be at most $MAX_K, got: $kValue") + } + kValue + } + + override def first: Expression = valueExpr + override def second: Expression = orderingExpr + override def third: Expression = kExpr + + override def prettyName: String = if (reverse) "min_by" else "max_by" + + // The default aggregation result is an empty array, which is not nullable. 
+ override def nullable: Boolean = false + + override def dataType: DataType = ArrayType(valueExpr.dataType, containsNull = true) + + override def inputTypes: Seq[AbstractDataType] = Seq( + AnyDataType, + AnyDataType, + IntegralType + ) + + private lazy val valuesAttr = AttributeReference( + "values", + ArrayType(valueExpr.dataType, containsNull = true), + nullable = false + )() + private lazy val orderingsAttr = AttributeReference( + "orderings", + ArrayType(orderingExpr.dataType, containsNull = true), + nullable = false + )() + private lazy val heapIndicesAttr = AttributeReference( + "heapIndices", + ArrayType(IntegerType, containsNull = false), Review Comment: discussed offline, I do think we can still use `BINARY` for `heapIndices` if we want, it does offer more flexibility. But I get that this will incur a cost to serialize/deserialize `BINARY` into `ARRAY[INT]` for each row update (tbh we can also work around it by directly making heap ops operate on `BINARY`), but maybe it's more robust to use `ARRAY[INT]` so definitely not blocking. ########## sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/MaxMinByK.scala: ########## @@ -0,0 +1,316 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.spark.sql.catalyst.expressions.aggregate + +import org.apache.spark.sql.catalyst.InternalRow +import org.apache.spark.sql.catalyst.analysis.{ExpressionBuilder, TypeCheckResult} +import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch +import org.apache.spark.sql.catalyst.expressions._ +import org.apache.spark.sql.catalyst.trees.TernaryLike +import org.apache.spark.sql.catalyst.util.{GenericArrayData, TypeUtils} +import org.apache.spark.sql.errors.DataTypeErrors.toSQLId +import org.apache.spark.sql.errors.QueryCompilationErrors +import org.apache.spark.sql.types._ + +/** + * Returns top/bottom K values ordered by orderingExpr. + * Uses a heap (min-heap for max_by, max-heap for min_by) to efficiently maintain K elements. + * This is the internal implementation used by max_by(x, y, k) and min_by(x, y, k). + */ +case class MaxMinByK( + valueExpr: Expression, + orderingExpr: Expression, + kExpr: Expression, + reverse: Boolean = false, + mutableAggBufferOffset: Int = 0, + inputAggBufferOffset: Int = 0) + extends ImperativeAggregate + with TernaryLike[Expression] + with ImplicitCastInputTypes { + + def this(valueExpr: Expression, orderingExpr: Expression, kExpr: Expression) = + this(valueExpr, orderingExpr, kExpr, false, 0, 0) + + def this(valueExpr: Expression, orderingExpr: Expression, kExpr: Expression, reverse: Boolean) = + this(valueExpr, orderingExpr, kExpr, reverse, 0, 0) + + final val MAX_K = 100000 + lazy val k: Int = { + if (!kExpr.foldable) { + throw new IllegalArgumentException( + s"The third argument of $prettyName must be a foldable expression, got: $kExpr") + } + val kValue = kExpr.eval() match { + case i: Int if i > 0 => i + case l: Long if l > 0 && l <= Int.MaxValue => l.toInt + case s: Short if s > 0 => s.toInt + case b: Byte if b > 0 => b.toInt + case other => + throw new IllegalArgumentException( + s"The third argument of $prettyName must be a positive integer, got: $other") + } + if (kValue > MAX_K) { 
+ throw new IllegalArgumentException( + s"The third argument of $prettyName must be at most $MAX_K, got: $kValue") + } + kValue + } + + override def first: Expression = valueExpr + override def second: Expression = orderingExpr + override def third: Expression = kExpr + + override def prettyName: String = if (reverse) "min_by" else "max_by" + + // The default aggregation result is an empty array, which is not nullable. + override def nullable: Boolean = false + + override def dataType: DataType = ArrayType(valueExpr.dataType, containsNull = true) + + override def inputTypes: Seq[AbstractDataType] = Seq( + AnyDataType, + AnyDataType, + IntegralType + ) + + private lazy val valuesAttr = AttributeReference( + "values", + ArrayType(valueExpr.dataType, containsNull = true), + nullable = false + )() + private lazy val orderingsAttr = AttributeReference( + "orderings", + ArrayType(orderingExpr.dataType, containsNull = true), + nullable = false + )() + private lazy val heapIndicesAttr = AttributeReference( + "heapIndices", + ArrayType(IntegerType, containsNull = false), + nullable = false + )() + + override lazy val aggBufferAttributes: Seq[AttributeReference] = + Seq(valuesAttr, orderingsAttr, heapIndicesAttr) + + private val VALUES_OFFSET = 0 + private val ORDERINGS_OFFSET = 1 + private val HEAP_OFFSET = 2 + + override lazy val inputAggBufferAttributes: Seq[AttributeReference] = + aggBufferAttributes.map(_.newInstance()) + + override def aggBufferSchema: StructType = + StructType(aggBufferAttributes.map(a => StructField(a.name, a.dataType, a.nullable))) Review Comment: `DataTypeUtils.fromAttributes(aggBufferAttributes)` -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
To unsubscribe, e-mail: [email protected] For queries about this service, please contact Infrastructure at: [email protected] --------------------------------------------------------------------- To unsubscribe, e-mail: [email protected] For additional commands, e-mail: [email protected]
