gengliangwang commented on code in PR #54134:
URL: https://github.com/apache/spark/pull/54134#discussion_r2785722946


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/MaxMinByK.scala:
##########
@@ -0,0 +1,314 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions.aggregate
+
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.analysis.{ExpressionBuilder, 
TypeCheckResult}
+import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.trees.TernaryLike
+import org.apache.spark.sql.catalyst.types.DataTypeUtils
+import org.apache.spark.sql.catalyst.util.{GenericArrayData, TypeUtils}
+import org.apache.spark.sql.errors.DataTypeErrors.toSQLId
+import org.apache.spark.sql.errors.QueryCompilationErrors
+import org.apache.spark.sql.types._
+
+/**
+ * Returns top/bottom K values ordered by orderingExpr.
+ * Uses a heap (min-heap for max_by, max-heap for min_by) to efficiently 
maintain K elements.
+ * This is the internal implementation used by max_by(x, y, k) and min_by(x, 
y, k).
+ */
+case class MaxMinByK(
+    valueExpr: Expression,
+    orderingExpr: Expression,
+    kExpr: Expression,
+    reverse: Boolean = false,
+    mutableAggBufferOffset: Int = 0,
+    inputAggBufferOffset: Int = 0)
+  extends ImperativeAggregate
+  with TernaryLike[Expression]
+  with ImplicitCastInputTypes {
+
+  def this(valueExpr: Expression, orderingExpr: Expression, kExpr: Expression) 
=
+    this(valueExpr, orderingExpr, kExpr, false, 0, 0)
+
+  def this(valueExpr: Expression, orderingExpr: Expression, kExpr: Expression, 
reverse: Boolean) =
+    this(valueExpr, orderingExpr, kExpr, reverse, 0, 0)
+
+  final val MAX_K = 100000
+  lazy val k: Int = {
+    if (!kExpr.foldable) {
+      throw new IllegalArgumentException(
+        s"The third argument of $prettyName must be a foldable expression, 
got: $kExpr")
+    }
+    val kValue = kExpr.eval() match {
+      case i: Int if i > 0 => i
+      case l: Long if l > 0 && l <= Int.MaxValue => l.toInt
+      case s: Short if s > 0 => s.toInt
+      case b: Byte if b > 0 => b.toInt
+      case other =>
+        throw new IllegalArgumentException(
+          s"The third argument of $prettyName must be a positive integer, got: 
$other")
+    }
+    if (kValue > MAX_K) {
+      throw new IllegalArgumentException(
+        s"The third argument of $prettyName must be at most $MAX_K, got: 
$kValue")
+    }
+    kValue
+  }
+
+  override def first: Expression = valueExpr
+  override def second: Expression = orderingExpr
+  override def third: Expression = kExpr
+
+  override def prettyName: String = if (reverse) "min_by" else "max_by"
+
+  // The default aggregation result is an empty array, which is not nullable.
+  override def nullable: Boolean = false
+
+  override def dataType: DataType = ArrayType(valueExpr.dataType, containsNull 
= true)
+
+  override def inputTypes: Seq[AbstractDataType] = Seq(
+    AnyDataType,
+    AnyDataType,
+    IntegralType

Review Comment:
   If this is `IntegerType`, Spark will automatically add a cast over `kExpr`, and we can
avoid the pattern match in
   ```
   val kValue = kExpr.eval() match {
         case i: Int if i > 0 => i
         case l: Long if l > 0 && l <= Int.MaxValue => l.toInt
         case s: Short if s > 0 => s.toInt
         case b: Byte if b > 0 => b.toInt
         case other =>
           throw new IllegalArgumentException(
             s"The third argument of $prettyName must be a positive integer, 
got: $other")
       }
   ```



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/MaxMinByKHeap.scala:
##########
@@ -0,0 +1,172 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions.aggregate
+
+import java.nio.{ByteBuffer, ByteOrder}
+
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData}
+import org.apache.spark.sql.types.DataType
+
+/**
+ * Helper for MaxMinByK aggregate providing heap operations.
+ * Heap operates on indices to avoid copying large values.
+ *
+ * Binary heap layout: [size (4 bytes), idx0 (4 bytes), idx1 (4 bytes), ..., 
idx(k-1) (4 bytes)]
+ * Total size: (k + 1) * 4 bytes
+ *
+ * All integers are stored in little-endian byte order for direct binary 
manipulation.
+ */
+object MaxMinByKHeap {
+
+  def getSize(heap: Array[Byte]): Int =
+    ByteBuffer.wrap(heap, 0, 4).order(ByteOrder.LITTLE_ENDIAN).getInt
+
+  def setSize(heap: Array[Byte], size: Int): Unit =
+    ByteBuffer.wrap(heap, 0, 4).order(ByteOrder.LITTLE_ENDIAN).putInt(size)
+
+  def getIdx(heap: Array[Byte], pos: Int): Int =
+    ByteBuffer.wrap(heap, (pos + 1) * 4, 
4).order(ByteOrder.LITTLE_ENDIAN).getInt
+
+  def setIdx(heap: Array[Byte], pos: Int, idx: Int): Unit =
+    ByteBuffer.wrap(heap, (pos + 1) * 4, 
4).order(ByteOrder.LITTLE_ENDIAN).putInt(idx)
+
+  def swap(heap: Array[Byte], i: Int, j: Int): Unit = {
+    val tmp = getIdx(heap, i)
+    setIdx(heap, i, getIdx(heap, j))
+    setIdx(heap, j, tmp)
+  }
+
+  def siftUp(
+      heap: Array[Byte],
+      pos: Int,
+      orderings: Array[Any],
+      compare: (Any, Any) => Int): Unit = {
+    var current = pos
+    while (current > 0) {
+      val parent = (current - 1) / 2
+      val curOrd = orderings(getIdx(heap, current))
+      val parOrd = orderings(getIdx(heap, parent))
+
+      if (compare(curOrd, parOrd) < 0) {
+        swap(heap, current, parent)
+        current = parent
+      } else {
+        return
+      }
+    }
+  }
+
+  def siftDown(
+      heap: Array[Byte],
+      pos: Int,
+      size: Int,
+      orderings: Array[Any],
+      compare: (Any, Any) => Int): Unit = {
+    var current = pos
+    while (2 * current + 1 < size) {
+      val left = 2 * current + 1
+      val right = left + 1
+      val leftOrd = orderings(getIdx(heap, left))
+
+      val preferred = if (right < size) {
+        val rightOrd = orderings(getIdx(heap, right))
+        if (compare(rightOrd, leftOrd) < 0) right else left
+      } else {
+        left
+      }
+
+      val curOrd = orderings(getIdx(heap, current))
+      val prefOrd = orderings(getIdx(heap, preferred))
+      if (compare(curOrd, prefOrd) <= 0) {
+        return
+      }
+
+      swap(heap, current, preferred)
+      current = preferred
+    }
+  }
+
+  /**
+   * Insert element into heap. If heap is full, replaces root if new element 
is better.
+   */
+  def insert(
+      value: Any,
+      ord: Any,
+      k: Int,
+      valuesArr: Array[Any],
+      orderingsArr: Array[Any],
+      heap: Array[Byte],
+      compare: (Any, Any) => Int): Unit = {
+    val size = getSize(heap)
+    if (size < k) {
+      valuesArr(size) = InternalRow.copyValue(value)
+      orderingsArr(size) = InternalRow.copyValue(ord)
+
+      setIdx(heap, size, size)
+      siftUp(heap, size, orderingsArr, compare)
+      setSize(heap, size + 1)
+    } else if (compare(ord, orderingsArr(getIdx(heap, 0))) > 0) {
+      val idx = getIdx(heap, 0)
+      valuesArr(idx) = InternalRow.copyValue(value)
+      orderingsArr(idx) = InternalRow.copyValue(ord)
+
+      siftDown(heap, 0, size, orderingsArr, compare)
+    }
+  }
+
+  /**
+   * Get mutable array from buffer for in-place updates.
+   * Converts UnsafeArrayData (after spill) to GenericArrayData.
+   */
+  def getMutableArray(buffer: InternalRow, offset: Int, elementType: 
DataType): Array[Any] = {
+    buffer.getArray(offset) match {
+      case g: GenericArrayData =>
+        g.array.asInstanceOf[Array[Any]]
+      case other =>
+        val size = other.numElements()
+        val newArr = new Array[Any](size)
+
+        for (i <- 0 until size) {
+          if (!other.isNullAt(i)) {
+            newArr(i) = InternalRow.copyValue(other.get(i, elementType))
+          }
+        }
+
+        val newArrayData = new GenericArrayData(newArr)
+        buffer.update(offset, newArrayData)
+        newArr
+    }
+  }
+
+  /**
+   * Get mutable heap binary buffer from buffer for in-place updates.
+   * Copies the binary data if needed (e.g., after spill to UnsafeRow).

Review Comment:
   Is this true? With `SpecificInternalRow`, `getBinary` returns the direct 
reference, so in-place mutations are visible.



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/MaxMinByK.scala:
##########
@@ -0,0 +1,314 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions.aggregate
+
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.analysis.{ExpressionBuilder, 
TypeCheckResult}
+import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.trees.TernaryLike
+import org.apache.spark.sql.catalyst.types.DataTypeUtils
+import org.apache.spark.sql.catalyst.util.{GenericArrayData, TypeUtils}
+import org.apache.spark.sql.errors.DataTypeErrors.toSQLId
+import org.apache.spark.sql.errors.QueryCompilationErrors
+import org.apache.spark.sql.types._
+
+/**
+ * Returns top/bottom K values ordered by orderingExpr.
+ * Uses a heap (min-heap for max_by, max-heap for min_by) to efficiently 
maintain K elements.
+ * This is the internal implementation used by max_by(x, y, k) and min_by(x, 
y, k).
+ */
+case class MaxMinByK(
+    valueExpr: Expression,
+    orderingExpr: Expression,
+    kExpr: Expression,
+    reverse: Boolean = false,
+    mutableAggBufferOffset: Int = 0,
+    inputAggBufferOffset: Int = 0)
+  extends ImperativeAggregate
+  with TernaryLike[Expression]
+  with ImplicitCastInputTypes {
+
+  def this(valueExpr: Expression, orderingExpr: Expression, kExpr: Expression) 
=
+    this(valueExpr, orderingExpr, kExpr, false, 0, 0)
+
+  def this(valueExpr: Expression, orderingExpr: Expression, kExpr: Expression, 
reverse: Boolean) =
+    this(valueExpr, orderingExpr, kExpr, reverse, 0, 0)
+
+  final val MAX_K = 100000
+  lazy val k: Int = {
+    if (!kExpr.foldable) {
+      throw new IllegalArgumentException(
+        s"The third argument of $prettyName must be a foldable expression, 
got: $kExpr")
+    }
+    val kValue = kExpr.eval() match {
+      case i: Int if i > 0 => i
+      case l: Long if l > 0 && l <= Int.MaxValue => l.toInt
+      case s: Short if s > 0 => s.toInt
+      case b: Byte if b > 0 => b.toInt
+      case other =>
+        throw new IllegalArgumentException(
+          s"The third argument of $prettyName must be a positive integer, got: 
$other")
+    }
+    if (kValue > MAX_K) {
+      throw new IllegalArgumentException(
+        s"The third argument of $prettyName must be at most $MAX_K, got: 
$kValue")
+    }
+    kValue
+  }
+
+  override def first: Expression = valueExpr
+  override def second: Expression = orderingExpr
+  override def third: Expression = kExpr
+
+  override def prettyName: String = if (reverse) "min_by" else "max_by"
+
+  // The default aggregation result is an empty array, which is not nullable.
+  override def nullable: Boolean = false
+
+  override def dataType: DataType = ArrayType(valueExpr.dataType, containsNull 
= true)
+
+  override def inputTypes: Seq[AbstractDataType] = Seq(
+    AnyDataType,
+    AnyDataType,
+    IntegralType
+  )
+
+  private lazy val valuesAttr = AttributeReference(
+    "values",
+    ArrayType(valueExpr.dataType, containsNull = true),
+    nullable = false
+  )()
+  private lazy val orderingsAttr = AttributeReference(
+    "orderings",
+    ArrayType(orderingExpr.dataType, containsNull = true),
+    nullable = false
+  )()
+  private lazy val heapIndicesAttr = AttributeReference(
+    "heapIndices",
+    BinaryType,
+    nullable = false
+  )()
+
+  override lazy val aggBufferAttributes: Seq[AttributeReference] =
+    Seq(valuesAttr, orderingsAttr, heapIndicesAttr)
+
+  private val VALUES_OFFSET = 0
+  private val ORDERINGS_OFFSET = 1
+  private val HEAP_OFFSET = 2
+
+  override lazy val inputAggBufferAttributes: Seq[AttributeReference] =
+    aggBufferAttributes.map(_.newInstance())
+
+  override def aggBufferSchema: StructType = 
DataTypeUtils.fromAttributes(aggBufferAttributes)
+  override def defaultResult: Option[Literal] = Option(Literal.create(Array(), 
dataType))
+
+  override def checkInputDataTypes(): TypeCheckResult = {

Review Comment:
   We should at least keep all the `checkInputDataTypes` logic from 
`ImplicitCastInputTypes` here.



##########
sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala:
##########
@@ -1134,6 +1134,169 @@ class DataFrameAggregateSuite extends QueryTest
     }
   }
 
+  test("max_by and min_by with k") {
+    // Basic: string values, integer ordering
+    checkAnswer(
+      sql("""SELECT max_by(x, y, 2), min_by(x, y, 2)

Review Comment:
   nit: let's reformat the string.
   For example:
   ```
           sql(
             """
               |SELECT x.str, COUNT(*)
               |FROM df x JOIN df y ON x.str = y.str
               |GROUP BY x.str
           """.stripMargin)
   ```
        



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/MaxMinByK.scala:
##########
@@ -0,0 +1,314 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions.aggregate
+
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.analysis.{ExpressionBuilder, 
TypeCheckResult}
+import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.trees.TernaryLike
+import org.apache.spark.sql.catalyst.types.DataTypeUtils
+import org.apache.spark.sql.catalyst.util.{GenericArrayData, TypeUtils}
+import org.apache.spark.sql.errors.DataTypeErrors.toSQLId
+import org.apache.spark.sql.errors.QueryCompilationErrors
+import org.apache.spark.sql.types._
+
+/**
+ * Returns top/bottom K values ordered by orderingExpr.
+ * Uses a heap (min-heap for max_by, max-heap for min_by) to efficiently 
maintain K elements.
+ * This is the internal implementation used by max_by(x, y, k) and min_by(x, 
y, k).
+ */
+case class MaxMinByK(
+    valueExpr: Expression,
+    orderingExpr: Expression,
+    kExpr: Expression,
+    reverse: Boolean = false,
+    mutableAggBufferOffset: Int = 0,
+    inputAggBufferOffset: Int = 0)
+  extends ImperativeAggregate
+  with TernaryLike[Expression]
+  with ImplicitCastInputTypes {
+
+  def this(valueExpr: Expression, orderingExpr: Expression, kExpr: Expression) 
=
+    this(valueExpr, orderingExpr, kExpr, false, 0, 0)
+
+  def this(valueExpr: Expression, orderingExpr: Expression, kExpr: Expression, 
reverse: Boolean) =
+    this(valueExpr, orderingExpr, kExpr, reverse, 0, 0)
+
+  final val MAX_K = 100000
+  lazy val k: Int = {
+    if (!kExpr.foldable) {
+      throw new IllegalArgumentException(
+        s"The third argument of $prettyName must be a foldable expression, 
got: $kExpr")
+    }
+    val kValue = kExpr.eval() match {
+      case i: Int if i > 0 => i
+      case l: Long if l > 0 && l <= Int.MaxValue => l.toInt
+      case s: Short if s > 0 => s.toInt
+      case b: Byte if b > 0 => b.toInt
+      case other =>
+        throw new IllegalArgumentException(
+          s"The third argument of $prettyName must be a positive integer, got: 
$other")
+    }
+    if (kValue > MAX_K) {
+      throw new IllegalArgumentException(
+        s"The third argument of $prettyName must be at most $MAX_K, got: 
$kValue")
+    }
+    kValue
+  }
+
+  override def first: Expression = valueExpr
+  override def second: Expression = orderingExpr
+  override def third: Expression = kExpr
+
+  override def prettyName: String = if (reverse) "min_by" else "max_by"
+
+  // The default aggregation result is an empty array, which is not nullable.
+  override def nullable: Boolean = false
+
+  override def dataType: DataType = ArrayType(valueExpr.dataType, containsNull 
= true)
+
+  override def inputTypes: Seq[AbstractDataType] = Seq(
+    AnyDataType,

Review Comment:
   Does it support struct/array/map types?



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/MaxMinByK.scala:
##########
@@ -0,0 +1,314 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions.aggregate
+
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.analysis.{ExpressionBuilder, 
TypeCheckResult}
+import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.trees.TernaryLike
+import org.apache.spark.sql.catalyst.types.DataTypeUtils
+import org.apache.spark.sql.catalyst.util.{GenericArrayData, TypeUtils}
+import org.apache.spark.sql.errors.DataTypeErrors.toSQLId
+import org.apache.spark.sql.errors.QueryCompilationErrors
+import org.apache.spark.sql.types._
+
+/**
+ * Returns top/bottom K values ordered by orderingExpr.
+ * Uses a heap (min-heap for max_by, max-heap for min_by) to efficiently 
maintain K elements.
+ * This is the internal implementation used by max_by(x, y, k) and min_by(x, 
y, k).
+ */
+case class MaxMinByK(
+    valueExpr: Expression,
+    orderingExpr: Expression,
+    kExpr: Expression,
+    reverse: Boolean = false,
+    mutableAggBufferOffset: Int = 0,
+    inputAggBufferOffset: Int = 0)
+  extends ImperativeAggregate
+  with TernaryLike[Expression]
+  with ImplicitCastInputTypes {
+
+  def this(valueExpr: Expression, orderingExpr: Expression, kExpr: Expression) 
=
+    this(valueExpr, orderingExpr, kExpr, false, 0, 0)
+
+  def this(valueExpr: Expression, orderingExpr: Expression, kExpr: Expression, 
reverse: Boolean) =
+    this(valueExpr, orderingExpr, kExpr, reverse, 0, 0)
+
+  final val MAX_K = 100000
+  lazy val k: Int = {
+    if (!kExpr.foldable) {
+      throw new IllegalArgumentException(
+        s"The third argument of $prettyName must be a foldable expression, 
got: $kExpr")
+    }
+    val kValue = kExpr.eval() match {

Review Comment:
   The `ConstantFolding` rule will convert `kExpr` to a literal.



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/MaxMinByK.scala:
##########
@@ -0,0 +1,314 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions.aggregate
+
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.analysis.{ExpressionBuilder, 
TypeCheckResult}
+import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.trees.TernaryLike
+import org.apache.spark.sql.catalyst.types.DataTypeUtils
+import org.apache.spark.sql.catalyst.util.{GenericArrayData, TypeUtils}
+import org.apache.spark.sql.errors.DataTypeErrors.toSQLId
+import org.apache.spark.sql.errors.QueryCompilationErrors
+import org.apache.spark.sql.types._
+
+/**
+ * Returns top/bottom K values ordered by orderingExpr.
+ * Uses a heap (min-heap for max_by, max-heap for min_by) to efficiently 
maintain K elements.
+ * This is the internal implementation used by max_by(x, y, k) and min_by(x, 
y, k).
+ */
+case class MaxMinByK(
+    valueExpr: Expression,
+    orderingExpr: Expression,
+    kExpr: Expression,
+    reverse: Boolean = false,
+    mutableAggBufferOffset: Int = 0,
+    inputAggBufferOffset: Int = 0)
+  extends ImperativeAggregate
+  with TernaryLike[Expression]
+  with ImplicitCastInputTypes {
+
+  def this(valueExpr: Expression, orderingExpr: Expression, kExpr: Expression) 
=
+    this(valueExpr, orderingExpr, kExpr, false, 0, 0)
+
+  def this(valueExpr: Expression, orderingExpr: Expression, kExpr: Expression, 
reverse: Boolean) =
+    this(valueExpr, orderingExpr, kExpr, reverse, 0, 0)
+
+  final val MAX_K = 100000
+  lazy val k: Int = {
+    if (!kExpr.foldable) {
+      throw new IllegalArgumentException(
+        s"The third argument of $prettyName must be a foldable expression, 
got: $kExpr")
+    }
+    val kValue = kExpr.eval() match {
+      case i: Int if i > 0 => i
+      case l: Long if l > 0 && l <= Int.MaxValue => l.toInt
+      case s: Short if s > 0 => s.toInt
+      case b: Byte if b > 0 => b.toInt
+      case other =>
+        throw new IllegalArgumentException(
+          s"The third argument of $prettyName must be a positive integer, got: 
$other")
+    }
+    if (kValue > MAX_K) {
+      throw new IllegalArgumentException(
+        s"The third argument of $prettyName must be at most $MAX_K, got: 
$kValue")
+    }
+    kValue
+  }
+
+  override def first: Expression = valueExpr
+  override def second: Expression = orderingExpr
+  override def third: Expression = kExpr
+
+  override def prettyName: String = if (reverse) "min_by" else "max_by"
+
+  // The default aggregation result is an empty array, which is not nullable.
+  override def nullable: Boolean = false
+
+  override def dataType: DataType = ArrayType(valueExpr.dataType, containsNull 
= true)
+
+  override def inputTypes: Seq[AbstractDataType] = Seq(
+    AnyDataType,
+    AnyDataType,
+    IntegralType
+  )
+
+  private lazy val valuesAttr = AttributeReference(
+    "values",
+    ArrayType(valueExpr.dataType, containsNull = true),
+    nullable = false
+  )()
+  private lazy val orderingsAttr = AttributeReference(
+    "orderings",
+    ArrayType(orderingExpr.dataType, containsNull = true),
+    nullable = false
+  )()
+  private lazy val heapIndicesAttr = AttributeReference(
+    "heapIndices",
+    BinaryType,
+    nullable = false
+  )()
+
+  override lazy val aggBufferAttributes: Seq[AttributeReference] =
+    Seq(valuesAttr, orderingsAttr, heapIndicesAttr)
+
+  private val VALUES_OFFSET = 0
+  private val ORDERINGS_OFFSET = 1
+  private val HEAP_OFFSET = 2
+
+  override lazy val inputAggBufferAttributes: Seq[AttributeReference] =
+    aggBufferAttributes.map(_.newInstance())
+
+  override def aggBufferSchema: StructType = 
DataTypeUtils.fromAttributes(aggBufferAttributes)
+  override def defaultResult: Option[Literal] = Option(Literal.create(Array(), 
dataType))
+
+  override def checkInputDataTypes(): TypeCheckResult = {
+    if (!kExpr.foldable) {
+      DataTypeMismatch(
+        errorSubClass = "NON_FOLDABLE_INPUT",
+        messageParameters = Map(
+          "inputName" -> "k",
+          "inputType" -> kExpr.dataType.catalogString,
+          "inputExpr" -> kExpr.sql
+        )
+      )
+    } else {
+      val orderingCheck =
+        TypeUtils.checkForOrderingExpr(orderingExpr.dataType, prettyName)
+      if (orderingCheck.isSuccess) {
+        try {
+          val _ = k
+          TypeCheckResult.TypeCheckSuccess
+        } catch {
+          case _: IllegalArgumentException =>
+            DataTypeMismatch(
+              errorSubClass = "VALUE_OUT_OF_RANGE",
+              messageParameters = Map(
+                "exprName" -> toSQLId("k"),
+                "valueRange" -> s"[1, $MAX_K]",
+                "currentValue" -> kExpr.sql
+              )
+            )
+        }
+      } else {
+        orderingCheck
+      }
+    }
+  }
+
+  @transient private lazy val ordering: Ordering[Any] =
+    TypeUtils.getInterpretedOrdering(orderingExpr.dataType)
+
+  // max_by uses min-heap (smaller at top), min_by uses max-heap (larger at 
top)
+  private def heapCompare(ordA: Any, ordB: Any): Int =
+    if (reverse) -ordering.compare(ordA, ordB) else ordering.compare(ordA, 
ordB)
+
+  override def initialize(buffer: InternalRow): Unit = {
+    val offset = mutableAggBufferOffset
+    buffer.update(offset + VALUES_OFFSET, new GenericArrayData(new 
Array[Any](k)))
+    buffer.update(offset + ORDERINGS_OFFSET, new GenericArrayData(new 
Array[Any](k)))
+    // heapBytes is binary: [size (4 bytes), idx0 (4 bytes), ..., idx(k-1) (4 
bytes)]
+    val heapBytes = new Array[Byte]((k + 1) * 4)
+    // size is already 0 (zero-initialized)
+    buffer.update(offset + HEAP_OFFSET, heapBytes)
+  }
+
+  override def update(mutableAggBuffer: InternalRow, inputRow: InternalRow): 
Unit = {
+    val ord = orderingExpr.eval(inputRow)
+    if (ord == null) return
+
+    val value = valueExpr.eval(inputRow)
+    val offset = mutableAggBufferOffset
+
+    val valuesArr = MaxMinByKHeap.getMutableArray(
+      mutableAggBuffer, offset + VALUES_OFFSET, valueExpr.dataType)
+    val orderingsArr = MaxMinByKHeap.getMutableArray(
+      mutableAggBuffer, offset + ORDERINGS_OFFSET, orderingExpr.dataType)
+    val heap = MaxMinByKHeap.getMutableHeap(mutableAggBuffer, offset + 
HEAP_OFFSET)
+
+    MaxMinByKHeap.insert(value, ord, k, valuesArr, orderingsArr, heap, 
heapCompare)
+  }
+
+  override def merge(mutableAggBuffer: InternalRow, inputAggBuffer: 
InternalRow): Unit = {
+    val offset = mutableAggBufferOffset
+    val inOff = inputAggBufferOffset
+
+    val valuesArr = MaxMinByKHeap.getMutableArray(
+      mutableAggBuffer, offset + VALUES_OFFSET, valueExpr.dataType)
+    val orderingsArr = MaxMinByKHeap.getMutableArray(
+      mutableAggBuffer, offset + ORDERINGS_OFFSET, orderingExpr.dataType)
+    val heap = MaxMinByKHeap.getMutableHeap(mutableAggBuffer, offset + 
HEAP_OFFSET)
+
+    val inputValues = MaxMinByKHeap.readArray(
+      inputAggBuffer.getArray(inOff + VALUES_OFFSET), valueExpr.dataType)
+    val inputOrderings = MaxMinByKHeap.readArray(
+      inputAggBuffer.getArray(inOff + ORDERINGS_OFFSET), orderingExpr.dataType)
+    val inputHeap = inputAggBuffer.getBinary(inOff + HEAP_OFFSET)
+    val inputHeapSize = MaxMinByKHeap.getSize(inputHeap)
+
+    for (i <- 0 until inputHeapSize) {
+      val idx = MaxMinByKHeap.getIdx(inputHeap, i)
+      val inputOrd = inputOrderings(idx)
+      if (inputOrd != null) {
+        MaxMinByKHeap.insert(inputValues(idx), inputOrd, k, valuesArr, 
orderingsArr, heap,
+          heapCompare)
+      }
+    }
+  }
+
+  override def eval(buffer: InternalRow): Any = {
+    val offset = mutableAggBufferOffset
+
+    val valuesArr = MaxMinByKHeap.getMutableArray(
+      buffer, offset + VALUES_OFFSET, valueExpr.dataType)
+    val orderingsArr = MaxMinByKHeap.getMutableArray(
+      buffer, offset + ORDERINGS_OFFSET, orderingExpr.dataType)
+    val heap = MaxMinByKHeap.getMutableHeap(buffer, offset + HEAP_OFFSET)
+    val heapSize = MaxMinByKHeap.getSize(heap)
+
+    val elements = new Array[(Any, Any)](heapSize)
+    for (i <- 0 until heapSize) {
+      elements(i) = (InternalRow.copyValue(valuesArr(i)), orderingsArr(i))
+    }
+
+    // Sort result array (heap maintains K elements but not in sorted order).
+    val sorted = if (reverse) {
+      elements.sortWith { (a, b) => ordering.compare(a._2, b._2) < 0 }
+    } else {
+      elements.sortWith { (a, b) => ordering.compare(a._2, b._2) > 0 }
+    }
+    new GenericArrayData(sorted.map(_._1))
+  }
+
+  override def withNewMutableAggBufferOffset(newOffset: Int): 
ImperativeAggregate =
+    copy(mutableAggBufferOffset = newOffset)
+
+  override def withNewInputAggBufferOffset(newOffset: Int): 
ImperativeAggregate =
+    copy(inputAggBufferOffset = newOffset)
+
+  override protected def withNewChildrenInternal(
+      newFirst: Expression,
+      newSecond: Expression,
+      newThird: Expression): MaxMinByK =
+    copy(valueExpr = newFirst, orderingExpr = newSecond, kExpr = newThird)
+}
+
+// scalastyle:off line.size.limit
+@ExpressionDescription(
+  usage = """
+    _FUNC_(x, y) - Returns the value of `x` associated with the maximum value 
of `y`.
+    _FUNC_(x, y, k) - Returns an array of the `k` values of `x` associated 
with the
+    maximum values of `y`, sorted in descending order by `y` (since 4.2.0).
+  """,
+  examples = """
+    Examples:
+      > SELECT _FUNC_(x, y) FROM VALUES ('a', 10), ('b', 50), ('c', 20) AS 
tab(x, y);
+       b
+      > SELECT _FUNC_(x, y, 2) FROM VALUES ('a', 10), ('b', 50), ('c', 20) AS 
tab(x, y);
+       ["b","c"]
+  """,
+  note = """
+    The function is non-deterministic so the output order can be different for
+    those associated the same values of `y`.
+  """,
+  group = "agg_funcs",
+  since = "3.0.0")
+// scalastyle:on line.size.limit
+object MaxByBuilder extends ExpressionBuilder {
+  override def build(funcName: String, expressions: Seq[Expression]): 
Expression = {
+    expressions.length match {
+      case 2 => MaxBy(expressions(0), expressions(1))
+      case 3 => new MaxMinByK(expressions(0), expressions(1), expressions(2), 
reverse = false)
+      case n =>
+        throw QueryCompilationErrors.wrongNumArgsError(funcName, Seq(2, 3), n)
+    }
+  }
+}
+
+// scalastyle:off line.size.limit
+@ExpressionDescription(
+  usage = """
+    _FUNC_(x, y) - Returns the value of `x` associated with the minimum value 
of `y`.
+    _FUNC_(x, y, k) - Returns an array of the `k` values of `x` associated 
with the
+    minimum values of `y`, sorted in ascending order by `y` (since 4.2.0).
+  """,
+  examples = """
+    Examples:
+      > SELECT _FUNC_(x, y) FROM VALUES ('a', 10), ('b', 50), ('c', 20) AS 
tab(x, y);
+       a
+      > SELECT _FUNC_(x, y, 2) FROM VALUES ('a', 10), ('b', 50), ('c', 20) AS 
tab(x, y);
+       ["a","c"]
+  """,
+  note = """
+    The function is non-deterministic so the output order can be different for
+    those associated the same values of `y`.
+  """,
+  group = "agg_funcs",
+  since = "3.0.0")

Review Comment:
   Should the `since` version be Spark 4.2?



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/MaxMinByK.scala:
##########
@@ -0,0 +1,314 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions.aggregate
+
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.analysis.{ExpressionBuilder, 
TypeCheckResult}
+import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.trees.TernaryLike
+import org.apache.spark.sql.catalyst.types.DataTypeUtils
+import org.apache.spark.sql.catalyst.util.{GenericArrayData, TypeUtils}
+import org.apache.spark.sql.errors.DataTypeErrors.toSQLId
+import org.apache.spark.sql.errors.QueryCompilationErrors
+import org.apache.spark.sql.types._
+
/**
 * Returns top/bottom K values ordered by orderingExpr.
 * Uses a heap (min-heap for max_by, max-heap for min_by) to efficiently maintain K elements.
 * This is the internal implementation used by max_by(x, y, k) and min_by(x, y, k).
 *
 * The aggregation buffer holds three slots (see VALUES_OFFSET / ORDERINGS_OFFSET /
 * HEAP_OFFSET): an array of candidate values, a parallel array of their ordering keys,
 * and a binary-encoded heap of indices into those arrays.
 *
 * @param valueExpr    expression producing the values returned in the result array
 * @param orderingExpr expression whose ordering decides which values are kept
 * @param kExpr        foldable expression for K; must evaluate to a positive integral
 *                     value no larger than MAX_K
 * @param reverse      false for max_by (keep the K largest), true for min_by
 *                     (keep the K smallest)
 * @param mutableAggBufferOffset offset of this aggregate's slots in the mutable buffer
 * @param inputAggBufferOffset   offset of this aggregate's slots in the input buffer
 */
case class MaxMinByK(
    valueExpr: Expression,
    orderingExpr: Expression,
    kExpr: Expression,
    reverse: Boolean = false,
    mutableAggBufferOffset: Int = 0,
    inputAggBufferOffset: Int = 0)
  extends ImperativeAggregate
  with TernaryLike[Expression]
  with ImplicitCastInputTypes {

  // Auxiliary constructors used by the function builders (offsets default to 0).
  def this(valueExpr: Expression, orderingExpr: Expression, kExpr: Expression) =
    this(valueExpr, orderingExpr, kExpr, false, 0, 0)

  def this(valueExpr: Expression, orderingExpr: Expression, kExpr: Expression, reverse: Boolean) =
    this(valueExpr, orderingExpr, kExpr, reverse, 0, 0)

  // Upper bound on K: the buffer pre-allocates K slots in initialize(), so this
  // caps per-group memory.
  final val MAX_K = 100000

  // K extracted from kExpr. Evaluating this lazy val performs validation and
  // throws IllegalArgumentException on a non-foldable, non-positive, or
  // out-of-range argument (checkInputDataTypes converts that into an analysis error).
  lazy val k: Int = {
    if (!kExpr.foldable) {
      throw new IllegalArgumentException(
        s"The third argument of $prettyName must be a foldable expression, got: $kExpr")
    }
    val kValue = kExpr.eval() match {
      case i: Int if i > 0 => i
      case l: Long if l > 0 && l <= Int.MaxValue => l.toInt
      case s: Short if s > 0 => s.toInt
      case b: Byte if b > 0 => b.toInt
      case other =>
        throw new IllegalArgumentException(
          s"The third argument of $prettyName must be a positive integer, got: $other")
    }
    if (kValue > MAX_K) {
      throw new IllegalArgumentException(
        s"The third argument of $prettyName must be at most $MAX_K, got: $kValue")
    }
    kValue
  }

  override def first: Expression = valueExpr
  override def second: Expression = orderingExpr
  override def third: Expression = kExpr

  // Surfaces as the user-facing function name in error messages.
  override def prettyName: String = if (reverse) "min_by" else "max_by"

  // The default aggregation result is an empty array, which is not nullable.
  override def nullable: Boolean = false

  override def dataType: DataType = ArrayType(valueExpr.dataType, containsNull = true)

  override def inputTypes: Seq[AbstractDataType] = Seq(
    AnyDataType,
    AnyDataType,
    IntegralType
  )

  // Buffer slot 0: the K candidate values (pre-sized, so entries may be null).
  private lazy val valuesAttr = AttributeReference(
    "values",
    ArrayType(valueExpr.dataType, containsNull = true),
    nullable = false
  )()
  // Buffer slot 1: ordering keys parallel to `values`.
  private lazy val orderingsAttr = AttributeReference(
    "orderings",
    ArrayType(orderingExpr.dataType, containsNull = true),
    nullable = false
  )()
  // Buffer slot 2: binary-encoded heap of indices into the two arrays above.
  private lazy val heapIndicesAttr = AttributeReference(
    "heapIndices",
    BinaryType,
    nullable = false
  )()

  override lazy val aggBufferAttributes: Seq[AttributeReference] =
    Seq(valuesAttr, orderingsAttr, heapIndicesAttr)

  // Relative positions of the three slots within this aggregate's buffer region;
  // must stay in sync with the order of aggBufferAttributes.
  private val VALUES_OFFSET = 0
  private val ORDERINGS_OFFSET = 1
  private val HEAP_OFFSET = 2

  override lazy val inputAggBufferAttributes: Seq[AttributeReference] =
    aggBufferAttributes.map(_.newInstance())

  override def aggBufferSchema: StructType = DataTypeUtils.fromAttributes(aggBufferAttributes)
  override def defaultResult: Option[Literal] = Option(Literal.create(Array(), dataType))

  override def checkInputDataTypes(): TypeCheckResult = {
    if (!kExpr.foldable) {
      DataTypeMismatch(
        errorSubClass = "NON_FOLDABLE_INPUT",
        messageParameters = Map(
          "inputName" -> "k",
          "inputType" -> kExpr.dataType.catalogString,
          "inputExpr" -> kExpr.sql
        )
      )
    } else {
      val orderingCheck =
        TypeUtils.checkForOrderingExpr(orderingExpr.dataType, prettyName)
      if (orderingCheck.isSuccess) {
        try {
          // Force the lazy `k` so its validation runs at analysis time; a failure
          // is reported as an out-of-range error instead of a runtime exception.
          val _ = k
          TypeCheckResult.TypeCheckSuccess
        } catch {
          case _: IllegalArgumentException =>
            DataTypeMismatch(
              errorSubClass = "VALUE_OUT_OF_RANGE",
              messageParameters = Map(
                "exprName" -> toSQLId("k"),
                "valueRange" -> s"[1, $MAX_K]",
                "currentValue" -> kExpr.sql
              )
            )
        }
      } else {
        orderingCheck
      }
    }
  }

  @transient private lazy val ordering: Ordering[Any] =
    TypeUtils.getInterpretedOrdering(orderingExpr.dataType)

  // max_by uses min-heap (smaller at top), min_by uses max-heap (larger at top):
  // the heap root is always the worst retained element, i.e. the first to be evicted.
  private def heapCompare(ordA: Any, ordB: Any): Int =
    if (reverse) -ordering.compare(ordA, ordB) else ordering.compare(ordA, ordB)

  override def initialize(buffer: InternalRow): Unit = {
    val offset = mutableAggBufferOffset
    // Pre-allocate K slots for values and ordering keys; unused slots stay null.
    buffer.update(offset + VALUES_OFFSET, new GenericArrayData(new Array[Any](k)))
    buffer.update(offset + ORDERINGS_OFFSET, new GenericArrayData(new Array[Any](k)))
    // heapBytes is binary: [size (4 bytes), idx0 (4 bytes), ..., idx(k-1) (4 bytes)]
    val heapBytes = new Array[Byte]((k + 1) * 4)
    // size is already 0 (zero-initialized)
    buffer.update(offset + HEAP_OFFSET, heapBytes)
  }

  override def update(mutableAggBuffer: InternalRow, inputRow: InternalRow): Unit = {
    // Rows whose ordering key evaluates to NULL are ignored entirely.
    val ord = orderingExpr.eval(inputRow)
    if (ord == null) return

    val value = valueExpr.eval(inputRow)
    val offset = mutableAggBufferOffset

    val valuesArr = MaxMinByKHeap.getMutableArray(
      mutableAggBuffer, offset + VALUES_OFFSET, valueExpr.dataType)
    val orderingsArr = MaxMinByKHeap.getMutableArray(
      mutableAggBuffer, offset + ORDERINGS_OFFSET, orderingExpr.dataType)
    val heap = MaxMinByKHeap.getMutableHeap(mutableAggBuffer, offset + HEAP_OFFSET)

    // NOTE(review): `value`/`ord` may reference row-reused storage here; this assumes
    // MaxMinByKHeap.insert copies what it retains in the buffer — verify against the helper.
    MaxMinByKHeap.insert(value, ord, k, valuesArr, orderingsArr, heap, heapCompare)
  }

  override def merge(mutableAggBuffer: InternalRow, inputAggBuffer: InternalRow): Unit = {
    val offset = mutableAggBufferOffset
    val inOff = inputAggBufferOffset

    val valuesArr = MaxMinByKHeap.getMutableArray(
      mutableAggBuffer, offset + VALUES_OFFSET, valueExpr.dataType)
    val orderingsArr = MaxMinByKHeap.getMutableArray(
      mutableAggBuffer, offset + ORDERINGS_OFFSET, orderingExpr.dataType)
    val heap = MaxMinByKHeap.getMutableHeap(mutableAggBuffer, offset + HEAP_OFFSET)

    val inputValues = MaxMinByKHeap.readArray(
      inputAggBuffer.getArray(inOff + VALUES_OFFSET), valueExpr.dataType)
    val inputOrderings = MaxMinByKHeap.readArray(
      inputAggBuffer.getArray(inOff + ORDERINGS_OFFSET), orderingExpr.dataType)
    val inputHeap = inputAggBuffer.getBinary(inOff + HEAP_OFFSET)
    val inputHeapSize = MaxMinByKHeap.getSize(inputHeap)

    // Re-insert each live element of the input heap; only the first `inputHeapSize`
    // heap indices refer to populated slots.
    for (i <- 0 until inputHeapSize) {
      val idx = MaxMinByKHeap.getIdx(inputHeap, i)
      val inputOrd = inputOrderings(idx)
      if (inputOrd != null) {
        MaxMinByKHeap.insert(inputValues(idx), inputOrd, k, valuesArr, orderingsArr, heap,
          heapCompare)
      }
    }
  }

  override def eval(buffer: InternalRow): Any = {
    val offset = mutableAggBufferOffset

    val valuesArr = MaxMinByKHeap.getMutableArray(
      buffer, offset + VALUES_OFFSET, valueExpr.dataType)
    val orderingsArr = MaxMinByKHeap.getMutableArray(
      buffer, offset + ORDERINGS_OFFSET, orderingExpr.dataType)
    val heap = MaxMinByKHeap.getMutableHeap(buffer, offset + HEAP_OFFSET)
    val heapSize = MaxMinByKHeap.getSize(heap)

    // Copy values out of the buffer before sorting so the result does not alias
    // mutable buffer storage.
    val elements = new Array[(Any, Any)](heapSize)
    for (i <- 0 until heapSize) {
      elements(i) = (InternalRow.copyValue(valuesArr(i)), orderingsArr(i))
    }

    // Sort result array (heap maintains K elements but not in sorted order).
    // max_by returns values in descending key order, min_by in ascending order.
    val sorted = if (reverse) {
      elements.sortWith { (a, b) => ordering.compare(a._2, b._2) < 0 }
    } else {
      elements.sortWith { (a, b) => ordering.compare(a._2, b._2) > 0 }
    }
    new GenericArrayData(sorted.map(_._1))
  }

  override def withNewMutableAggBufferOffset(newOffset: Int): ImperativeAggregate =
    copy(mutableAggBufferOffset = newOffset)

  override def withNewInputAggBufferOffset(newOffset: Int): ImperativeAggregate =
    copy(inputAggBufferOffset = newOffset)

  override protected def withNewChildrenInternal(
      newFirst: Expression,
      newSecond: Expression,
      newThird: Expression): MaxMinByK =
    copy(valueExpr = newFirst, orderingExpr = newSecond, kExpr = newThird)
}
+
+// scalastyle:off line.size.limit
+@ExpressionDescription(
+  usage = """
+    _FUNC_(x, y) - Returns the value of `x` associated with the maximum value 
of `y`.
+    _FUNC_(x, y, k) - Returns an array of the `k` values of `x` associated 
with the
+    maximum values of `y`, sorted in descending order by `y` (since 4.2.0).
+  """,
+  examples = """
+    Examples:
+      > SELECT _FUNC_(x, y) FROM VALUES ('a', 10), ('b', 50), ('c', 20) AS 
tab(x, y);
+       b
+      > SELECT _FUNC_(x, y, 2) FROM VALUES ('a', 10), ('b', 50), ('c', 20) AS 
tab(x, y);
+       ["b","c"]
+  """,
+  note = """
+    The function is non-deterministic so the output order can be different for
+    those associated the same values of `y`.
+  """,
+  group = "agg_funcs",
+  since = "3.0.0")

Review Comment:
   Same question as above: the 3-argument form is documented as "since 4.2.0" in `usage`, yet `since = "3.0.0"` — should the annotation say Spark 4.2?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]


Reply via email to