cboumalh commented on code in PR #52883:
URL: https://github.com/apache/spark/pull/52883#discussion_r2550362774


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/tuplesketchesAggregates.scala:
##########
@@ -0,0 +1,843 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions.aggregate
+
+import org.apache.datasketches.tuple.{Intersection, Sketch, Summary, Union, UpdatableSketch,
+  UpdatableSketchBuilder, UpdatableSummary}
+
+import org.apache.spark.SparkUnsupportedOperationException
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.expressions.{ExpectsInputTypes, Expression,
+  ExpressionDescription, Literal}
+import org.apache.spark.sql.catalyst.expressions.aggregate.TypedImperativeAggregate
+import org.apache.spark.sql.catalyst.trees.{QuaternaryLike, TernaryLike}
+import org.apache.spark.sql.catalyst.util.{ArrayData, CollationFactory, ThetaSketchUtils}
+import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.internal.types.StringTypeWithCollation
+import org.apache.spark.sql.types.{AbstractDataType, ArrayType, BinaryType, DataType, DoubleType,
+  FloatType, IntegerType, LongType, StringType, StructType}
+import org.apache.spark.unsafe.types.UTF8String
+
+sealed trait TupleSketchState {
+  def serialize(): Array[Byte]
+  def eval(): Array[Byte]
+}
+case class UpdatableTupleSketchBuffer[U, S <: UpdatableSummary[U]](sketch: UpdatableSketch[U, S])
+    extends TupleSketchState {
+  override def serialize(): Array[Byte] = sketch.compact.toByteArray
+  override def eval(): Array[Byte] = sketch.compact.toByteArray
+}
+case class UnionTupleAggregationBuffer[S <: Summary](union: Union[S]) extends TupleSketchState {
+  override def serialize(): Array[Byte] = union.getResult.toByteArray
+  override def eval(): Array[Byte] = union.getResult.toByteArray
+}
+case class IntersectionTupleAggregationBuffer[S <: Summary](intersection: Intersection[S])
+    extends TupleSketchState {
+  override def serialize(): Array[Byte] = intersection.getResult.toByteArray
+  override def eval(): Array[Byte] = intersection.getResult.toByteArray
+}
+case class FinalizedTupleSketch[S <: Summary](sketch: Sketch[S]) extends TupleSketchState {
+  override def serialize(): Array[Byte] = sketch.toByteArray
+  override def eval(): Array[Byte] = sketch.toByteArray
+}
+
+/**
+ * The TupleSketchAgg function utilizes a Datasketches TupleSketch instance to count a
+ * probabilistic approximation of the number of unique values in a given column with associated
+ * summary values, and outputs the binary representation of the TupleSketch.
+ *
+ * See [[https://datasketches.apache.org/docs/Tuple/TupleSketches.html]] for more information.
+ *
+ * @param child
+ *   child expression (struct with key and summary value) against which unique counting will occur
+ * @param lgNomEntriesExpr
+ *   the log-base-2 of nomEntries decides the number of buckets for the sketch
+ * @param summaryType
+ *   the type of summary (double, integer, string)
+ * @param mode
+ *   the aggregation mode for numeric summaries (sum, min, max, alwaysone)
+ * @param mutableAggBufferOffset
+ *   offset for mutable aggregation buffer
+ * @param inputAggBufferOffset
+ *   offset for input aggregation buffer
+ */
+// scalastyle:off line.size.limit
+@ExpressionDescription(
+  usage = """
+    _FUNC_(expr[, lgNomEntries[, summaryType[, mode]]]) - Returns the TupleSketch compact binary representation.
+      `expr` should be a struct with key and summary value fields.
+      `lgNomEntries` (optional) is the log-base-2 of nominal entries, with nominal entries deciding
+      the number of buckets or slots for the TupleSketch. Default is 12.
+      `summaryType` (optional) is the type of summary (double, integer, string). Default is double.
+      `mode` (optional) is the aggregation mode for numeric summaries (sum, min, max, alwaysone). Default is sum. """,
+  examples = """
+    Examples:
+      > SELECT tuple_sketch_estimate(_FUNC_(struct(col, 1.0D), 12, 'double', 'sum')) FROM VALUES (1), (1), (2), (2), (3) tab(col);
+       3.0
+  """,
+  group = "agg_funcs",
+  since = "4.2.0")
+// scalastyle:on line.size.limit
+case class TupleSketchAgg(
+    child: Expression,
+    lgNomEntriesExpr: Option[Expression],
+    summaryTypeExpr: Expression,
+    modeExpr: Expression,
+    override val mutableAggBufferOffset: Int,
+    override val inputAggBufferOffset: Int)
+    extends TypedImperativeAggregate[TupleSketchState]
+    with TupleSketchAggregateBase
+    with QuaternaryLike[Expression]
+    with ExpectsInputTypes {
+
+  // Constructors
+
+  def this(child: Expression) = {
+    this(
+      child,
+      Some(Literal(ThetaSketchUtils.DEFAULT_LG_NOM_LONGS)),
+      Literal(ThetaSketchUtils.SUMMARY_TYPE_DOUBLE),
+      Literal(ThetaSketchUtils.MODE_SUM),
+      0,
+      0)
+  }
+
+  def this(child: Expression, lgNomEntriesExpr: Expression) = {
+    this(
+      child,
+      Some(lgNomEntriesExpr),
+      Literal(ThetaSketchUtils.SUMMARY_TYPE_DOUBLE),
+      Literal(ThetaSketchUtils.MODE_SUM),
+      0,
+      0)
+  }
+
+  def this(child: Expression, lgNomEntriesExpr: Expression, summaryTypeExpr: Expression) = {
+    this(child, Some(lgNomEntriesExpr), summaryTypeExpr, Literal(ThetaSketchUtils.MODE_SUM), 0, 0)
+  }
+
+  def this(
+      child: Expression,
+      lgNomEntriesExpr: Expression,
+      summaryTypeExpr: Expression,
+      modeExpr: Expression) = {
+    this(child, Some(lgNomEntriesExpr), summaryTypeExpr, modeExpr, 0, 0)
+  }
+
+  // Copy constructors required by ImperativeAggregate
+
+  override def withNewMutableAggBufferOffset(newMutableAggBufferOffset: Int): TupleSketchAgg =
+    copy(mutableAggBufferOffset = newMutableAggBufferOffset)
+
+  override def withNewInputAggBufferOffset(newInputAggBufferOffset: Int): TupleSketchAgg =
+    copy(inputAggBufferOffset = newInputAggBufferOffset)
+
+  override protected def withNewChildrenInternal(
+      newFirst: Expression,
+      newSecond: Expression,
+      newThird: Expression,
+      newFourth: Expression): TupleSketchAgg =
+    copy(
+      child = newFirst,
+      lgNomEntriesExpr = Some(newSecond),
+      summaryTypeExpr = newThird,
+      modeExpr = newFourth)
+
+  // Overrides for TypedImperativeAggregate
+
+  override def prettyName: String = "tuple_sketch_agg"
+
+  override def inputTypes: Seq[AbstractDataType] =
+    Seq(
+      StructType,
+      IntegerType,
+      StringTypeWithCollation(supportsTrimCollation = true),
+      StringTypeWithCollation(supportsTrimCollation = true))
+
+  override def dataType: DataType = BinaryType
+
+  override def nullable: Boolean = false
+
+  override def first: Expression = child
+  override def second: Expression =
+    lgNomEntriesExpr.getOrElse(Literal(ThetaSketchUtils.DEFAULT_LG_NOM_LONGS))
+  override def third: Expression = summaryTypeExpr
+  override def fourth: Expression = modeExpr
+
+  /**
+   * Extract and cache the key and summary value types from the input struct. Field 0 is the key
+   * type, Field 1 is the summary value type.
+   *
+   * Note: The asInstanceOf[StructType] cast is safe because inputTypes enforces that the first
+   * parameter must be StructType. This is validated during query analysis before execution.
+   */
+  private lazy val structType = child.dataType.asInstanceOf[StructType]
+  private lazy val keyType = structType.fields(0).dataType
+  private lazy val valueType = structType.fields(1).dataType
+
+  /**
+   * Factory for creating summary objects based on the input summary type and aggregation mode.
+   */
+  private lazy val summaryFactoryInput =
+    ThetaSketchUtils.getSummaryFactory(summaryTypeInput, modeInput)
+
+  /**
+   * Instantiate an UpdatableSketch instance using the lgNomEntries param and summary factory.
+   *
+   * @return
+   *   an UpdatableSketch instance wrapped with UpdatableTupleSketchBuffer
+   */
+  override def createAggregationBuffer(): TupleSketchState = {
+    val builder = new UpdatableSketchBuilder[Any, UpdatableSummary[Any]](summaryFactoryInput)
+    builder.setNominalEntries(1 << lgNomEntriesInput)
+    val sketch = builder.build()
+    UpdatableTupleSketchBuffer(sketch)
+  }
+
+  /**
+   * Evaluate the input row and update the UpdatableSketch instance with the row's key and summary
+   * value. The update function only supports a subset of Spark SQL types, and an exception will
+   * be thrown for unsupported types. Notes:
+   *   - Null values are ignored.
+   *   - Empty byte arrays are ignored.
+   *   - Empty arrays of supported element types are ignored.
+   *   - Strings that are collation-equal to the empty string are ignored.
+   *
+   * @param updateBuffer
+   *   A previously initialized UpdatableSketch instance
+   * @param input
+   *   An input row
+   */
+  override def update(updateBuffer: TupleSketchState, input: InternalRow): TupleSketchState = {
+    // Return early for null values.
+    val structValue = child.eval(input)
+    if (structValue == null) return updateBuffer
+
+    // Safe: child.eval() returns InternalRow when child.dataType is StructType

Review Comment:
   Yes, `checkInputDataTypes` will throw an error if it is not a struct type. Example below:
   
   ```
   val result = spark.sql("""
        |   SELECT tuple_sketch_agg(col, 15)
        |   FROM VALUES (50, 1.0D), (60, 2.0D), (60, 3.0D) tab(col, val)
        | """)
        | 
        | result.show()
   org.apache.spark.sql.catalyst.ExtendedAnalysisException: [DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE]
   Cannot resolve "tuple_sketch_agg(col, 15, double, sum)" due to data type mismatch: The first
   parameter requires the "STRUCT" type, however "col" has the type "INT". SQLSTATE: 42K09; line 2 pos 9;
   ```
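   
   For contrast, a minimal sketch of the struct-based call that should pass analysis, assuming the `tuple_sketch_agg` / `tuple_sketch_estimate` functions from this PR and the same VALUES data:
   
   ```
   // Hypothetical correct usage: wrap the key column and the summary value in a struct,
   // since the first parameter requires the STRUCT type.
   val ok = spark.sql("""
     SELECT tuple_sketch_estimate(tuple_sketch_agg(struct(col, val), 15, 'double', 'sum'))
     FROM VALUES (50, 1.0D), (60, 2.0D), (60, 3.0D) tab(col, val)
   """)
   ok.show()
   ```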



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

