dtenedor commented on code in PR #52883:
URL: https://github.com/apache/spark/pull/52883#discussion_r2548089933


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/tuplesketchesAggregates.scala:
##########
@@ -0,0 +1,843 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions.aggregate
+
+import org.apache.datasketches.tuple.{Intersection, Sketch, Summary, Union, 
UpdatableSketch, UpdatableSketchBuilder, UpdatableSummary}
+
+import org.apache.spark.SparkUnsupportedOperationException
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.expressions.{ExpectsInputTypes, 
Expression, ExpressionDescription, Literal}
+import 
org.apache.spark.sql.catalyst.expressions.aggregate.TypedImperativeAggregate
+import org.apache.spark.sql.catalyst.trees.{QuaternaryLike, TernaryLike}
+import org.apache.spark.sql.catalyst.util.{ArrayData, CollationFactory, 
ThetaSketchUtils}
+import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.internal.types.StringTypeWithCollation
+import org.apache.spark.sql.types.{AbstractDataType, ArrayType, BinaryType, 
DataType, DoubleType, FloatType, IntegerType, LongType, StringType, StructType}
+import org.apache.spark.unsafe.types.UTF8String
+
+sealed trait TupleSketchState {
+  def serialize(): Array[Byte]
+  def eval(): Array[Byte]
+}
+case class UpdatableTupleSketchBuffer[U, S <: UpdatableSummary[U]](sketch: 
UpdatableSketch[U, S])
+    extends TupleSketchState {
+  override def serialize(): Array[Byte] = sketch.compact.toByteArray
+  override def eval(): Array[Byte] = sketch.compact.toByteArray
+}
+case class UnionTupleAggregationBuffer[S <: Summary](union: Union[S]) extends 
TupleSketchState {
+  override def serialize(): Array[Byte] = union.getResult.toByteArray
+  override def eval(): Array[Byte] = union.getResult.toByteArray
+}
+case class IntersectionTupleAggregationBuffer[S <: Summary](intersection: 
Intersection[S])
+    extends TupleSketchState {
+  override def serialize(): Array[Byte] = intersection.getResult.toByteArray
+  override def eval(): Array[Byte] = intersection.getResult.toByteArray
+}
+case class FinalizedTupleSketch[S <: Summary](sketch: Sketch[S]) extends 
TupleSketchState {
+  override def serialize(): Array[Byte] = sketch.toByteArray
+  override def eval(): Array[Byte] = sketch.toByteArray
+}
+
+/**
+ * The TupleSketchAgg function utilizes a Datasketches TupleSketch instance to 
count a
+ * probabilistic approximation of the number of unique values in a given 
column with associated
+ * summary values, and outputs the binary representation of the TupleSketch.
+ *
+ * See [[https://datasketches.apache.org/docs/Tuple/TupleSketches.html]] for 
more information.
+ *
+ * @param child
+ *   child expression (struct with key and summary value) against which unique 
counting will occur

Review Comment:
   Where is the user expected to get both struct values from? In the example 
below, we have 
   
   ```
   _FUNC_(struct(col, 1.0D), 12, 'double', 'sum')
   ```
   
   Is the `1.0D` argument expected to be a constant most of the time, or where 
will it generally come from?



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/tuplesketchesAggregates.scala:
##########
@@ -0,0 +1,843 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions.aggregate
+
+import org.apache.datasketches.tuple.{Intersection, Sketch, Summary, Union, 
UpdatableSketch, UpdatableSketchBuilder, UpdatableSummary}
+
+import org.apache.spark.SparkUnsupportedOperationException
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.expressions.{ExpectsInputTypes, 
Expression, ExpressionDescription, Literal}
+import 
org.apache.spark.sql.catalyst.expressions.aggregate.TypedImperativeAggregate
+import org.apache.spark.sql.catalyst.trees.{QuaternaryLike, TernaryLike}
+import org.apache.spark.sql.catalyst.util.{ArrayData, CollationFactory, 
ThetaSketchUtils}
+import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.internal.types.StringTypeWithCollation
+import org.apache.spark.sql.types.{AbstractDataType, ArrayType, BinaryType, 
DataType, DoubleType, FloatType, IntegerType, LongType, StringType, StructType}
+import org.apache.spark.unsafe.types.UTF8String
+
+sealed trait TupleSketchState {
+  def serialize(): Array[Byte]
+  def eval(): Array[Byte]
+}
+case class UpdatableTupleSketchBuffer[U, S <: UpdatableSummary[U]](sketch: 
UpdatableSketch[U, S])
+    extends TupleSketchState {
+  override def serialize(): Array[Byte] = sketch.compact.toByteArray
+  override def eval(): Array[Byte] = sketch.compact.toByteArray
+}
+case class UnionTupleAggregationBuffer[S <: Summary](union: Union[S]) extends 
TupleSketchState {
+  override def serialize(): Array[Byte] = union.getResult.toByteArray
+  override def eval(): Array[Byte] = union.getResult.toByteArray
+}
+case class IntersectionTupleAggregationBuffer[S <: Summary](intersection: 
Intersection[S])
+    extends TupleSketchState {
+  override def serialize(): Array[Byte] = intersection.getResult.toByteArray
+  override def eval(): Array[Byte] = intersection.getResult.toByteArray
+}
+case class FinalizedTupleSketch[S <: Summary](sketch: Sketch[S]) extends 
TupleSketchState {
+  override def serialize(): Array[Byte] = sketch.toByteArray
+  override def eval(): Array[Byte] = sketch.toByteArray
+}
+
+/**
+ * The TupleSketchAgg function utilizes a Datasketches TupleSketch instance to 
count a
+ * probabilistic approximation of the number of unique values in a given 
column with associated
+ * summary values, and outputs the binary representation of the TupleSketch.
+ *
+ * See [[https://datasketches.apache.org/docs/Tuple/TupleSketches.html]] for 
more information.
+ *
+ * @param child
+ *   child expression (struct with key and summary value) against which unique 
counting will occur
+ * @param lgNomEntriesExpr
+ *   the log-base-2 of nomEntries decides the number of buckets for the sketch
+ * @param summaryType
+ *   the type of summary (double, integer, string)

Review Comment:
   I am curious why we chose to accept the data type of the input value as an explicit argument. With the KLL quantiles functions, we 
encoded this type into the function names themselves, e.g. 
   
   ```
   kll_sketch_agg_bigint(col, k)
   kll_sketch_agg_float(col, k)
   kll_sketch_agg_double(col, k)
   ```
   
   Applying the same idea here, we'd have:
   
   ```
   tuple_sketch_agg_bigint
   tuple_sketch_agg_float
   tuple_sketch_agg_double
   ```
   
   This would bring additional consistency and prevent the need for parsing and 
processing the additional argument. 



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala:
##########
@@ -539,21 +539,9 @@ object FunctionRegistry {
     expression[KllSketchAggBigint]("kll_sketch_agg_bigint"),
     expression[KllSketchAggFloat]("kll_sketch_agg_float"),
     expression[KllSketchAggDouble]("kll_sketch_agg_double"),
-    expression[KllSketchToStringBigint]("kll_sketch_to_string_bigint"),
-    expression[KllSketchToStringFloat]("kll_sketch_to_string_float"),
-    expression[KllSketchToStringDouble]("kll_sketch_to_string_double"),
-    expression[KllSketchGetNBigint]("kll_sketch_get_n_bigint"),
-    expression[KllSketchGetNFloat]("kll_sketch_get_n_float"),
-    expression[KllSketchGetNDouble]("kll_sketch_get_n_double"),
-    expression[KllSketchMergeBigint]("kll_sketch_merge_bigint"),
-    expression[KllSketchMergeFloat]("kll_sketch_merge_float"),
-    expression[KllSketchMergeDouble]("kll_sketch_merge_double"),
-    expression[KllSketchGetQuantileBigint]("kll_sketch_get_quantile_bigint"),
-    expression[KllSketchGetQuantileFloat]("kll_sketch_get_quantile_float"),
-    expression[KllSketchGetQuantileDouble]("kll_sketch_get_quantile_double"),
-    expression[KllSketchGetRankBigint]("kll_sketch_get_rank_bigint"),
-    expression[KllSketchGetRankFloat]("kll_sketch_get_rank_float"),
-    expression[KllSketchGetRankDouble]("kll_sketch_get_rank_double"),

Review Comment:
   Good catch on this — I mistakenly added the scalar KLL quantiles sketch 
functions in the aggregate-functions section; the correct place for them is 
below in the scalar-functions section.



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/tuplesketchesAggregates.scala:
##########
@@ -0,0 +1,843 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions.aggregate
+
+import org.apache.datasketches.tuple.{Intersection, Sketch, Summary, Union, 
UpdatableSketch, UpdatableSketchBuilder, UpdatableSummary}
+
+import org.apache.spark.SparkUnsupportedOperationException
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.expressions.{ExpectsInputTypes, 
Expression, ExpressionDescription, Literal}
+import 
org.apache.spark.sql.catalyst.expressions.aggregate.TypedImperativeAggregate
+import org.apache.spark.sql.catalyst.trees.{QuaternaryLike, TernaryLike}
+import org.apache.spark.sql.catalyst.util.{ArrayData, CollationFactory, 
ThetaSketchUtils}
+import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.internal.types.StringTypeWithCollation
+import org.apache.spark.sql.types.{AbstractDataType, ArrayType, BinaryType, 
DataType, DoubleType, FloatType, IntegerType, LongType, StringType, StructType}
+import org.apache.spark.unsafe.types.UTF8String
+
+sealed trait TupleSketchState {
+  def serialize(): Array[Byte]
+  def eval(): Array[Byte]
+}
+case class UpdatableTupleSketchBuffer[U, S <: UpdatableSummary[U]](sketch: 
UpdatableSketch[U, S])
+    extends TupleSketchState {
+  override def serialize(): Array[Byte] = sketch.compact.toByteArray
+  override def eval(): Array[Byte] = sketch.compact.toByteArray
+}
+case class UnionTupleAggregationBuffer[S <: Summary](union: Union[S]) extends 
TupleSketchState {
+  override def serialize(): Array[Byte] = union.getResult.toByteArray
+  override def eval(): Array[Byte] = union.getResult.toByteArray
+}
+case class IntersectionTupleAggregationBuffer[S <: Summary](intersection: 
Intersection[S])
+    extends TupleSketchState {
+  override def serialize(): Array[Byte] = intersection.getResult.toByteArray
+  override def eval(): Array[Byte] = intersection.getResult.toByteArray
+}
+case class FinalizedTupleSketch[S <: Summary](sketch: Sketch[S]) extends 
TupleSketchState {
+  override def serialize(): Array[Byte] = sketch.toByteArray
+  override def eval(): Array[Byte] = sketch.toByteArray
+}
+
+/**
+ * The TupleSketchAgg function utilizes a Datasketches TupleSketch instance to 
count a
+ * probabilistic approximation of the number of unique values in a given 
column with associated
+ * summary values, and outputs the binary representation of the TupleSketch.
+ *
+ * See [[https://datasketches.apache.org/docs/Tuple/TupleSketches.html]] for 
more information.
+ *
+ * @param child
+ *   child expression (struct with key and summary value) against which unique 
counting will occur
+ * @param lgNomEntriesExpr
+ *   the log-base-2 of nomEntries decides the number of buckets for the sketch
+ * @param summaryType
+ *   the type of summary (double, integer, string)
+ * @param mode
+ *   the aggregation mode for numeric summaries (sum, min, max, alwaysone)
+ * @param mutableAggBufferOffset
+ *   offset for mutable aggregation buffer
+ * @param inputAggBufferOffset
+ *   offset for input aggregation buffer
+ */
+// scalastyle:off line.size.limit
+@ExpressionDescription(
+  usage = """
+    _FUNC_(expr[, lgNomEntries[, summaryType[, mode]]]) - Returns the 
TupleSketch compact binary representation.
+      `expr` should be a struct with key and summary value fields.
+      `lgNomEntries` (optional) is the log-base-2 of nominal entries, with 
nominal entries deciding
+      the number buckets or slots for the TupleSketch. Default is 12.
+      `summaryType` (optional) is the type of summary (double, integer, 
string). Default is double.
+      `mode` (optional) is the aggregation mode for numeric summaries (sum, 
min, max, alwaysone). Default is sum. """,

Review Comment:
   Can you explain in more detail what this means? Why would we want to use the different 
aggregation modes?



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/tuplesketchesAggregates.scala:
##########
@@ -0,0 +1,843 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions.aggregate
+
+import org.apache.datasketches.tuple.{Intersection, Sketch, Summary, Union, 
UpdatableSketch, UpdatableSketchBuilder, UpdatableSummary}
+
+import org.apache.spark.SparkUnsupportedOperationException
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.expressions.{ExpectsInputTypes, 
Expression, ExpressionDescription, Literal}
+import 
org.apache.spark.sql.catalyst.expressions.aggregate.TypedImperativeAggregate
+import org.apache.spark.sql.catalyst.trees.{QuaternaryLike, TernaryLike}
+import org.apache.spark.sql.catalyst.util.{ArrayData, CollationFactory, 
ThetaSketchUtils}
+import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.internal.types.StringTypeWithCollation
+import org.apache.spark.sql.types.{AbstractDataType, ArrayType, BinaryType, 
DataType, DoubleType, FloatType, IntegerType, LongType, StringType, StructType}
+import org.apache.spark.unsafe.types.UTF8String
+
+sealed trait TupleSketchState {
+  def serialize(): Array[Byte]
+  def eval(): Array[Byte]
+}
+case class UpdatableTupleSketchBuffer[U, S <: UpdatableSummary[U]](sketch: 
UpdatableSketch[U, S])
+    extends TupleSketchState {
+  override def serialize(): Array[Byte] = sketch.compact.toByteArray
+  override def eval(): Array[Byte] = sketch.compact.toByteArray
+}
+case class UnionTupleAggregationBuffer[S <: Summary](union: Union[S]) extends 
TupleSketchState {
+  override def serialize(): Array[Byte] = union.getResult.toByteArray
+  override def eval(): Array[Byte] = union.getResult.toByteArray
+}
+case class IntersectionTupleAggregationBuffer[S <: Summary](intersection: 
Intersection[S])
+    extends TupleSketchState {
+  override def serialize(): Array[Byte] = intersection.getResult.toByteArray
+  override def eval(): Array[Byte] = intersection.getResult.toByteArray
+}
+case class FinalizedTupleSketch[S <: Summary](sketch: Sketch[S]) extends 
TupleSketchState {
+  override def serialize(): Array[Byte] = sketch.toByteArray
+  override def eval(): Array[Byte] = sketch.toByteArray
+}
+
+/**
+ * The TupleSketchAgg function utilizes a Datasketches TupleSketch instance to 
count a
+ * probabilistic approximation of the number of unique values in a given 
column with associated
+ * summary values, and outputs the binary representation of the TupleSketch.
+ *
+ * See [[https://datasketches.apache.org/docs/Tuple/TupleSketches.html]] for 
more information.
+ *
+ * @param child
+ *   child expression (struct with key and summary value) against which unique 
counting will occur
+ * @param lgNomEntriesExpr
+ *   the log-base-2 of nomEntries decides the number of buckets for the sketch
+ * @param summaryType
+ *   the type of summary (double, integer, string)
+ * @param mode
+ *   the aggregation mode for numeric summaries (sum, min, max, alwaysone)
+ * @param mutableAggBufferOffset
+ *   offset for mutable aggregation buffer
+ * @param inputAggBufferOffset
+ *   offset for input aggregation buffer
+ */
+// scalastyle:off line.size.limit
+@ExpressionDescription(
+  usage = """
+    _FUNC_(expr[, lgNomEntries[, summaryType[, mode]]]) - Returns the 
TupleSketch compact binary representation.
+      `expr` should be a struct with key and summary value fields.
+      `lgNomEntries` (optional) is the log-base-2 of nominal entries, with 
nominal entries deciding
+      the number buckets or slots for the TupleSketch. Default is 12.
+      `summaryType` (optional) is the type of summary (double, integer, 
string). Default is double.
+      `mode` (optional) is the aggregation mode for numeric summaries (sum, 
min, max, alwaysone). Default is sum. """,
+  examples = """
+    Examples:
+      > SELECT tuple_sketch_estimate(_FUNC_(struct(col, 1.0D), 12, 'double', 
'sum')) FROM VALUES (1), (1), (2), (2), (3) tab(col);
+       3.0
+  """,
+  group = "agg_funcs",
+  since = "4.2.0")
+// scalastyle:on line.size.limit
+case class TupleSketchAgg(
+    child: Expression,
+    lgNomEntriesExpr: Option[Expression],
+    summaryTypeExpr: Expression,
+    modeExpr: Expression,
+    override val mutableAggBufferOffset: Int,
+    override val inputAggBufferOffset: Int)
+    extends TypedImperativeAggregate[TupleSketchState]
+    with TupleSketchAggregateBase
+    with QuaternaryLike[Expression]
+    with ExpectsInputTypes {
+
+  // Constructors
+
+  def this(child: Expression) = {
+    this(
+      child,
+      Some(Literal(ThetaSketchUtils.DEFAULT_LG_NOM_LONGS)),
+      Literal(ThetaSketchUtils.SUMMARY_TYPE_DOUBLE),
+      Literal(ThetaSketchUtils.MODE_SUM),
+      0,
+      0)
+  }
+
+  def this(child: Expression, lgNomEntriesExpr: Expression) = {
+    this(
+      child,
+      Some(lgNomEntriesExpr),
+      Literal(ThetaSketchUtils.SUMMARY_TYPE_DOUBLE),
+      Literal(ThetaSketchUtils.MODE_SUM),
+      0,
+      0)
+  }
+
+  def this(child: Expression, lgNomEntriesExpr: Expression, summaryTypeExpr: 
Expression) = {
+    this(child, Some(lgNomEntriesExpr), summaryTypeExpr, 
Literal(ThetaSketchUtils.MODE_SUM), 0, 0)
+  }
+
+  def this(
+      child: Expression,
+      lgNomEntriesExpr: Expression,
+      summaryTypeExpr: Expression,
+      modeExpr: Expression) = {
+    this(child, Some(lgNomEntriesExpr), summaryTypeExpr, modeExpr, 0, 0)
+  }
+
+  // Copy constructors required by ImperativeAggregate
+
+  override def withNewMutableAggBufferOffset(newMutableAggBufferOffset: Int): 
TupleSketchAgg =
+    copy(mutableAggBufferOffset = newMutableAggBufferOffset)
+
+  override def withNewInputAggBufferOffset(newInputAggBufferOffset: Int): 
TupleSketchAgg =
+    copy(inputAggBufferOffset = newInputAggBufferOffset)
+
+  override protected def withNewChildrenInternal(
+      newFirst: Expression,
+      newSecond: Expression,
+      newThird: Expression,
+      newFourth: Expression): TupleSketchAgg =
+    copy(
+      child = newFirst,
+      lgNomEntriesExpr = Some(newSecond),
+      summaryTypeExpr = newThird,
+      modeExpr = newFourth)
+
+  // Overrides for TypedImperativeAggregate
+
+  override def prettyName: String = "tuple_sketch_agg"
+
+  override def inputTypes: Seq[AbstractDataType] =
+    Seq(
+      StructType,
+      IntegerType,
+      StringTypeWithCollation(supportsTrimCollation = true),
+      StringTypeWithCollation(supportsTrimCollation = true))
+
+  override def dataType: DataType = BinaryType
+
+  override def nullable: Boolean = false
+
+  override def first: Expression = child
+  override def second: Expression =
+    lgNomEntriesExpr.getOrElse(Literal(ThetaSketchUtils.DEFAULT_LG_NOM_LONGS))
+  override def third: Expression = summaryTypeExpr
+  override def fourth: Expression = modeExpr
+
+  /**
+   * Extract and cache the key and summary value types from the input struct. 
Field 0 is the key
+   * type, Field 1 is the summary value type.
+   *
+   * Note: The asInstanceOf[StructType] cast is safe because inputTypes 
enforces that the first
+   * parameter must be StructType. This is validated during query analysis 
before execution.
+   */
+  private lazy val structType = child.dataType.asInstanceOf[StructType]
+  private lazy val keyType = structType.fields(0).dataType
+  private lazy val valueType = structType.fields(1).dataType
+
+  /**
+   * Factory for creating summary objects based on the input summary type and 
aggregation mode.
+   */
+  private lazy val summaryFactoryInput =
+    ThetaSketchUtils.getSummaryFactory(summaryTypeInput, modeInput)
+
+  /**
+   * Instantiate an UpdatableSketch instance using the lgNomEntries param and 
summary factory.
+   *
+   * @return
+   *   an UpdatableSketch instance wrapped with UpdatableTupleSketchBuffer
+   */
+  override def createAggregationBuffer(): TupleSketchState = {
+    val builder = new UpdatableSketchBuilder[Any, 
UpdatableSummary[Any]](summaryFactoryInput)
+    builder.setNominalEntries(1 << lgNomEntriesInput)
+    val sketch = builder.build()
+    UpdatableTupleSketchBuffer(sketch)
+  }
+
+  /**
+   * Evaluate the input row and update the UpdatableSketch instance with the 
row's key and summary
+   * value. The update function only supports a subset of Spark SQL types, and 
an exception will
+   * be thrown for unsupported types. Notes:
+   *   - Null values are ignored.
+   *   - Empty byte arrays are ignored
+   *   - Empty arrays of supported element types are ignored
+   *   - Strings that are collation-equal to the empty string are ignored.
+   *
+   * @param updateBuffer
+   *   A previously initialized UpdatableSketch instance
+   * @param input
+   *   An input row
+   */
+  override def update(updateBuffer: TupleSketchState, input: InternalRow): 
TupleSketchState = {
+    // Return early for null values.
+    val structValue = child.eval(input)
+    if (structValue == null) return updateBuffer
+
+    // Safe: child.eval() returns InternalRow when child.dataType is StructType
+    val struct = structValue.asInstanceOf[InternalRow]
+    val key = struct.get(0, this.keyType)
+    val summaryValue = struct.get(1, this.valueType)
+
+    if (key == null || summaryValue == null) return updateBuffer
+
+    // Initialized buffer should be UpdatableTupleSketchBuffer, else error out.
+    val sketch = updateBuffer match {
+      case UpdatableTupleSketchBuffer(s) => s
+      case _ => throw 
QueryExecutionErrors.tupleInvalidInputSketchBuffer(prettyName)
+    }
+
+    // Convert summary value based on summaryTypeInput.
+    val summary = ThetaSketchUtils.convertSummaryValue(summaryTypeInput, 
summaryValue, prettyName)
+
+    // Handle the different data types for sketch updates.
+    this.keyType match {
+      case ArrayType(IntegerType, _) =>
+        val arr = key.asInstanceOf[ArrayData].toIntArray()
+        sketch.update(arr, summary)
+      case ArrayType(LongType, _) =>
+        val arr = key.asInstanceOf[ArrayData].toLongArray()
+        sketch.update(arr, summary)
+      case BinaryType =>
+        val bytes = key.asInstanceOf[Array[Byte]]
+        sketch.update(bytes, summary)
+      case DoubleType =>
+        sketch.update(key.asInstanceOf[Double], summary)
+      case FloatType =>
+        sketch.update(key.asInstanceOf[Float].toDouble, summary)
+      case IntegerType =>
+        sketch.update(key.asInstanceOf[Int].toLong, summary)
+      case LongType =>
+        sketch.update(key.asInstanceOf[Long], summary)
+      case st: StringType =>
+        val collation = CollationFactory.fetchCollation(st.collationId)
+        val str = key.asInstanceOf[UTF8String]
+        if (!collation.equalsFunction(str, UTF8String.EMPTY_UTF8)) {
+          sketch.update(collation.sortKeyFunction.apply(str), summary)
+        }
+      case _ =>
+        throw new SparkUnsupportedOperationException(
+          errorClass = "_LEGACY_ERROR_TEMP_3121",
+          messageParameters = Map("dataType" -> child.dataType.toString))
+    }
+
+    UpdatableTupleSketchBuffer(sketch)
+  }
+
+  /**
+   * Merges an input CompactSketch into the UpdatableSketch which is acting as 
the aggregation
+   * buffer.
+   *
+   * @param updateBuffer
+   *   The UpdatableSketch or Union instance used to store the aggregation 
result
+   * @param input
+   *   An input UpdatableSketch, Union, or CompactSketch instance
+   */
+  override def merge(
+      updateBuffer: TupleSketchState,
+      input: TupleSketchState): TupleSketchState = {
+
+    def createUnionWith(
+        sketch1: Sketch[Summary],
+        sketch2: Sketch[Summary]): UnionTupleAggregationBuffer[Summary] = {
+      val summarySetOps = 
ThetaSketchUtils.getSummarySetOperations(summaryTypeInput, modeInput)
+      val union = new Union(1 << lgNomEntriesInput, summarySetOps)

Review Comment:
   Can this overflow? Do we check bounds on this parameter anywhere?



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/tuplesketchesAggregates.scala:
##########
@@ -0,0 +1,843 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions.aggregate
+
+import org.apache.datasketches.tuple.{Intersection, Sketch, Summary, Union, 
UpdatableSketch, UpdatableSketchBuilder, UpdatableSummary}
+
+import org.apache.spark.SparkUnsupportedOperationException
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.expressions.{ExpectsInputTypes, 
Expression, ExpressionDescription, Literal}
+import 
org.apache.spark.sql.catalyst.expressions.aggregate.TypedImperativeAggregate
+import org.apache.spark.sql.catalyst.trees.{QuaternaryLike, TernaryLike}
+import org.apache.spark.sql.catalyst.util.{ArrayData, CollationFactory, 
ThetaSketchUtils}
+import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.internal.types.StringTypeWithCollation
+import org.apache.spark.sql.types.{AbstractDataType, ArrayType, BinaryType, 
DataType, DoubleType, FloatType, IntegerType, LongType, StringType, StructType}
+import org.apache.spark.unsafe.types.UTF8String
+
+sealed trait TupleSketchState {
+  def serialize(): Array[Byte]
+  def eval(): Array[Byte]
+}
+case class UpdatableTupleSketchBuffer[U, S <: UpdatableSummary[U]](sketch: 
UpdatableSketch[U, S])
+    extends TupleSketchState {
+  override def serialize(): Array[Byte] = sketch.compact.toByteArray
+  override def eval(): Array[Byte] = sketch.compact.toByteArray
+}
+case class UnionTupleAggregationBuffer[S <: Summary](union: Union[S]) extends 
TupleSketchState {
+  override def serialize(): Array[Byte] = union.getResult.toByteArray
+  override def eval(): Array[Byte] = union.getResult.toByteArray
+}
+case class IntersectionTupleAggregationBuffer[S <: Summary](intersection: 
Intersection[S])
+    extends TupleSketchState {
+  override def serialize(): Array[Byte] = intersection.getResult.toByteArray
+  override def eval(): Array[Byte] = intersection.getResult.toByteArray
+}
+case class FinalizedTupleSketch[S <: Summary](sketch: Sketch[S]) extends 
TupleSketchState {
+  override def serialize(): Array[Byte] = sketch.toByteArray
+  override def eval(): Array[Byte] = sketch.toByteArray
+}
+
+/**
+ * The TupleSketchAgg function utilizes a Datasketches TupleSketch instance to 
count a
+ * probabilistic approximation of the number of unique values in a given 
column with associated
+ * summary values, and outputs the binary representation of the TupleSketch.
+ *
+ * See [[https://datasketches.apache.org/docs/Tuple/TupleSketches.html]] for 
more information.
+ *
+ * @param child
+ *   child expression (struct with key and summary value) against which unique 
counting will occur
+ * @param lgNomEntriesExpr
+ *   the log-base-2 of nomEntries decides the number of buckets for the sketch
+ * @param summaryTypeExpr
+ *   the type of summary (double, integer, string)
+ * @param modeExpr
+ *   the aggregation mode for numeric summaries (sum, min, max, alwaysone)
+ * @param mutableAggBufferOffset
+ *   offset for mutable aggregation buffer
+ * @param inputAggBufferOffset
+ *   offset for input aggregation buffer
+ */
+// scalastyle:off line.size.limit
+@ExpressionDescription(
+  usage = """
+    _FUNC_(expr[, lgNomEntries[, summaryType[, mode]]]) - Returns the 
TupleSketch compact binary representation.
+      `expr` should be a struct with key and summary value fields.
+      `lgNomEntries` (optional) is the log-base-2 of nominal entries, with 
nominal entries deciding
+      the number of buckets or slots for the TupleSketch. Default is 12.
+      `summaryType` (optional) is the type of summary (double, integer, 
string). Default is double.
+      `mode` (optional) is the aggregation mode for numeric summaries (sum, 
min, max, alwaysone). Default is sum. """,
+  examples = """
+    Examples:
+      > SELECT tuple_sketch_estimate(_FUNC_(struct(col, 1.0D), 12, 'double', 
'sum')) FROM VALUES (1), (1), (2), (2), (3) tab(col);
+       3.0
+  """,
+  group = "agg_funcs",
+  since = "4.2.0")
+// scalastyle:on line.size.limit
+case class TupleSketchAgg(
+    child: Expression,
+    lgNomEntriesExpr: Option[Expression],
+    summaryTypeExpr: Expression,
+    modeExpr: Expression,
+    override val mutableAggBufferOffset: Int,
+    override val inputAggBufferOffset: Int)
+    extends TypedImperativeAggregate[TupleSketchState]
+    with TupleSketchAggregateBase
+    with QuaternaryLike[Expression]
+    with ExpectsInputTypes {
+
+  // Constructors
+
+  def this(child: Expression) = {
+    this(
+      child,
+      Some(Literal(ThetaSketchUtils.DEFAULT_LG_NOM_LONGS)),
+      Literal(ThetaSketchUtils.SUMMARY_TYPE_DOUBLE),
+      Literal(ThetaSketchUtils.MODE_SUM),
+      0,
+      0)
+  }
+
+  def this(child: Expression, lgNomEntriesExpr: Expression) = {
+    this(
+      child,
+      Some(lgNomEntriesExpr),
+      Literal(ThetaSketchUtils.SUMMARY_TYPE_DOUBLE),
+      Literal(ThetaSketchUtils.MODE_SUM),
+      0,
+      0)
+  }
+
+  def this(child: Expression, lgNomEntriesExpr: Expression, summaryTypeExpr: 
Expression) = {
+    this(child, Some(lgNomEntriesExpr), summaryTypeExpr, 
Literal(ThetaSketchUtils.MODE_SUM), 0, 0)
+  }
+
+  def this(
+      child: Expression,
+      lgNomEntriesExpr: Expression,
+      summaryTypeExpr: Expression,
+      modeExpr: Expression) = {
+    this(child, Some(lgNomEntriesExpr), summaryTypeExpr, modeExpr, 0, 0)
+  }
+
+  // Copy constructors required by ImperativeAggregate
+
+  override def withNewMutableAggBufferOffset(newMutableAggBufferOffset: Int): 
TupleSketchAgg =
+    copy(mutableAggBufferOffset = newMutableAggBufferOffset)
+
+  override def withNewInputAggBufferOffset(newInputAggBufferOffset: Int): 
TupleSketchAgg =
+    copy(inputAggBufferOffset = newInputAggBufferOffset)
+
+  override protected def withNewChildrenInternal(
+      newFirst: Expression,
+      newSecond: Expression,
+      newThird: Expression,
+      newFourth: Expression): TupleSketchAgg =
+    copy(
+      child = newFirst,
+      lgNomEntriesExpr = Some(newSecond),
+      summaryTypeExpr = newThird,
+      modeExpr = newFourth)
+
+  // Overrides for TypedImperativeAggregate
+
+  override def prettyName: String = "tuple_sketch_agg"
+
+  override def inputTypes: Seq[AbstractDataType] =
+    Seq(
+      StructType,
+      IntegerType,
+      StringTypeWithCollation(supportsTrimCollation = true),
+      StringTypeWithCollation(supportsTrimCollation = true))
+
+  override def dataType: DataType = BinaryType
+
+  override def nullable: Boolean = false
+
+  override def first: Expression = child
+  override def second: Expression =
+    lgNomEntriesExpr.getOrElse(Literal(ThetaSketchUtils.DEFAULT_LG_NOM_LONGS))
+  override def third: Expression = summaryTypeExpr
+  override def fourth: Expression = modeExpr
+
+  /**
+   * Extract and cache the key and summary value types from the input struct. 
Field 0 is the key
+   * type, Field 1 is the summary value type.
+   *
+   * Note: The asInstanceOf[StructType] cast is safe because inputTypes 
enforces that the first
+   * parameter must be StructType. This is validated during query analysis 
before execution.
+   */
+  private lazy val structType = child.dataType.asInstanceOf[StructType]
+  private lazy val keyType = structType.fields(0).dataType
+  private lazy val valueType = structType.fields(1).dataType
+
+  /**
+   * Factory for creating summary objects based on the input summary type and 
aggregation mode.
+   */
+  private lazy val summaryFactoryInput =
+    ThetaSketchUtils.getSummaryFactory(summaryTypeInput, modeInput)
+
+  /**
+   * Instantiate an UpdatableSketch instance using the lgNomEntries param and 
summary factory.
+   *
+   * @return
+   *   an UpdatableSketch instance wrapped with UpdatableTupleSketchBuffer
+   */
+  override def createAggregationBuffer(): TupleSketchState = {
+    val builder = new UpdatableSketchBuilder[Any, 
UpdatableSummary[Any]](summaryFactoryInput)
+    builder.setNominalEntries(1 << lgNomEntriesInput)
+    val sketch = builder.build()
+    UpdatableTupleSketchBuffer(sketch)
+  }
+
+  /**
+   * Evaluate the input row and update the UpdatableSketch instance with the 
row's key and summary
+   * value. The update function only supports a subset of Spark SQL types, and 
an exception will
+   * be thrown for unsupported types. Notes:
+   *   - Null values are ignored.
+   *   - Empty byte arrays are ignored.
+   *   - Empty arrays of supported element types are ignored.
+   *   - Strings that are collation-equal to the empty string are ignored.
+   *
+   * @param updateBuffer
+   *   A previously initialized UpdatableSketch instance
+   * @param input
+   *   An input row
+   */
+  override def update(updateBuffer: TupleSketchState, input: InternalRow): 
TupleSketchState = {
+    // Return early for null values.
+    val structValue = child.eval(input)
+    if (structValue == null) return updateBuffer
+
+    // Safe: child.eval() returns InternalRow when child.dataType is StructType
+    val struct = structValue.asInstanceOf[InternalRow]
+    val key = struct.get(0, this.keyType)
+    val summaryValue = struct.get(1, this.valueType)

Review Comment:
   Do we also know that the struct has the expected number and types of fields? 
Can we cover it with testing?



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/tuplesketchesAggregates.scala:
##########
@@ -0,0 +1,843 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions.aggregate
+
+import org.apache.datasketches.tuple.{Intersection, Sketch, Summary, Union, 
UpdatableSketch, UpdatableSketchBuilder, UpdatableSummary}
+
+import org.apache.spark.SparkUnsupportedOperationException
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.expressions.{ExpectsInputTypes, 
Expression, ExpressionDescription, Literal}
+import 
org.apache.spark.sql.catalyst.expressions.aggregate.TypedImperativeAggregate
+import org.apache.spark.sql.catalyst.trees.{QuaternaryLike, TernaryLike}
+import org.apache.spark.sql.catalyst.util.{ArrayData, CollationFactory, 
ThetaSketchUtils}
+import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.internal.types.StringTypeWithCollation
+import org.apache.spark.sql.types.{AbstractDataType, ArrayType, BinaryType, 
DataType, DoubleType, FloatType, IntegerType, LongType, StringType, StructType}
+import org.apache.spark.unsafe.types.UTF8String
+
+sealed trait TupleSketchState {
+  def serialize(): Array[Byte]
+  def eval(): Array[Byte]
+}
+case class UpdatableTupleSketchBuffer[U, S <: UpdatableSummary[U]](sketch: 
UpdatableSketch[U, S])
+    extends TupleSketchState {
+  override def serialize(): Array[Byte] = sketch.compact.toByteArray
+  override def eval(): Array[Byte] = sketch.compact.toByteArray
+}
+case class UnionTupleAggregationBuffer[S <: Summary](union: Union[S]) extends 
TupleSketchState {
+  override def serialize(): Array[Byte] = union.getResult.toByteArray
+  override def eval(): Array[Byte] = union.getResult.toByteArray
+}
+case class IntersectionTupleAggregationBuffer[S <: Summary](intersection: 
Intersection[S])
+    extends TupleSketchState {
+  override def serialize(): Array[Byte] = intersection.getResult.toByteArray
+  override def eval(): Array[Byte] = intersection.getResult.toByteArray
+}
+case class FinalizedTupleSketch[S <: Summary](sketch: Sketch[S]) extends 
TupleSketchState {
+  override def serialize(): Array[Byte] = sketch.toByteArray
+  override def eval(): Array[Byte] = sketch.toByteArray
+}
+
+/**
+ * The TupleSketchAgg function utilizes a Datasketches TupleSketch instance to 
count a
+ * probabilistic approximation of the number of unique values in a given 
column with associated
+ * summary values, and outputs the binary representation of the TupleSketch.
+ *
+ * See [[https://datasketches.apache.org/docs/Tuple/TupleSketches.html]] for 
more information.
+ *
+ * @param child
+ *   child expression (struct with key and summary value) against which unique 
counting will occur
+ * @param lgNomEntriesExpr
+ *   the log-base-2 of nomEntries decides the number of buckets for the sketch
+ * @param summaryTypeExpr
+ *   the type of summary (double, integer, string)
+ * @param modeExpr
+ *   the aggregation mode for numeric summaries (sum, min, max, alwaysone)
+ * @param mutableAggBufferOffset
+ *   offset for mutable aggregation buffer
+ * @param inputAggBufferOffset
+ *   offset for input aggregation buffer
+ */
+// scalastyle:off line.size.limit
+@ExpressionDescription(
+  usage = """
+    _FUNC_(expr[, lgNomEntries[, summaryType[, mode]]]) - Returns the 
TupleSketch compact binary representation.
+      `expr` should be a struct with key and summary value fields.
+      `lgNomEntries` (optional) is the log-base-2 of nominal entries, with 
nominal entries deciding
+      the number of buckets or slots for the TupleSketch. Default is 12.
+      `summaryType` (optional) is the type of summary (double, integer, 
string). Default is double.
+      `mode` (optional) is the aggregation mode for numeric summaries (sum, 
min, max, alwaysone). Default is sum. """,
+  examples = """
+    Examples:
+      > SELECT tuple_sketch_estimate(_FUNC_(struct(col, 1.0D), 12, 'double', 
'sum')) FROM VALUES (1), (1), (2), (2), (3) tab(col);
+       3.0
+  """,
+  group = "agg_funcs",
+  since = "4.2.0")
+// scalastyle:on line.size.limit
+case class TupleSketchAgg(
+    child: Expression,
+    lgNomEntriesExpr: Option[Expression],
+    summaryTypeExpr: Expression,
+    modeExpr: Expression,
+    override val mutableAggBufferOffset: Int,
+    override val inputAggBufferOffset: Int)
+    extends TypedImperativeAggregate[TupleSketchState]
+    with TupleSketchAggregateBase
+    with QuaternaryLike[Expression]
+    with ExpectsInputTypes {
+
+  // Constructors
+
+  def this(child: Expression) = {
+    this(
+      child,
+      Some(Literal(ThetaSketchUtils.DEFAULT_LG_NOM_LONGS)),
+      Literal(ThetaSketchUtils.SUMMARY_TYPE_DOUBLE),
+      Literal(ThetaSketchUtils.MODE_SUM),
+      0,
+      0)
+  }
+
+  def this(child: Expression, lgNomEntriesExpr: Expression) = {
+    this(
+      child,
+      Some(lgNomEntriesExpr),
+      Literal(ThetaSketchUtils.SUMMARY_TYPE_DOUBLE),
+      Literal(ThetaSketchUtils.MODE_SUM),
+      0,
+      0)
+  }
+
+  def this(child: Expression, lgNomEntriesExpr: Expression, summaryTypeExpr: 
Expression) = {
+    this(child, Some(lgNomEntriesExpr), summaryTypeExpr, 
Literal(ThetaSketchUtils.MODE_SUM), 0, 0)
+  }
+
+  def this(
+      child: Expression,
+      lgNomEntriesExpr: Expression,
+      summaryTypeExpr: Expression,
+      modeExpr: Expression) = {
+    this(child, Some(lgNomEntriesExpr), summaryTypeExpr, modeExpr, 0, 0)
+  }
+
+  // Copy constructors required by ImperativeAggregate
+
+  override def withNewMutableAggBufferOffset(newMutableAggBufferOffset: Int): 
TupleSketchAgg =
+    copy(mutableAggBufferOffset = newMutableAggBufferOffset)
+
+  override def withNewInputAggBufferOffset(newInputAggBufferOffset: Int): 
TupleSketchAgg =
+    copy(inputAggBufferOffset = newInputAggBufferOffset)
+
+  override protected def withNewChildrenInternal(
+      newFirst: Expression,
+      newSecond: Expression,
+      newThird: Expression,
+      newFourth: Expression): TupleSketchAgg =
+    copy(
+      child = newFirst,
+      lgNomEntriesExpr = Some(newSecond),
+      summaryTypeExpr = newThird,
+      modeExpr = newFourth)
+
+  // Overrides for TypedImperativeAggregate
+
+  override def prettyName: String = "tuple_sketch_agg"
+
+  override def inputTypes: Seq[AbstractDataType] =
+    Seq(
+      StructType,
+      IntegerType,
+      StringTypeWithCollation(supportsTrimCollation = true),
+      StringTypeWithCollation(supportsTrimCollation = true))
+
+  override def dataType: DataType = BinaryType
+
+  override def nullable: Boolean = false
+
+  override def first: Expression = child
+  override def second: Expression =
+    lgNomEntriesExpr.getOrElse(Literal(ThetaSketchUtils.DEFAULT_LG_NOM_LONGS))
+  override def third: Expression = summaryTypeExpr
+  override def fourth: Expression = modeExpr
+
+  /**
+   * Extract and cache the key and summary value types from the input struct. 
Field 0 is the key
+   * type, Field 1 is the summary value type.
+   *
+   * Note: The asInstanceOf[StructType] cast is safe because inputTypes 
enforces that the first
+   * parameter must be StructType. This is validated during query analysis 
before execution.
+   */
+  private lazy val structType = child.dataType.asInstanceOf[StructType]
+  private lazy val keyType = structType.fields(0).dataType
+  private lazy val valueType = structType.fields(1).dataType
+
+  /**
+   * Factory for creating summary objects based on the input summary type and 
aggregation mode.
+   */
+  private lazy val summaryFactoryInput =
+    ThetaSketchUtils.getSummaryFactory(summaryTypeInput, modeInput)
+
+  /**
+   * Instantiate an UpdatableSketch instance using the lgNomEntries param and 
summary factory.
+   *
+   * @return
+   *   an UpdatableSketch instance wrapped with UpdatableTupleSketchBuffer
+   */
+  override def createAggregationBuffer(): TupleSketchState = {
+    val builder = new UpdatableSketchBuilder[Any, 
UpdatableSummary[Any]](summaryFactoryInput)
+    builder.setNominalEntries(1 << lgNomEntriesInput)
+    val sketch = builder.build()
+    UpdatableTupleSketchBuffer(sketch)
+  }
+
+  /**
+   * Evaluate the input row and update the UpdatableSketch instance with the 
row's key and summary
+   * value. The update function only supports a subset of Spark SQL types, and 
an exception will
+   * be thrown for unsupported types. Notes:
+   *   - Null values are ignored.
+   *   - Empty byte arrays are ignored.
+   *   - Empty arrays of supported element types are ignored.
+   *   - Strings that are collation-equal to the empty string are ignored.
+   *
+   * @param updateBuffer
+   *   A previously initialized UpdatableSketch instance
+   * @param input
+   *   An input row
+   */
+  override def update(updateBuffer: TupleSketchState, input: InternalRow): 
TupleSketchState = {
+    // Return early for null values.
+    val structValue = child.eval(input)
+    if (structValue == null) return updateBuffer
+
+    // Safe: child.eval() returns InternalRow when child.dataType is StructType

Review Comment:
   Do we enforce this with `checkInputDataTypes` or somewhere else?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]


Reply via email to