anishshri-db commented on code in PR #48401:
URL: https://github.com/apache/spark/pull/48401#discussion_r1833569812
##########
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StateStoreColumnFamilySchemaUtils.scala:
##########
@@ -17,35 +17,122 @@
package org.apache.spark.sql.execution.streaming
import org.apache.spark.sql.Encoder
+import org.apache.spark.sql.avro.{AvroDeserializer, AvroOptions, AvroSerializer, SchemaConverters}
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.execution.streaming.TransformWithStateKeyValueRowSchemaUtils._
-import org.apache.spark.sql.execution.streaming.state.{NoPrefixKeyStateEncoderSpec, PrefixKeyScanStateEncoderSpec, StateStoreColFamilySchema}
-import org.apache.spark.sql.types.StructType
+import org.apache.spark.sql.execution.streaming.state.{AvroEncoderSpec, NoPrefixKeyStateEncoderSpec, PrefixKeyScanStateEncoderSpec, RangeKeyScanStateEncoderSpec, StateStoreColFamilySchema}
+import org.apache.spark.sql.types.{BinaryType, BooleanType, ByteType, DataType, DoubleType, FloatType, IntegerType, LongType, NullType, ShortType, StructField, StructType}
object StateStoreColumnFamilySchemaUtils {
+ def apply(initializeAvroSerde: Boolean): StateStoreColumnFamilySchemaUtils =
+ new StateStoreColumnFamilySchemaUtils(initializeAvroSerde)
+
+
+ /**
+ * Avro uses zig-zag encoding for some fixed-length types, like Longs and Ints. For range scans
+ * we want to use big-endian encoding, so we need to convert the source schema to replace these
+ * types with BinaryType.
+ *
+ * @param schema The schema to convert
+ * @param ordinals If non-empty, only convert fields at these ordinals.
+ * If empty, convert all fields.
+ */
+ def convertForRangeScan(schema: StructType, ordinals: Seq[Int] = Seq.empty): StructType = {
+ val ordinalSet = ordinals.toSet
+ StructType(schema.fields.zipWithIndex.map { case (field, idx) =>
+ if ((ordinals.isEmpty || ordinalSet.contains(idx)) && isFixedSize(field.dataType)) {
+ // Convert numeric types to BinaryType while preserving nullability
+ field.copy(dataType = BinaryType)
+ } else {
+ field
+ }
+ })
+ }
+
+ private def isFixedSize(dataType: DataType): Boolean = dataType match {
+ case _: ByteType | _: BooleanType | _: ShortType | _: IntegerType | _: LongType |
+ _: FloatType | _: DoubleType => true
+ case _ => false
+ }
+
+ def getTtlColFamilyName(stateName: String): String = {
+ "$ttl_" + stateName
+ }
+}
+
+/**
+ *
+ * @param initializeAvroSerde Whether or not to create the Avro serializers and deserializers
+ *                            for this state type. This class is used to create the
+ *                            StateStoreColumnFamilySchema for each state variable from the driver
+ */
+class StateStoreColumnFamilySchemaUtils(initializeAvroSerde: Boolean) {
+
+ /**
+ * If initializeAvroSerde is true, this method will create an Avro Serializer and Deserializer
+ * for a particular key and value schema.
+ */
+ private def getAvroSerde(
+ keySchema: StructType,
+ valSchema: StructType,
+ suffixKeySchema: Option[StructType] = None
+ ): Option[AvroEncoderSpec] = {
+ if (initializeAvroSerde) {
+ val avroType = SchemaConverters.toAvroType(valSchema)
+ val avroOptions = AvroOptions(Map.empty)
+ val keyAvroType = SchemaConverters.toAvroType(keySchema)
+ val keySer = new AvroSerializer(keySchema, keyAvroType, nullable = false)
Review Comment:
maybe just create helper functions for these?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]