sahnib commented on code in PR #45674:
URL: https://github.com/apache/spark/pull/45674#discussion_r1543444350
##########
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StateTypesEncoderUtils.scala:
##########
@@ -23,11 +23,19 @@ import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.Serializer
 import org.apache.spark.sql.catalyst.encoders.encoderFor
 import org.apache.spark.sql.catalyst.expressions.{UnsafeProjection, UnsafeRow}
 import org.apache.spark.sql.execution.streaming.state.StateStoreErrors
-import org.apache.spark.sql.types.{BinaryType, StructType}
+import org.apache.spark.sql.types.{BinaryType, LongType, StructType}

 object StateKeyValueRowSchema {
   val KEY_ROW_SCHEMA: StructType = new StructType().add("key", BinaryType)
-  val VALUE_ROW_SCHEMA: StructType = new StructType().add("value", BinaryType)
+  val VALUE_ROW_SCHEMA: StructType = new StructType()
+    .add("value", BinaryType)
+    .add("ttlExpirationMs", LongType)

Review Comment:
   Removed logging of ttlExpiration when TTL is disabled. To support this, I split ValueState into ValueStateImpl and ValueStateImplWithTTL, which avoids unnecessary ttlMode checks in each function.

##########
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StateTypesEncoderUtils.scala:
##########
@@ -62,33 +70,79 @@ class StateTypesEncoder[GK, V](
   private val rowToObjDeserializer = valExpressionEnc.resolveAndBind().createDeserializer()
   private val reusedValRow = new UnsafeRow(valEncoder.schema.fields.length)

+  private val NO_TTL_ENCODED_VALUE: Long = -1L

Review Comment:
   Done.
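
For context, a minimal sketch of how a row matching the new VALUE_ROW_SCHEMA could be encoded and decoded, using a -1 sentinel for "no TTL" in the spirit of the NO_TTL_ENCODED_VALUE constant from the second hunk. The helper names (encodeValueRow, decodeValueRow, noTtlEncodedValue) are hypothetical and not part of this PR; the actual encoding lives in StateTypesEncoder.

    import org.apache.spark.sql.catalyst.expressions.{GenericInternalRow, UnsafeProjection, UnsafeRow}
    import org.apache.spark.sql.types.{BinaryType, LongType, StructType}

    // Mirrors the PR's VALUE_ROW_SCHEMA: serialized value bytes plus a TTL expiration column.
    val valueRowSchema: StructType = new StructType()
      .add("value", BinaryType)
      .add("ttlExpirationMs", LongType)

    // Sentinel meaning "no TTL set", in the spirit of NO_TTL_ENCODED_VALUE in the diff.
    val noTtlEncodedValue: Long = -1L

    // Projection that converts a generic internal row into an UnsafeRow with the schema above.
    val valueProjection: UnsafeProjection = UnsafeProjection.create(valueRowSchema)

    // Hypothetical helper: pack the serialized value and an optional expiration timestamp.
    def encodeValueRow(valueBytes: Array[Byte], ttlExpirationMs: Option[Long]): UnsafeRow = {
      val row = new GenericInternalRow(
        Array[Any](valueBytes, ttlExpirationMs.getOrElse(noTtlEncodedValue)))
      valueProjection.apply(row)
    }

    // Hypothetical helper: unpack the value bytes and the expiration, if one was set.
    def decodeValueRow(row: UnsafeRow): (Array[Byte], Option[Long]) = {
      val ttl = row.getLong(1)
      (row.getBinary(0), if (ttl == noTtlEncodedValue) None else Some(ttl))
    }

With the split described in the review comment, only ValueStateImplWithTTL would need to populate ttlExpirationMs; the sentinel above is just one way to represent "no TTL" if both paths share the same value row schema.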