beliefer commented on a change in pull request #27730: [SPARK-30841][SQL][DOC][FOLLOW-UP] Add version information to the configuration of SQL
URL: https://github.com/apache/spark/pull/27730#discussion_r385543548
 
 

 ##########
 File path: sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
 ##########
 @@ -1130,31 +1185,36 @@ object SQLConf {
       .internal()
       .doc("Minimum number of state store delta files that needs to be 
generated before they " +
         "consolidated into snapshots.")
+      .version("2.0.0")
       .intConf
       .createWithDefault(10)
 
   val FLATMAPGROUPSWITHSTATE_STATE_FORMAT_VERSION =
     buildConf("spark.sql.streaming.flatMapGroupsWithState.stateFormatVersion")
       .internal()
       .doc("State format version used by flatMapGroupsWithState operation in a 
streaming query")
+      .version("2.4.0")
       .intConf
       .checkValue(v => Set(1, 2).contains(v), "Valid versions are 1 and 2")
       .createWithDefault(2)
 
   val CHECKPOINT_LOCATION = buildConf("spark.sql.streaming.checkpointLocation")
     .doc("The default location for storing checkpoint data for streaming 
queries.")
+    .version("2.0.0")
     .stringConf
     .createOptional
 
   val FORCE_DELETE_TEMP_CHECKPOINT_LOCATION =
     buildConf("spark.sql.streaming.forceDeleteTempCheckpointLocation")
       .doc("When true, enable temporary checkpoint locations force delete.")
+      .version("3.0.0")
 
 Review comment:
   SPARK-30812, commit ID: b76bc0b1b8b2abd00a84f805af90ca4c5925faaa#diff-9a6b543db706f1a90f790783d6930a13
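   
   For reference, below is a minimal, self-contained sketch of the builder-style DSL that the diff exercises. The names ConfBuilder, TypedConfBuilder and ConfEntry are hypothetical stand-ins, not Spark's actual ConfigBuilder/ConfigEntry API (which lives in org.apache.spark.internal.config and differs in detail); the sketch only illustrates how a .version() attribute is carried through the chain to the resulting config entry, which is the metadata this follow-up documents.
   
   ```scala
   // Hypothetical sketch of a fluent config builder that records the release
   // in which a config was introduced. Not Spark's real implementation.
   final case class ConfEntry[T](
       key: String,
       doc: String,
       version: String,          // release that introduced the config
       default: Option[T])
   
   final class ConfBuilder(key: String) {
     private var _doc: String = ""
     private var _version: String = ""
   
     def doc(s: String): ConfBuilder = { _doc = s; this }
     def version(v: String): ConfBuilder = { _version = v; this }
   
     // Narrow the builder to a concrete value type, mirroring ".stringConf" / ".intConf".
     def stringConf: TypedConfBuilder[String] =
       new TypedConfBuilder[String](key, _doc, _version)
   }
   
   final class TypedConfBuilder[T](key: String, doc: String, version: String) {
     def createOptional: ConfEntry[T] = ConfEntry(key, doc, version, None)
     def createWithDefault(d: T): ConfEntry[T] = ConfEntry(key, doc, version, Some(d))
   }
   
   object VersionSketch {
     def main(args: Array[String]): Unit = {
       // Roughly the same shape as the CHECKPOINT_LOCATION entry in the diff above.
       val checkpointLocation =
         new ConfBuilder("spark.sql.streaming.checkpointLocation")
           .doc("The default location for storing checkpoint data for streaming queries.")
           .version("2.0.0")
           .stringConf
           .createOptional
   
       println(s"${checkpointLocation.key} was introduced in ${checkpointLocation.version}")
     }
   }
   ```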
