beliefer commented on a change in pull request #27730:
[SPARK-30841][SQL][DOC][FOLLOW-UP] Add version information to the configuration
of SQL
URL: https://github.com/apache/spark/pull/27730#discussion_r385537797
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
##########
@@ -803,47 +814,55 @@ object SQLConf {
"scanned are partition columns and the query has an aggregate operator that satisfies " +
"distinct semantics. By default the optimization is disabled, since it may return " +
"incorrect results when the files are empty.")
+ .version("2.1.1")
.booleanConf
.createWithDefault(false)
val COLUMN_NAME_OF_CORRUPT_RECORD =
buildConf("spark.sql.columnNameOfCorruptRecord")
.doc("The name of internal column for storing raw/un-parsed JSON and CSV records that fail " +
"to parse.")
+ .version("1.2.0")
.stringConf
.createWithDefault("_corrupt_record")
val BROADCAST_TIMEOUT = buildConf("spark.sql.broadcastTimeout")
.doc("Timeout in seconds for the broadcast wait time in broadcast joins.")
+ .version("1.3.0")
.timeConf(TimeUnit.SECONDS)
.createWithDefaultString(s"${5 * 60}")
// This is only used for the thriftserver
val THRIFTSERVER_POOL = buildConf("spark.sql.thriftserver.scheduler.pool")
.doc("Set a Fair Scheduler pool for a JDBC client session.")
+ .version("1.1.1")
.stringConf
.createOptional
val THRIFTSERVER_INCREMENTAL_COLLECT =
buildConf("spark.sql.thriftServer.incrementalCollect")
.internal()
.doc("When true, enable incremental collection for execution in Thrift Server.")
+ .version("2.0.3")
Review comment:
SPARK-18857, commit ID:
c94288b57b5ce2232e58e35cada558d8d5b8ec6e#diff-32bb9518401c0948c5ea19377b5069ab
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]