beliefer commented on a change in pull request #27847: [SPARK-31002][CORE][DOC] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27847#discussion_r389250439
##########
File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
##########
@@ -320,55 +385,70 @@ package object config {
"This prevents Spark from memory mapping very small blocks. " +
"In general, memory mapping has high overhead for blocks close to or
below " +
"the page size of the operating system.")
+ .version("0.9.2")
.bytesConf(ByteUnit.BYTE)
.createWithDefaultString("2m")
private[spark] val STORAGE_REPLICATION_POLICY =
ConfigBuilder("spark.storage.replication.policy")
+ .version("2.1.0")
.stringConf
.createWithDefaultString(classOf[RandomBlockReplicationPolicy].getName)
private[spark] val STORAGE_REPLICATION_TOPOLOGY_MAPPER =
ConfigBuilder("spark.storage.replication.topologyMapper")
+ .version("2.1.0")
.stringConf
.createWithDefaultString(classOf[DefaultTopologyMapper].getName)
private[spark] val STORAGE_CACHED_PEERS_TTL =
ConfigBuilder("spark.storage.cachedPeersTtl")
- .intConf.createWithDefault(60 * 1000)
+ .version("1.1.1")
+ .intConf
+ .createWithDefault(60 * 1000)
private[spark] val STORAGE_MAX_REPLICATION_FAILURE =
ConfigBuilder("spark.storage.maxReplicationFailures")
- .intConf.createWithDefault(1)
+ .version("1.1.1")
+ .intConf
+ .createWithDefault(1)
private[spark] val STORAGE_REPLICATION_TOPOLOGY_FILE =
- ConfigBuilder("spark.storage.replication.topologyFile").stringConf.createOptional
+ ConfigBuilder("spark.storage.replication.topologyFile")
+ .version("2.1.0")
+ .stringConf
+ .createOptional
private[spark] val STORAGE_EXCEPTION_PIN_LEAK =
ConfigBuilder("spark.storage.exceptionOnPinLeak")
+ .version("1.6.2")
.booleanConf
.createWithDefault(false)
private[spark] val STORAGE_BLOCKMANAGER_TIMEOUTINTERVAL =
ConfigBuilder("spark.storage.blockManagerTimeoutIntervalMs")
+ .version("0.7.3")
.timeConf(TimeUnit.MILLISECONDS)
.createWithDefaultString("60s")
private[spark] val STORAGE_BLOCKMANAGER_SLAVE_TIMEOUT =
ConfigBuilder("spark.storage.blockManagerSlaveTimeoutMs")
+ .version("0.7.0")
Review comment:
No JIRA ID; the version is taken from commit 97434f49b8c029e9b78c91ec5f58557cd1b5c943#diff-2ce6374aac24d70c69182b067216e684
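For context, every entry touched in this hunk follows the same `ConfigBuilder` chaining pattern, with `.version(...)` inserted before the typed builder call. Below is a minimal sketch of that shape, assuming it lives inside the same `package object config` as the entries above; the key, doc string, and default are made up purely for illustration and are not real Spark configurations:

```scala
import java.util.concurrent.TimeUnit

// Hypothetical entry, not a real Spark configuration; it only illustrates
// where .version() sits in the ConfigBuilder chain used throughout this file.
private[spark] val EXAMPLE_HEARTBEAT_TIMEOUT =
  ConfigBuilder("spark.storage.exampleHeartbeatTimeoutMs")  // illustrative key
    .doc("Illustrative timeout entry, documented like the real ones above.")
    .version("3.0.0")                  // release that introduced the key
    .timeConf(TimeUnit.MILLISECONDS)   // typed builder, as in the diff above
    .createWithDefaultString("120s")   // default, parsed as a time string
```

When a key predates JIRA tracking, the commit hash (as cited above) is the traceable source for the value passed to `.version(...)`.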