holdenk commented on a change in pull request #28874:
URL: https://github.com/apache/spark/pull/28874#discussion_r445082363
##########
File path: core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
##########
@@ -1256,19 +1256,19 @@ class SparkSubmitSuite
val jars = conf.get("spark.yarn.dist.jars").split(",").toSet
- def isSchemeBlacklisted(scheme: String) = {
- blacklistSchemes.contains("*") || blacklistSchemes.contains(scheme)
+ def isSchemeForcedDownload(scheme: String) = {
Review comment:
So this seems like a "not allowed" type of situation more than a forced download, unless I'm misreading the code.
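Roughly what I'm picturing, purely as a naming sketch (the val name and its contents here are illustrative, not what's in the diff):

```scala
// Naming sketch only: the predicate answers "is this scheme excluded from
// direct use?", so a "not allowed" style name reads more naturally to me
// than "forced download".
val notAllowedSchemes: Set[String] = Set("http", "https") // illustrative value

def isSchemeNotAllowed(scheme: String): Boolean =
  notAllowedSchemes.contains("*") || notAllowedSchemes.contains(scheme)
```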
##########
File path: sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQueryFileTest.scala
##########
@@ -45,25 +45,34 @@ abstract class HiveQueryFileTest extends HiveComparisonTest {
runOnlyDirectories.nonEmpty ||
skipDirectories.nonEmpty
- val whiteListProperty: String = "spark.hive.whitelist"
- // Allow the whiteList to be overridden by a system property
- val realWhiteList: Seq[String] =
- Option(System.getProperty(whiteListProperty)).map(_.split(",").toSeq).getOrElse(whiteList)
+ val deprecatedIncludeListProperty: String = "spark.hive.whitelist"
+ val includeListProperty: String = "spark.hive.includelist"
Review comment:
I think leaving it as deprecated for at least one release cycle is good; it makes upgrading easier.
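Something like the following is what I'd expect for the override lookup while the deprecated name is kept around (rough sketch only; the fallback order and the default value are just a suggestion):

```scala
// Rough sketch: prefer the new property, but keep honoring the deprecated
// one for a release cycle so existing setups keep working.
val deprecatedIncludeListProperty: String = "spark.hive.whitelist"
val includeListProperty: String = "spark.hive.includelist"
val includeList: Seq[String] = Seq(".*") // illustrative default

val realIncludeList: Seq[String] =
  Option(System.getProperty(includeListProperty))
    .orElse(Option(System.getProperty(deprecatedIncludeListProperty)))
    .map(_.split(",").toSeq)
    .getOrElse(includeList)
```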
##########
File path: core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
##########
@@ -181,23 +181,23 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
processing.remove(path.getName)
}
- private val blacklist = new ConcurrentHashMap[String, Long]
+ private val ignoreList = new ConcurrentHashMap[String, Long]
// Visible for testing
- private[history] def isBlacklisted(path: Path): Boolean = {
- blacklist.containsKey(path.getName)
+ private[history] def isIgnored(path: Path): Boolean = {
+ ignoreList.containsKey(path.getName)
}
- private def blacklist(path: Path): Unit = {
- blacklist.put(path.getName, clock.getTimeMillis())
+ private def ignore(path: Path): Unit = {
+ ignoreList.put(path.getName, clock.getTimeMillis())
}
/**
- * Removes expired entries in the blacklist, according to the provided `expireTimeInSeconds`.
+ * Removes expired entries in the ignoreList, according to the provided `expireTimeInSeconds`.
*/
- private def clearBlacklist(expireTimeInSeconds: Long): Unit = {
+ private def clearIgnoreList(expireTimeInSeconds: Long): Unit = {
val expiredThreshold = clock.getTimeMillis() - expireTimeInSeconds * 1000
- blacklist.asScala.retain((_, creationTime) => creationTime >= expiredThreshold)
+ ignoreList.asScala.retain((_, creationTime) => creationTime >= expiredThreshold)
Review comment:
I like the suggestion from 32037 of `unhealthy` for these kinds of
dynamic lists where we are treating some nodes as not being in a good state.
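For the node-level lists, roughly what I'm picturing with that vocabulary (sketch only; the names and the use of `System.currentTimeMillis` instead of the injected `Clock` are just for illustration):

```scala
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._

// Sketch only: an "unhealthy" vocabulary for a dynamic, time-stamped list
// of nodes that are currently not in a good state.
val unhealthyNodes = new ConcurrentHashMap[String, Long]

def markUnhealthy(node: String): Unit =
  unhealthyNodes.put(node, System.currentTimeMillis())

def isUnhealthy(node: String): Boolean =
  unhealthyNodes.containsKey(node)

// Same retain-based expiry as clearIgnoreList above, just with the
// "unhealthy" naming applied.
def clearUnhealthyNodes(expireTimeInSeconds: Long): Unit = {
  val expiredThreshold = System.currentTimeMillis() - expireTimeInSeconds * 1000
  unhealthyNodes.asScala.retain((_, markedAt) => markedAt >= expiredThreshold)
}
```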