mridulm commented on code in PR #41746:
URL: https://github.com/apache/spark/pull/41746#discussion_r1249162764
##########
core/src/main/scala/org/apache/spark/SparkContext.scala:
##########
@@ -383,20 +383,22 @@ class SparkContext(config: SparkConf) extends Logging {
   def setLogLevel(logLevel: String): Unit = {
     // let's allow lowercase or mixed case too
     val upperCased = logLevel.toUpperCase(Locale.ROOT)
-    require(SparkContext.VALID_LOG_LEVELS.contains(upperCased),
+    require(
+      SparkContext.VALID_LOG_LEVELS.contains(upperCased),
       s"Supplied level $logLevel did not match one of:" +
         s" ${SparkContext.VALID_LOG_LEVELS.mkString(",")}")
-    Utils.setLogLevel(Level.toLevel(upperCased))
+    // Update only if new log level is not same as current log level
+    if (upperCased != Utils.getLogLevel) {
+      Utils.setLogLevel(Level.toLevel(upperCased))
+      // Inform all executors about the change
+      if (config.get(EXECUTOR_ALLOW_SYNC_LOG_LEVEL)) {
+        _schedulerBackend.refresh(immutable.Map("logLevel" -> upperCased))
+      }
+    }
   }
 
   try {
     _conf = config.clone()
-    _conf.get(SPARK_LOG_LEVEL).foreach { level =>
-      if (Logging.setLogLevelPrinted) {
-        System.err.printf("Setting Spark log level to \"%s\".\n", level)
-      }
-      setLogLevel(level)
-    }
Review Comment:
To clarify, I did not mean this specific message, but all the messages
that would not get printed out because the log level does not change until later.
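
For context, a minimal sketch of the effect being described (plain Log4j 2 rather than Spark code, and assuming the default root level is INFO): any message logged below the current level before the level is actually changed is silently dropped, and only messages logged after the change show up.

```scala
import org.apache.logging.log4j.{Level, LogManager}
import org.apache.logging.log4j.core.config.Configurator

object LateLogLevelSketch {
  private val log = LogManager.getLogger(getClass)

  def main(args: Array[String]): Unit = {
    // Assumes the root logger still runs at INFO here, so DEBUG is filtered out.
    log.debug("emitted before the level change - silently dropped")

    // Roughly analogous to the log level only being applied later during initialization.
    Configurator.setRootLevel(Level.DEBUG)

    log.debug("emitted after the level change - printed")
  }
}
```

The names and the INFO default above are illustrative assumptions; the point is only that messages emitted in the window before the level change are lost.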