attilapiros commented on code in PR #41746:
URL: https://github.com/apache/spark/pull/41746#discussion_r1264265464


##########
core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala:
##########
@@ -129,6 +129,9 @@ class CoarseGrainedSchedulerBackend(scheduler: TaskSchedulerImpl, val rpcEnv: Rp
   // The token manager used to create security tokens.
   private var delegationTokenManager: Option[HadoopDelegationTokenManager] = None
 
+  // Current log level of driver to send to executor
+  private var logLevel: Option[String] = None

Review Comment:
   ```suggestion
     @volatile private var logLevel: Option[String] = None
   ```
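   For context on why `@volatile` helps here (a minimal sketch, not the PR's actual code; names are illustrative): the field is presumably written when the driver's log level changes and read on another thread when the executor config is built, so the latest write has to be visible across threads.
   ```scala
   // Minimal illustration of the visibility issue the annotation addresses.
   // One thread updates the level, another reads it; without @volatile the
   // reader may keep observing a stale value.
   class LogLevelHolder {
     @volatile private var logLevel: Option[String] = None

     // e.g. called when the driver's log level changes
     def update(level: String): Unit = { logLevel = Some(level) }

     // e.g. read while answering an executor's config request
     def current: Option[String] = logLevel
   }
   ```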



##########
core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala:
##########
@@ -558,6 +566,11 @@ private[spark] object CoarseGrainedExecutorBackend extends Logging {
       resourcesFileOpt, resourceProfileId)
   }
 
+  private def updateLogLevel(newLogLevel: String): Unit = {
+    if (newLogLevel != Utils.getLogLevel) {
+      Utils.setLogLevel(Level.toLevel(newLogLevel))
+    }
+  }

Review Comment:
   Rename this to `setLogLevelIfNeeded` and move it to `Utils`; you can then use it in `SparkContext` too.
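   A rough sketch of what that helper could look like (assuming the `Utils.getLogLevel` / `Utils.setLogLevel` calls already used in this diff; exact placement in `Utils` is up to you):
   ```scala
   // Hypothetical Utils helper, callable from both CoarseGrainedExecutorBackend
   // and SparkContext; only touches the log level when it actually differs.
   def setLogLevelIfNeeded(newLogLevel: String): Unit = {
     if (newLogLevel != Utils.getLogLevel) {
       Utils.setLogLevel(Level.toLevel(newLogLevel))
     }
   }
   ```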



##########
core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala:
##########
@@ -342,10 +355,11 @@ class CoarseGrainedSchedulerBackend(scheduler: TaskSchedulerImpl, val rpcEnv: Rp
       case RetrieveSparkAppConfig(resourceProfileId) =>
        val rp = scheduler.sc.resourceProfileManager.resourceProfileFromId(resourceProfileId)
         val reply = SparkAppConfig(
-          sparkProperties,
+          getLatestSparkConfig,

Review Comment:
   revert to:
   ```suggestion
             sparkProperties,
   ```



##########
core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala:
##########
@@ -473,6 +476,11 @@ private[spark] object CoarseGrainedExecutorBackend extends Logging {
       }
 
       driverConf.set(EXECUTOR_ID, arguments.executorId)
+
+      if (driverConf.get(EXECUTOR_ALLOW_SYNC_LOG_LEVEL)) {
+        // Set the current log level of driver in Executor
+        cfg.logLevel.foreach(l => updateLogLevel(l))
+      }

Review Comment:
   ```suggestion
         cfg.logLevel.foreach(l => updateLogLevel(l))
   ```
   I think the check is not needed: when `EXECUTOR_ALLOW_SYNC_LOG_LEVEL` is false, `cfg.logLevel` will be `None` anyway.
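   To illustrate that reasoning (a hypothetical driver-side sketch, not necessarily this PR's exact code): the `Option` is only populated when the flag is on, so the executor's `foreach` is already a no-op otherwise.
   ```scala
   // Hypothetical: the driver only fills in the optional log level when
   // EXECUTOR_ALLOW_SYNC_LOG_LEVEL is enabled, so on the executor side
   // cfg.logLevel.foreach(...) simply does nothing when the flag is off.
   val logLevelToSend: Option[String] =
     if (conf.get(EXECUTOR_ALLOW_SYNC_LOG_LEVEL)) conf.get(SPARK_LOG_LEVEL) else None
   ```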



##########
core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala:
##########
@@ -354,6 +368,16 @@ class CoarseGrainedSchedulerBackend(scheduler: TaskSchedulerImpl, val rpcEnv: Rp
         logError(s"Received unexpected ask ${e}")
     }
 
+    private def getLatestSparkConfig: Seq[(String, String)] = {
+      val conf = scheduler.sc.conf
+      if (conf.get(EXECUTOR_ALLOW_SYNC_LOG_LEVEL)) {
+        conf.get(SPARK_LOG_LEVEL).map(sparkProperties :+ (SPARK_LOG_LEVEL.key, _))
+          .getOrElse(sparkProperties)
+      } else {
+        sparkProperties
+      }
+    }
+

Review Comment:
   revert / remove this:
    
   ```suggestion
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

