This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new c5cc4ee9ced3 [SPARK-54547][CORE] Rename `hostPort` variable to `host` in `TaskSchedulerImpl.(executorLost|logExecutorLoss)` methods
c5cc4ee9ced3 is described below

commit c5cc4ee9ced32c5084ab0058c2ca7a1af642cc8f
Author: Shilong Duan <[email protected]>
AuthorDate: Fri Nov 28 14:14:48 2025 -0800

    [SPARK-54547][CORE] Rename `hostPort` variable to `host` in `TaskSchedulerImpl.(executorLost|logExecutorLoss)` methods
    
    ### What changes were proposed in this pull request?
    
    This PR renames the `hostPort` variable to `host` in the `TaskSchedulerImpl.executorLost` and `TaskSchedulerImpl.logExecutorLoss` methods.
    
    ### Why are the changes needed?
    
    The value comes from `executorIdToHost`, which maps an executor ID to a host rather than a `host:port` pair, so the old name was misleading. Renaming it improves code clarity and prevents potential misuse.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    N/A
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #53258 from Xtpacz/use-host-instead-of-hostPort.
    
    Authored-by: Shilong Duan <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../org/apache/spark/scheduler/TaskSchedulerImpl.scala | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
index 1351d8c778b5..66f9a907158b 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
@@ -984,17 +984,17 @@ private[spark] class TaskSchedulerImpl(
 
     synchronized {
       if (executorIdToRunningTaskIds.contains(executorId)) {
-        val hostPort = executorIdToHost(executorId)
-        logExecutorLoss(executorId, hostPort, reason)
+        val host = executorIdToHost(executorId)
+        logExecutorLoss(executorId, host, reason)
         removeExecutor(executorId, reason)
         failedExecutor = Some(executorId)
       } else {
         executorIdToHost.get(executorId) match {
-          case Some(hostPort) =>
+          case Some(host) =>
             // If the host mapping still exists, it means we don't know the loss reason for the
             // executor. So call removeExecutor() to update tasks running on that executor when
             // the real loss reason is finally known.
-            logExecutorLoss(executorId, hostPort, reason)
+            logExecutorLoss(executorId, host, reason)
             removeExecutor(executorId, reason)
 
           case None =>
@@ -1022,20 +1022,20 @@ private[spark] class TaskSchedulerImpl(
 
   private def logExecutorLoss(
       executorId: String,
-      hostPort: String,
+      host: String,
       reason: ExecutorLossReason): Unit = reason match {
     case LossReasonPending =>
-      logDebug(s"Executor $executorId on $hostPort lost, but reason not yet 
known.")
+      logDebug(s"Executor $executorId on $host lost, but reason not yet 
known.")
     case ExecutorKilled =>
       logInfo(log"Executor ${MDC(LogKeys.EXECUTOR_ID, executorId)} on " +
-        log"${MDC(LogKeys.HOST_PORT, hostPort)} killed by driver.")
+        log"${MDC(LogKeys.HOST_PORT, host)} killed by driver.")
     case _: ExecutorDecommission =>
       logInfo(log"Executor ${MDC(LogKeys.EXECUTOR_ID, executorId)} on " +
-        log"${MDC(LogKeys.HOST_PORT, hostPort)} is decommissioned" +
+        log"${MDC(LogKeys.HOST_PORT, host)} is decommissioned" +
         log"${MDC(DURATION, getDecommissionDuration(executorId))}.")
     case _ =>
       logError(log"Lost executor ${MDC(LogKeys.EXECUTOR_ID, executorId)} on " +
-        log"${MDC(LogKeys.HOST, hostPort)}: ${MDC(LogKeys.REASON, reason)}")
+        log"${MDC(LogKeys.HOST, host)}: ${MDC(LogKeys.REASON, reason)}")
   }
 
   // return decommission duration in string or "" if decommission startTime not exists


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
