Github user andrewor14 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/1056#discussion_r14865059
  
    --- Diff: 
core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala ---
    @@ -197,33 +189,71 @@ class JobProgressListener(conf: SparkConf) extends 
SparkListener {
                 (Some(e.toErrorString), None)
             }
     
    -      stageIdToTime.getOrElseUpdate(sid, 0L)
    -      val time = metrics.map(_.executorRunTime).getOrElse(0L)
    -      stageIdToTime(sid) += time
    -      totalTime += time
    -
    -      stageIdToShuffleRead.getOrElseUpdate(sid, 0L)
    -      val shuffleRead = 
metrics.flatMap(_.shuffleReadMetrics).map(_.remoteBytesRead).getOrElse(0L)
    -      stageIdToShuffleRead(sid) += shuffleRead
    -      totalShuffleRead += shuffleRead
    +      if (!metrics.isEmpty) {
    +        val oldMetrics = 
stageIdToTaskData.get(sid).flatMap(_.get(info.taskId)).flatMap(
    +          _.taskMetrics)
    +        updateAggregateMetrics(sid, info.executorId, metrics.get, 
oldMetrics)
    +      }
     
    -      stageIdToShuffleWrite.getOrElseUpdate(sid, 0L)
    -      val shuffleWrite =
    -        
metrics.flatMap(_.shuffleWriteMetrics).map(_.shuffleBytesWritten).getOrElse(0L)
    -      stageIdToShuffleWrite(sid) += shuffleWrite
    -      totalShuffleWrite += shuffleWrite
    +      val taskMap = stageIdToTaskData.getOrElseUpdate(sid, HashMap[Long, 
TaskUIData]())
    +      taskMap(info.taskId) = new TaskUIData(info, metrics, errorMessage)
    +    }
    +  }
     
    -      stageIdToMemoryBytesSpilled.getOrElseUpdate(sid, 0L)
    -      val memoryBytesSpilled = 
metrics.map(_.memoryBytesSpilled).getOrElse(0L)
    -      stageIdToMemoryBytesSpilled(sid) += memoryBytesSpilled
    +  def updateAggregateMetrics(sid: Int, execId: String, taskMetrics: 
TaskMetrics,
    +      oldMetrics: Option[TaskMetrics]) {
    +    val executorSummaryMap = 
stageIdToExecutorSummaries.getOrElseUpdate(key = sid,
    +      op = new HashMap[String, ExecutorSummary]())
    +    val execSummary = executorSummaryMap.getOrElseUpdate(key = execId, op 
= new ExecutorSummary)
    +
    +    stageIdToShuffleWrite.getOrElseUpdate(sid, 0L)
    +    val shuffleWriteDelta =
    +      
(taskMetrics.shuffleWriteMetrics.map(_.shuffleBytesWritten).getOrElse(0L)
    +      - 
oldMetrics.flatMap(_.shuffleWriteMetrics).map(_.shuffleBytesWritten).getOrElse(0L))
    +    stageIdToShuffleWrite(sid) += shuffleWriteDelta
    +    execSummary.shuffleWrite += shuffleWriteDelta
    +    totalShuffleWrite += shuffleWriteDelta
    +
    +    stageIdToShuffleRead.getOrElseUpdate(sid, 0L)
    +    val shuffleReadDelta =
    +      (taskMetrics.shuffleReadMetrics.map(_.remoteBytesRead).getOrElse(0L)
    +      - 
oldMetrics.flatMap(_.shuffleReadMetrics).map(_.remoteBytesRead).getOrElse(0L))
    +    stageIdToShuffleRead(sid) += shuffleReadDelta
    +    execSummary.shuffleRead += shuffleReadDelta
    +    totalShuffleRead += shuffleReadDelta
    +
    +    stageIdToDiskBytesSpilled.getOrElseUpdate(sid, 0L)
    +    val diskSpillDelta =
    +      taskMetrics.diskBytesSpilled - 
oldMetrics.map(_.diskBytesSpilled).getOrElse(0L)
    +    stageIdToDiskBytesSpilled(sid) += diskSpillDelta
    +    execSummary.diskBytesSpilled += diskSpillDelta
    +
    +    stageIdToMemoryBytesSpilled.getOrElseUpdate(sid, 0L)
    +    val memorySpillDelta =
    +      taskMetrics.memoryBytesSpilled - 
oldMetrics.map(_.memoryBytesSpilled).getOrElse(0L)
    +    stageIdToMemoryBytesSpilled(sid) += memorySpillDelta
    +    execSummary.memoryBytesSpilled += memorySpillDelta
    +
    +    stageIdToTime.getOrElseUpdate(sid, 0L)
    +    val timeDelta =
    +      taskMetrics.executorRunTime - 
oldMetrics.map(_.executorRunTime).getOrElse(0L)
    +    stageIdToTime(sid) += timeDelta
    +    execSummary.taskTime += timeDelta
    +    totalTime += timeDelta
    +  }
     
    -      stageIdToDiskBytesSpilled.getOrElseUpdate(sid, 0L)
    -      val diskBytesSpilled = metrics.map(_.diskBytesSpilled).getOrElse(0L)
    -      stageIdToDiskBytesSpilled(sid) += diskBytesSpilled
    +  override def onExecutorMetricsUpdate(executorMetricsUpdate: 
SparkListenerExecutorMetricsUpdate) {
    +    for ((taskId, sid, taskMetrics) <- executorMetricsUpdate.taskMetrics) {
    +      val taskMap = stageIdToTaskData.getOrElse(sid, null)
    --- End diff --
    
    You should `getOrElseUpdate` here. It is possible for events to be dropped 
from the queue such that we could receive an event for a task we don't know 
about. In that case, it's best to handle it as usual and log a warning. 


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes to enable it, or if the feature is enabled but not working,
please contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---

Reply via email to