Github user gengliangwang commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21532#discussion_r194558798
  
    --- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListener.scala ---
    @@ -159,19 +159,29 @@ class SQLAppStatusListener(
       }
     
       private def aggregateMetrics(exec: LiveExecutionData): Map[Long, String] = {
    -    val metricIds = exec.metrics.map(_.accumulatorId).sorted
         val metricTypes = exec.metrics.map { m => (m.accumulatorId, m.metricType) }.toMap
    -    val metrics = exec.stages.toSeq
    -      .flatMap { stageId => Option(stageMetrics.get(stageId)) }
    -      .flatMap(_.taskMetrics.values().asScala)
    -      .flatMap { metrics => metrics.ids.zip(metrics.values) }
    -
    -    val aggregatedMetrics = (metrics ++ exec.driverAccumUpdates.toSeq)
    -      .filter { case (id, _) => metricIds.contains(id) }
    -      .groupBy(_._1)
    -      .map { case (id, values) =>
    -        id -> SQLMetrics.stringValue(metricTypes(id), values.map(_._2).toSeq)
    +    val metrics = metricTypes.keys
    +      .map { id => (id, scala.collection.mutable.ArrayBuffer.empty[Long]) }
    +      .toMap
    +    stageMetrics.asScala.collect { case (stage, liveStageMetrics) if exec.stages.contains(stage) =>
    +      liveStageMetrics.taskMetrics.values().asScala.foreach { case liveMetrics =>
    +        var i = 0
    +        while (i < liveMetrics.ids.length) {
    --- End diff --
    
    Use `while` for the critical loop path: https://github.com/databricks/scala-style-guide#traversal-and-zipwithindex
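    
    For illustration only (not the PR's code), a minimal sketch of the pattern the style guide recommends, contrasting a `zip`-based traversal with an index-based `while` loop over two parallel arrays; all names below are hypothetical:
    
    ```scala
    object TraversalSketch {
      // zip-based traversal: concise, but allocates an intermediate collection of tuples.
      def sumZipped(ids: Array[Long], values: Array[Long]): Long =
        ids.zip(values).map { case (_, v) => v }.sum
    
      // while loop over indices: no intermediate allocations, preferred on hot/critical paths.
      def sumWhile(ids: Array[Long], values: Array[Long]): Long = {
        var total = 0L
        var i = 0
        while (i < ids.length) {
          total += values(i)
          i += 1
        }
        total
      }
    
      def main(args: Array[String]): Unit = {
        val ids = Array(1L, 2L, 3L)
        val values = Array(10L, 20L, 30L)
        assert(sumZipped(ids, values) == sumWhile(ids, values)) // both yield 60
      }
    }
    ```
    
    The diff above applies the same idea: it indexes `liveMetrics.ids` and `liveMetrics.values` directly instead of building the zipped sequence the removed code flat-mapped over.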


---
