Github user squito commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22612#discussion_r232222535

--- Diff: core/src/main/scala/org/apache/spark/metrics/ExecutorMetricType.scala ---
@@ -19,18 +19,31 @@ package org.apache.spark.metrics

 import java.lang.management.{BufferPoolMXBean, ManagementFactory}
 import javax.management.ObjectName

+import scala.collection.mutable
+
+import org.apache.spark.executor.ProcfsBasedSystems
 import org.apache.spark.memory.MemoryManager

 /**
  * Executor metric types for executor-level metrics stored in ExecutorMetrics.
  */
 sealed trait ExecutorMetricType {
-  private[spark] def getMetricValue(memoryManager: MemoryManager): Long
-  private[spark] val name = getClass().getName().stripSuffix("$").split("""\.""").last
+  private[spark] def getMetricValue(memoryManager: MemoryManager): Long = 0
+  private[spark] def getMetricSet(memoryManager: MemoryManager): Array[Long] = {
+    new Array[Long](0)
+  }
+  private[spark] def names = Seq(getClass().getName().stripSuffix("$").split("""\.""").last)
--- End diff --

This is not a good default -- at least not on this base trait, where the interface allows returning multiple metrics. I do think it makes sense to have a child trait here just for the metric types that only report one value, and to put this default there. That child trait would also be the place to declare an abstract `getMetricValue()` (singular) method, which would avoid all the repeated `new Array[Long](1)` wrapping in the implementations.
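To make the suggestion concrete, here is a rough sketch of the child-trait shape being described. The trait and object names (`SingleValueExecutorMetricType`, `OnHeapExecutionMemory`) and the `onHeapExecutionMemoryUsed` accessor are used for illustration only; this is not the code in the PR.

```scala
package org.apache.spark.metrics

import org.apache.spark.memory.MemoryManager

// Base trait: a metric type may expose several values under several names.
sealed trait ExecutorMetricType {
  private[spark] def names: Seq[String]
  private[spark] def getMetricSet(memoryManager: MemoryManager): Array[Long]
}

// Child trait for the common single-value case: the name/array defaults live
// here once, and concrete metric types only implement getMetricValue() (singular).
sealed trait SingleValueExecutorMetricType extends ExecutorMetricType {
  private[spark] def getMetricValue(memoryManager: MemoryManager): Long

  override private[spark] def names: Seq[String] =
    Seq(getClass().getName().stripSuffix("$").split("""\.""").last)

  override private[spark] def getMetricSet(memoryManager: MemoryManager): Array[Long] =
    Array(getMetricValue(memoryManager))
}

// Example single-value metric: no hand-built new Array[Long](1) needed.
case object OnHeapExecutionMemory extends SingleValueExecutorMetricType {
  override private[spark] def getMetricValue(memoryManager: MemoryManager): Long =
    memoryManager.onHeapExecutionMemoryUsed // accessor assumed for illustration
}
```

With this shape, only multi-value metric types (such as the procfs-based one) would extend the base trait directly and override `names`/`getMetricSet` themselves.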