Github user edwinalu commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21221#discussion_r195543711
  
    --- Diff: core/src/main/scala/org/apache/spark/status/api/v1/api.scala ---
    @@ -98,14 +101,53 @@ class ExecutorSummary private[spark](
         val removeReason: Option[String],
         val executorLogs: Map[String, String],
         val memoryMetrics: Option[MemoryMetrics],
    -    val blacklistedInStages: Set[Int])
    +    val blacklistedInStages: Set[Int],
    +    @JsonSerialize(using = classOf[PeakMemoryMetricsSerializer])
    +    @JsonDeserialize(using = classOf[PeakMemoryMetricsDeserializer])
    +    val peakMemoryMetrics: Option[Array[Long]])
     
     class MemoryMetrics private[spark](
         val usedOnHeapStorageMemory: Long,
         val usedOffHeapStorageMemory: Long,
         val totalOnHeapStorageMemory: Long,
         val totalOffHeapStorageMemory: Long)
     
    +/** deserializer for peakMemoryMetrics: convert to array ordered by metric 
name */
    +class PeakMemoryMetricsDeserializer extends 
JsonDeserializer[Option[Array[Long]]] {
    +  override def deserialize(
    +      jsonParser: JsonParser,
    +      deserializationContext: DeserializationContext): Option[Array[Long]] 
= {
    +    val metricsMap = jsonParser.readValueAs(classOf[Option[Map[String, 
Object]]])
    --- End diff --
    
    Nope, it's still reading it as an Int, even with java.lang.Long, otherwise 
that would have been cleaner.


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to