Github user squito commented on a diff in the pull request: https://github.com/apache/spark/pull/21221#discussion_r195751572 --- Diff: core/src/main/scala/org/apache/spark/status/api/v1/api.scala --- @@ -98,14 +101,53 @@ class ExecutorSummary private[spark]( val removeReason: Option[String], val executorLogs: Map[String, String], val memoryMetrics: Option[MemoryMetrics], - val blacklistedInStages: Set[Int]) + val blacklistedInStages: Set[Int], + @JsonSerialize(using = classOf[PeakMemoryMetricsSerializer]) + @JsonDeserialize(using = classOf[PeakMemoryMetricsDeserializer]) + val peakMemoryMetrics: Option[Array[Long]]) class MemoryMetrics private[spark]( val usedOnHeapStorageMemory: Long, val usedOffHeapStorageMemory: Long, val totalOnHeapStorageMemory: Long, val totalOffHeapStorageMemory: Long) +/** deserialzer for peakMemoryMetrics: convert to array ordered by metric name */ +class PeakMemoryMetricsDeserializer extends JsonDeserializer[Option[Array[Long]]] { + override def deserialize( + jsonParser: JsonParser, + deserializationContext: DeserializationContext): Option[Array[Long]] = { + val metricsMap = jsonParser.readValueAs(classOf[Option[Map[String, Object]]]) --- End diff -- OK, I remember now -- the problem is that `classOf` just has the erased type. But Jackson has something to get around this, with its `TypeReference`: ```scala
val json = """{"a":1,"b":2147483648}"""
val parsedWithTRef = mapper.readValue[Option[Map[String, Long]]](json, new TypeReference[Option[Map[String, java.lang.Long]]] {})
val parsedWithClass = mapper.readValue(json, classOf[Option[Map[String, java.lang.Long]]])
scala> parsedWithTRef.get.get("a").get
res26: Long = 1
scala> parsedWithClass.get.get("a").get
java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.Long
  ... 48 elided
``` I think there is another Scala-specific API to avoid repeating the type twice ... but I also wouldn't really worry about it
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org