Github user mccheah commented on a diff in the pull request:
https://github.com/apache/spark/pull/21221#discussion_r206334970
--- Diff: core/src/main/scala/org/apache/spark/status/api/v1/api.scala ---
@@ -98,14 +103,50 @@ class ExecutorSummary private[spark](
val removeReason: Option[String],
val executorLogs: Map[String, String],
val memoryMetrics: Option[MemoryMetrics],
- val blacklistedInStages: Set[Int])
+ val blacklistedInStages: Set[Int],
+ @JsonSerialize(using = classOf[ExecutorMetricsJsonSerializer])
+ @JsonDeserialize(using = classOf[ExecutorMetricsJsonDeserializer])
+ val peakMemoryMetrics: Option[ExecutorMetrics])
class MemoryMetrics private[spark](
val usedOnHeapStorageMemory: Long,
val usedOffHeapStorageMemory: Long,
val totalOnHeapStorageMemory: Long,
val totalOffHeapStorageMemory: Long)
+/** deserializer for peakMemoryMetrics: convert map to ExecutorMetrics */
+private[spark] class ExecutorMetricsJsonDeserializer
+ extends JsonDeserializer[Option[ExecutorMetrics]] {
+ override def deserialize(
+ jsonParser: JsonParser,
+ deserializationContext: DeserializationContext):
Option[ExecutorMetrics] = {
+ val metricsMap = jsonParser.readValueAs[Option[Map[String, Long]]](
+ new TypeReference[Option[Map[String, java.lang.Long]]] {})
+ metricsMap match {
+ case Some(metrics) =>
+ Some(new ExecutorMetrics(metrics))
+ case None => None
+ }
+ }
+}
+/** serializer for peakMemoryMetrics: convert ExecutorMetrics to map with
metric name as key */
+private[spark] class ExecutorMetricsJsonSerializer
+ extends JsonSerializer[Option[ExecutorMetrics]] {
--- End diff --
If this is empty, does it serialize as `null`, or does it not serialize at
all? Or does it serialize some other token, like `empty`?
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]