LuciferYang commented on code in PR #36885:
URL: https://github.com/apache/spark/pull/36885#discussion_r898865471


##########
core/src/main/scala/org/apache/spark/util/JsonProtocol.scala:
##########
@@ -57,298 +57,398 @@ import org.apache.spark.util.Utils.weakIntern
 private[spark] object JsonProtocol {
  // TODO: Remove this file and put JSON serialization into each individual class.
 
-  private implicit val format = DefaultFormats
-
   private val mapper = new ObjectMapper().registerModule(DefaultScalaModule)
     .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
 
   /** ------------------------------------------------- *
    * JSON serialization methods for SparkListenerEvents |
    * -------------------------------------------------- */
 
-  def sparkEventToJson(event: SparkListenerEvent): JValue = {
+  def sparkEventToJsonString(event: SparkListenerEvent): String = {
+    toJsonString { generator =>
+      writeSparkEventToJson(event, generator)
+    }
+  }
+
+  def toJsonString(block: JsonGenerator => Unit): String = {
+    val baos = new ByteArrayOutputStream()
+    val generator = mapper.createGenerator(baos, JsonEncoding.UTF8)
+    block(generator)
+    generator.close()
+    new String(baos.toByteArray, StandardCharsets.UTF_8)
+  }
+
+  def writeSparkEventToJson(event: SparkListenerEvent, g: JsonGenerator): Unit = {
     event match {
       case stageSubmitted: SparkListenerStageSubmitted =>
-        stageSubmittedToJson(stageSubmitted)
+        stageSubmittedToJson(stageSubmitted, g)
       case stageCompleted: SparkListenerStageCompleted =>
-        stageCompletedToJson(stageCompleted)
+        stageCompletedToJson(stageCompleted, g)
       case taskStart: SparkListenerTaskStart =>
-        taskStartToJson(taskStart)
+        taskStartToJson(taskStart, g)
       case taskGettingResult: SparkListenerTaskGettingResult =>
-        taskGettingResultToJson(taskGettingResult)
+        taskGettingResultToJson(taskGettingResult, g)
       case taskEnd: SparkListenerTaskEnd =>
-        taskEndToJson(taskEnd)
+        taskEndToJson(taskEnd, g)
       case jobStart: SparkListenerJobStart =>
-        jobStartToJson(jobStart)
+        jobStartToJson(jobStart, g)
       case jobEnd: SparkListenerJobEnd =>
-        jobEndToJson(jobEnd)
+        jobEndToJson(jobEnd, g)
       case environmentUpdate: SparkListenerEnvironmentUpdate =>
-        environmentUpdateToJson(environmentUpdate)
+        environmentUpdateToJson(environmentUpdate, g)
       case blockManagerAdded: SparkListenerBlockManagerAdded =>
-        blockManagerAddedToJson(blockManagerAdded)
+        blockManagerAddedToJson(blockManagerAdded, g)
       case blockManagerRemoved: SparkListenerBlockManagerRemoved =>
-        blockManagerRemovedToJson(blockManagerRemoved)
+        blockManagerRemovedToJson(blockManagerRemoved, g)
       case unpersistRDD: SparkListenerUnpersistRDD =>
-        unpersistRDDToJson(unpersistRDD)
+        unpersistRDDToJson(unpersistRDD, g)
       case applicationStart: SparkListenerApplicationStart =>
-        applicationStartToJson(applicationStart)
+        applicationStartToJson(applicationStart, g)
       case applicationEnd: SparkListenerApplicationEnd =>
-        applicationEndToJson(applicationEnd)
+        applicationEndToJson(applicationEnd, g)
       case executorAdded: SparkListenerExecutorAdded =>
-        executorAddedToJson(executorAdded)
+        executorAddedToJson(executorAdded, g)
       case executorRemoved: SparkListenerExecutorRemoved =>
-        executorRemovedToJson(executorRemoved)
+        executorRemovedToJson(executorRemoved, g)
       case logStart: SparkListenerLogStart =>
-        logStartToJson(logStart)
+        logStartToJson(logStart, g)
       case metricsUpdate: SparkListenerExecutorMetricsUpdate =>
-        executorMetricsUpdateToJson(metricsUpdate)
+        executorMetricsUpdateToJson(metricsUpdate, g)
       case stageExecutorMetrics: SparkListenerStageExecutorMetrics =>
-        stageExecutorMetricsToJson(stageExecutorMetrics)
+        stageExecutorMetricsToJson(stageExecutorMetrics, g)
       case blockUpdate: SparkListenerBlockUpdated =>
-        blockUpdateToJson(blockUpdate)
+        blockUpdateToJson(blockUpdate, g)
       case resourceProfileAdded: SparkListenerResourceProfileAdded =>
-        resourceProfileAddedToJson(resourceProfileAdded)
-      case _ => parse(mapper.writeValueAsString(event))
+        resourceProfileAddedToJson(resourceProfileAdded, g)
+      case _ =>
+        mapper.writeValue(g, event)
     }
   }
 
-  def stageSubmittedToJson(stageSubmitted: SparkListenerStageSubmitted): JValue = {
-    val stageInfo = stageInfoToJson(stageSubmitted.stageInfo)
-    val properties = propertiesToJson(stageSubmitted.properties)
-    ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.stageSubmitted) ~
-    ("Stage Info" -> stageInfo) ~
-    ("Properties" -> properties)
+  def stageSubmittedToJson(stageSubmitted: SparkListenerStageSubmitted, g: JsonGenerator): Unit = {
+    g.writeStartObject()
+    g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.stageSubmitted)
+    g.writeFieldName("Stage Info")
+    stageInfoToJson(stageSubmitted.stageInfo, g)
+    g.writeFieldName("Properties")
+    propertiesToJson(stageSubmitted.properties, g)
+    g.writeEndObject()
   }
 
-  def stageCompletedToJson(stageCompleted: SparkListenerStageCompleted): JValue = {
-    val stageInfo = stageInfoToJson(stageCompleted.stageInfo)
-    ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.stageCompleted) ~
-    ("Stage Info" -> stageInfo)
+  def stageCompletedToJson(stageCompleted: SparkListenerStageCompleted, g: JsonGenerator): Unit = {
+    g.writeStartObject()
+    g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.stageCompleted)
+    g.writeFieldName("Stage Info")
+    stageInfoToJson(stageCompleted.stageInfo, g)
+    g.writeEndObject()
   }
 
-  def taskStartToJson(taskStart: SparkListenerTaskStart): JValue = {
-    val taskInfo = taskStart.taskInfo
-    ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.taskStart) ~
-    ("Stage ID" -> taskStart.stageId) ~
-    ("Stage Attempt ID" -> taskStart.stageAttemptId) ~
-    ("Task Info" -> taskInfoToJson(taskInfo))
+  def taskStartToJson(taskStart: SparkListenerTaskStart, g: JsonGenerator): Unit = {
+    g.writeStartObject()
+    g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.taskStart)
+    g.writeNumberField("Stage ID", taskStart.stageId)
+    g.writeNumberField("Stage Attempt ID", taskStart.stageAttemptId)
+    g.writeFieldName("Task Info")
+    taskInfoToJson(taskStart.taskInfo, g)
+    g.writeEndObject()
   }
 
-  def taskGettingResultToJson(taskGettingResult: SparkListenerTaskGettingResult): JValue = {
+  def taskGettingResultToJson(
+      taskGettingResult: SparkListenerTaskGettingResult,
+      g: JsonGenerator): Unit = {
     val taskInfo = taskGettingResult.taskInfo
-    ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.taskGettingResult) ~
-    ("Task Info" -> taskInfoToJson(taskInfo))
-  }
-
-  def taskEndToJson(taskEnd: SparkListenerTaskEnd): JValue = {
-    val taskEndReason = taskEndReasonToJson(taskEnd.reason)
-    val taskInfo = taskEnd.taskInfo
-    val executorMetrics = taskEnd.taskExecutorMetrics
-    val taskMetrics = taskEnd.taskMetrics
-    val taskMetricsJson = if (taskMetrics != null) taskMetricsToJson(taskMetrics) else JNothing
-    ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.taskEnd) ~
-    ("Stage ID" -> taskEnd.stageId) ~
-    ("Stage Attempt ID" -> taskEnd.stageAttemptId) ~
-    ("Task Type" -> taskEnd.taskType) ~
-    ("Task End Reason" -> taskEndReason) ~
-    ("Task Info" -> taskInfoToJson(taskInfo)) ~
-    ("Task Executor Metrics" -> executorMetricsToJson(executorMetrics)) ~
-    ("Task Metrics" -> taskMetricsJson)
-  }
-
-  def jobStartToJson(jobStart: SparkListenerJobStart): JValue = {
-    val properties = propertiesToJson(jobStart.properties)
-    ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.jobStart) ~
-    ("Job ID" -> jobStart.jobId) ~
-    ("Submission Time" -> jobStart.time) ~
-    ("Stage Infos" -> jobStart.stageInfos.map(stageInfoToJson)) ~  // Added in 
Spark 1.2.0
-    ("Stage IDs" -> jobStart.stageIds) ~
-    ("Properties" -> properties)
-  }
-
-  def jobEndToJson(jobEnd: SparkListenerJobEnd): JValue = {
-    val jobResult = jobResultToJson(jobEnd.jobResult)
-    ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.jobEnd) ~
-    ("Job ID" -> jobEnd.jobId) ~
-    ("Completion Time" -> jobEnd.time) ~
-    ("Job Result" -> jobResult)
-  }
-
-  def environmentUpdateToJson(environmentUpdate: SparkListenerEnvironmentUpdate): JValue = {
+    g.writeStartObject()
+    g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.taskGettingResult)
+    g.writeFieldName("Task Info")
+    taskInfoToJson(taskInfo, g)
+    g.writeEndObject()
+  }
+
+  def taskEndToJson(taskEnd: SparkListenerTaskEnd, g: JsonGenerator): Unit = {
+    g.writeStartObject()
+    g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.taskEnd)
+    g.writeNumberField("Stage ID", taskEnd.stageId)
+    g.writeNumberField("Stage Attempt ID", taskEnd.stageAttemptId)
+    g.writeStringField("Task Type", taskEnd.taskType)
+    g.writeFieldName("Task End Reason")
+    taskEndReasonToJson(taskEnd.reason, g)
+    g.writeFieldName("Task Info")
+    taskInfoToJson(taskEnd.taskInfo, g)
+    g.writeFieldName("Task Executor Metrics")
+    executorMetricsToJson(taskEnd.taskExecutorMetrics, g)
+    Option(taskEnd.taskMetrics).foreach { m =>
+      g.writeFieldName("Task Metrics")
+      taskMetricsToJson(m, g)
+    }
+    g.writeEndObject()
+  }
+
+  def jobStartToJson(jobStart: SparkListenerJobStart, g: JsonGenerator): Unit = {
+    g.writeStartObject()
+    g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.jobStart)
+    g.writeNumberField("Job ID", jobStart.jobId)
+    g.writeNumberField("Submission Time", jobStart.time)
+    g.writeArrayFieldStart("Stage Infos")  // Added in Spark 1.2.0
+    jobStart.stageInfos.foreach(stageInfoToJson(_, g))
+    g.writeEndArray()
+    g.writeArrayFieldStart("Stage IDs")
+    jobStart.stageIds.foreach(g.writeNumber)
+    g.writeEndArray()
+    g.writeFieldName("Properties")
+    propertiesToJson(jobStart.properties, g)
+    g.writeEndObject()
+  }
+
+  def jobEndToJson(jobEnd: SparkListenerJobEnd, g: JsonGenerator): Unit = {
+    g.writeStartObject()
+    g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.jobEnd)
+    g.writeNumberField("Job ID", jobEnd.jobId)
+    g.writeNumberField("Completion Time", jobEnd.time)
+    g.writeFieldName("Job Result")
+    jobResultToJson(jobEnd.jobResult, g)
+    g.writeEndObject()
+  }
+
+  def environmentUpdateToJson(
+      environmentUpdate: SparkListenerEnvironmentUpdate,
+      g: JsonGenerator): Unit = {
     val environmentDetails = environmentUpdate.environmentDetails
-    val jvmInformation = mapToJson(environmentDetails("JVM Information").toMap)
-    val sparkProperties = mapToJson(environmentDetails("Spark Properties").toMap)
-    val hadoopProperties = mapToJson(environmentDetails("Hadoop Properties").toMap)
-    val systemProperties = mapToJson(environmentDetails("System Properties").toMap)
-    val metricsProperties = mapToJson(environmentDetails("Metrics Properties").toMap)
-    val classpathEntries = mapToJson(environmentDetails("Classpath Entries").toMap)
-    ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.environmentUpdate) ~
-    ("JVM Information" -> jvmInformation) ~
-    ("Spark Properties" -> sparkProperties) ~
-    ("Hadoop Properties" -> hadoopProperties) ~
-    ("System Properties" -> systemProperties) ~
-    ("Metrics Properties"-> metricsProperties) ~
-    ("Classpath Entries" -> classpathEntries)
-  }
-
-  def blockManagerAddedToJson(blockManagerAdded: SparkListenerBlockManagerAdded): JValue = {
-    val blockManagerId = blockManagerIdToJson(blockManagerAdded.blockManagerId)
-    ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.blockManagerAdded) ~
-    ("Block Manager ID" -> blockManagerId) ~
-    ("Maximum Memory" -> blockManagerAdded.maxMem) ~
-    ("Timestamp" -> blockManagerAdded.time) ~
-    ("Maximum Onheap Memory" -> blockManagerAdded.maxOnHeapMem) ~
-    ("Maximum Offheap Memory" -> blockManagerAdded.maxOffHeapMem)
-  }
-
-  def blockManagerRemovedToJson(blockManagerRemoved: SparkListenerBlockManagerRemoved): JValue = {
-    val blockManagerId = blockManagerIdToJson(blockManagerRemoved.blockManagerId)
-    ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.blockManagerRemoved) ~
-    ("Block Manager ID" -> blockManagerId) ~
-    ("Timestamp" -> blockManagerRemoved.time)
-  }
-
-  def unpersistRDDToJson(unpersistRDD: SparkListenerUnpersistRDD): JValue = {
-    ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.unpersistRDD) ~
-    ("RDD ID" -> unpersistRDD.rddId)
-  }
-
-  def applicationStartToJson(applicationStart: SparkListenerApplicationStart): JValue = {
-    ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.applicationStart) ~
-    ("App Name" -> applicationStart.appName) ~
-    ("App ID" -> applicationStart.appId.map(JString(_)).getOrElse(JNothing)) ~
-    ("Timestamp" -> applicationStart.time) ~
-    ("User" -> applicationStart.sparkUser) ~
-    ("App Attempt ID" -> 
applicationStart.appAttemptId.map(JString(_)).getOrElse(JNothing)) ~
-    ("Driver Logs" -> 
applicationStart.driverLogs.map(mapToJson).getOrElse(JNothing)) ~
-    ("Driver Attributes" -> 
applicationStart.driverAttributes.map(mapToJson).getOrElse(JNothing))
-  }
-
-  def applicationEndToJson(applicationEnd: SparkListenerApplicationEnd): JValue = {
-    ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.applicationEnd) ~
-    ("Timestamp" -> applicationEnd.time)
-  }
-
-  def resourceProfileAddedToJson(profileAdded: SparkListenerResourceProfileAdded): JValue = {
-    ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.resourceProfileAdded) ~
-      ("Resource Profile Id" -> profileAdded.resourceProfile.id) ~
-      ("Executor Resource Requests" ->
-        executorResourceRequestMapToJson(profileAdded.resourceProfile.executorResources)) ~
-      ("Task Resource Requests" ->
-        taskResourceRequestMapToJson(profileAdded.resourceProfile.taskResources))
-  }
-
-  def executorAddedToJson(executorAdded: SparkListenerExecutorAdded): JValue = {
-    ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.executorAdded) ~
-    ("Timestamp" -> executorAdded.time) ~
-    ("Executor ID" -> executorAdded.executorId) ~
-    ("Executor Info" -> executorInfoToJson(executorAdded.executorInfo))
-  }
-
-  def executorRemovedToJson(executorRemoved: SparkListenerExecutorRemoved): JValue = {
-    ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.executorRemoved) ~
-    ("Timestamp" -> executorRemoved.time) ~
-    ("Executor ID" -> executorRemoved.executorId) ~
-    ("Removed Reason" -> executorRemoved.reason)
-  }
-
-  def logStartToJson(logStart: SparkListenerLogStart): JValue = {
-    ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.logStart) ~
-    ("Spark Version" -> SPARK_VERSION)
-  }
-
-  def executorMetricsUpdateToJson(metricsUpdate: SparkListenerExecutorMetricsUpdate): JValue = {
+    g.writeStartObject()
+    g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.environmentUpdate)
+    writeMapField("JVM Information", environmentDetails("JVM Information").toMap, g)
+    writeMapField("Spark Properties", environmentDetails("Spark Properties").toMap, g)
+    writeMapField("Hadoop Properties", environmentDetails("Hadoop Properties").toMap, g)
+    writeMapField("System Properties", environmentDetails("System Properties").toMap, g)
+    writeMapField("Metrics Properties", environmentDetails("Metrics Properties").toMap, g)
+    writeMapField("Classpath Entries", environmentDetails("Classpath Entries").toMap, g)
+    g.writeEndObject()
+  }
+
+  def blockManagerAddedToJson(
+      blockManagerAdded: SparkListenerBlockManagerAdded,
+      g: JsonGenerator): Unit = {
+    g.writeStartObject()
+    g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.blockManagerAdded)
+    g.writeFieldName("Block Manager ID")
+    blockManagerIdToJson(blockManagerAdded.blockManagerId, g)
+    g.writeNumberField("Maximum Memory", blockManagerAdded.maxMem)
+    g.writeNumberField("Timestamp", blockManagerAdded.time)
+    blockManagerAdded.maxOnHeapMem.foreach(g.writeNumberField("Maximum Onheap Memory", _))
+    blockManagerAdded.maxOffHeapMem.foreach(g.writeNumberField("Maximum Offheap Memory", _))
+    g.writeEndObject()
+  }
+
+  def blockManagerRemovedToJson(
+      blockManagerRemoved: SparkListenerBlockManagerRemoved,
+      g: JsonGenerator): Unit = {
+    g.writeStartObject()
+    g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.blockManagerRemoved)
+    g.writeFieldName("Block Manager ID")
+    blockManagerIdToJson(blockManagerRemoved.blockManagerId, g)
+    g.writeNumberField("Timestamp", blockManagerRemoved.time)
+    g.writeEndObject()
+  }
+
+  def unpersistRDDToJson(unpersistRDD: SparkListenerUnpersistRDD, g: JsonGenerator): Unit = {
+    g.writeStartObject()
+    g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.unpersistRDD)
+    g.writeNumberField("RDD ID", unpersistRDD.rddId)
+    g.writeEndObject()
+  }
+
+  def applicationStartToJson(
+      applicationStart: SparkListenerApplicationStart,
+      g: JsonGenerator): Unit = {
+    g.writeStartObject()
+    g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.applicationStart)
+    g.writeStringField("App Name", applicationStart.appName)
+    applicationStart.appId.foreach(g.writeStringField("App ID", _))
+    g.writeNumberField("Timestamp", applicationStart.time)
+    g.writeStringField("User", applicationStart.sparkUser)
+    applicationStart.appAttemptId.foreach(g.writeStringField("App Attempt ID", _))
+    applicationStart.driverLogs.foreach(writeMapField("Driver Logs", _, g))
+    applicationStart.driverAttributes.foreach(writeMapField("Driver Attributes", _, g))
+    g.writeEndObject()
+  }
+
+  def applicationEndToJson(
+      applicationEnd: SparkListenerApplicationEnd,
+      g: JsonGenerator): Unit = {
+    g.writeStartObject()
+    g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.applicationEnd)
+    g.writeNumberField("Timestamp", applicationEnd.time)
+    g.writeEndObject()
+  }
+
+  def resourceProfileAddedToJson(
+      profileAdded: SparkListenerResourceProfileAdded,
+      g: JsonGenerator
+    ): Unit = {
+    g.writeStartObject()
+    g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.resourceProfileAdded)
+    g.writeNumberField("Resource Profile Id", profileAdded.resourceProfile.id)
+    g.writeFieldName("Executor Resource Requests")
+    executorResourceRequestMapToJson(profileAdded.resourceProfile.executorResources, g)
+    g.writeFieldName("Task Resource Requests")
+    taskResourceRequestMapToJson(profileAdded.resourceProfile.taskResources, g)
+    g.writeEndObject()
+  }
+
+  def executorAddedToJson(executorAdded: SparkListenerExecutorAdded, g: JsonGenerator): Unit = {
+    g.writeStartObject()
+    g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.executorAdded)
+    g.writeNumberField("Timestamp", executorAdded.time)
+    g.writeStringField("Executor ID", executorAdded.executorId)
+    g.writeFieldName("Executor Info")
+    executorInfoToJson(executorAdded.executorInfo, g)
+    g.writeEndObject()
+  }
+
+  def executorRemovedToJson(
+      executorRemoved: SparkListenerExecutorRemoved,
+      g: JsonGenerator): Unit = {
+    g.writeStartObject()
+    g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.executorRemoved)
+    g.writeNumberField("Timestamp", executorRemoved.time)
+    g.writeStringField("Executor ID", executorRemoved.executorId)
+    g.writeStringField("Removed Reason", executorRemoved.reason)
+    g.writeEndObject()
+  }
+
+  def logStartToJson(logStart: SparkListenerLogStart, g: JsonGenerator): Unit = {
+    g.writeStartObject()
+    g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.logStart)
+    g.writeStringField("Spark Version", SPARK_VERSION)

Review Comment:
   "Spark Version" always written as `SPARK_VERSION` instead of  
`logStart.sparkVersion`, so it seems that parameter `logStart` is useless
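
   A minimal sketch of the alternative this implies, assuming the intent is to round-trip the event's own version field (note the removed json4s code above also wrote `SPARK_VERSION`, so this would be a behavior change, not just a cleanup):

   ```scala
   def logStartToJson(logStart: SparkListenerLogStart, g: JsonGenerator): Unit = {
     g.writeStartObject()
     g.writeStringField("Event", SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.logStart)
     // Hypothetical change: write the event's own field rather than the global
     // SPARK_VERSION constant, so a re-serialized event keeps its original version.
     g.writeStringField("Spark Version", logStart.sparkVersion)
     g.writeEndObject()
   }
   ```

   If writing `SPARK_VERSION` is intentional, the `logStart` parameter could still be kept for signature consistency with the other `*ToJson` methods.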
   
   
   



