[ https://issues.apache.org/jira/browse/SPARK-18462?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15675234#comment-15675234 ]
Apache Spark commented on SPARK-18462: -------------------------------------- User 'JoshRosen' has created a pull request for this issue: https://github.com/apache/spark/pull/15922 > SparkListenerDriverAccumUpdates event does not deserialize properly in > history server > ------------------------------------------------------------------------------------- > > Key: SPARK-18462 > URL: https://issues.apache.org/jira/browse/SPARK-18462 > Project: Spark > Issue Type: Bug > Components: SQL > Affects Versions: 2.0.0 > Reporter: Josh Rosen > Assignee: Josh Rosen > > The following test fails with a ClassCastException due to oddities in how > Jackson object mapping works, breaking the SQL tab in the history server: > {code} > +++ > b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLListenerSuite.scala > @@ -19,6 +19,7 @@ package org.apache.spark.sql.execution.ui > import java.util.Properties > +import org.json4s.jackson.JsonMethods._ > import org.mockito.Mockito.mock > import org.apache.spark._ > @@ -35,7 +36,7 @@ import org.apache.spark.sql.execution.{LeafExecNode, > QueryExecution, SparkPlanIn > import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics} > import org.apache.spark.sql.test.SharedSQLContext > import org.apache.spark.ui.SparkUI > -import org.apache.spark.util.{AccumulatorMetadata, LongAccumulator} > +import org.apache.spark.util.{AccumulatorMetadata, JsonProtocol, > LongAccumulator} > class SQLListenerSuite extends SparkFunSuite with SharedSQLContext { > @@ -416,6 +417,20 @@ class SQLListenerSuite extends SparkFunSuite with > SharedSQLContext { > assert(driverUpdates(physicalPlan.longMetric("dummy").id) == > expectedAccumValue) > } > + test("roundtripping SparkListenerDriverAccumUpdates through JsonProtocol") > { > + val event = SparkListenerDriverAccumUpdates(1L, Seq((2L, 3L))) > + val actualJsonString = > compact(render(JsonProtocol.sparkEventToJson(event))) > + val newEvent = JsonProtocol.sparkEventFromJson(parse(actualJsonString)) > 
+ newEvent match { > + case SparkListenerDriverAccumUpdates(executionId, accums) => > + assert(executionId == 1L) > + accums.foreach { case (a, b) => > + assert(a == 2L) > + assert(b == 3L) > + } > + } > + } > + > {code} -- This message was sent by Atlassian JIRA (v6.3.4#6332) --------------------------------------------------------------------- To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org For additional commands, e-mail: issues-help@spark.apache.org