[ https://issues.apache.org/jira/browse/SPARK-39100?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Apache Spark reassigned SPARK-39100:
------------------------------------

    Assignee: Apache Spark

> RDD DeterministicLevel Needs Backward Compatibility
> ---------------------------------------------------
>
>                 Key: SPARK-39100
>                 URL: https://issues.apache.org/jira/browse/SPARK-39100
>             Project: Spark
>          Issue Type: Bug
>          Components: Web UI
>    Affects Versions: 2.4.8, 3.2.1
>            Reporter: Zhou JIANG
>            Assignee: Apache Spark
>            Priority: Major
>
> SPARK-34592 introduced RDD DeterministicLevel, which is used in the Spark 
> UI stage DAG visualization.
> This change is not fully backward compatible for the History Server. In 
> production, a history server (regardless of its own version) may be 
> responsible for deserializing job / RDD data written by multiple Spark 
> versions. Historical RDD data from a previous Spark version does not have 
> the new field, so UI rendering for the Stages / Executors tabs can crash with:
>
>     throwable: {
>       class: scala.MatchError
>       msg: null
>       stack: [
>         org.apache.spark.ui.scope.RDDOperationGraph$.org$apache$spark$ui$scope$RDDOperationGraph$$makeDotNode(RDDOperationGraph.scala:242)
>         org.apache.spark.ui.scope.RDDOperationGraph$$anonfun$org$apache$spark$ui$scope$RDDOperationGraph$$makeDotSubgraph$1.apply(RDDOperationGraph.scala:260)
>         org.apache.spark.ui.scope.RDDOperationGraph$$anonfun$org$apache$spark$ui$scope$RDDOperationGraph$$makeDotSubgraph$1.apply(RDDOperationGraph.scala:259)
>         scala.collection.immutable.Stream.foreach(Stream.scala:594)
>         org.apache.spark.ui.scope.RDDOperationGraph$.org$apache$spark$ui$scope$RDDOperationGraph$$makeDotSubgraph(RDDOperationGraph.scala:259)
>         org.apache.spark.ui.scope.RDDOperationGraph$$anonfun$org$apache$spark$ui$scope$RDDOperationGraph$$makeDotSubgraph$2.apply(RDDOperationGraph.scala:263)
>         org.apache.spark.ui.scope.RDDOperationGraph$$anonfun$org$apache$spark$ui$scope$RDDOperationGraph$$makeDotSubgraph$2.apply(RDDOperationGraph.scala:262)
>         scala.collection.immutable.Stream.foreach(Stream.scala:594)
>         org.apache.spark.ui.scope.RDDOperationGraph$.org$apache$spark$ui$scope$RDDOperationGraph$$makeDotSubgraph(RDDOperationGraph.scala:262)
>         org.apache.spark.ui.scope.RDDOperationGraph$.makeDotFile(RDDOperationGraph.scala:227)
>         org.apache.spark.ui.UIUtils$$anonfun$showDagViz$1.apply(UIUtils.scala:433)
>         org.apache.spark.ui.UIUtils$$anonfun$showDagViz$1.apply(UIUtils.scala:429)
>         scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>         scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
>         scala.collection.immutable.List.foreach(List.scala:392)
>         scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
>         scala.collection.immutable.List.map(List.scala:296)
>         org.apache.spark.ui.UIUtils$.showDagViz(UIUtils.scala:429)
>         org.apache.spark.ui.UIUtils$.showDagVizForStage(UIUtils.scala:401)
>         org.apache.spark.ui.jobs.StagePage.render(StagePage.scala:257)
>         org.apache.spark.ui.WebUI$$anonfun$2.apply(WebUI.scala:90)
>         org.apache.spark.ui.WebUI$$anonfun$2.apply(WebUI.scala:90)
>         org.apache.spark.ui.JettyUtils$$anon$3.doGet(JettyUtils.scala:90)
>         javax.servlet.http.HttpServlet.service(HttpServlet.java:687)
>         javax.servlet.http.HttpServlet.service(HttpServlet.java:790)
>         org.spark_project.jetty.servlet.ServletHolder.handle(ServletHolder.java:791)
>         org.spark_project.jetty.servlet.ServletHandler$ChainEnd.doFilter(ServletHandler.java:1626)
>         pie.spark.ui.filter.PIEAuthFilter.doFilter(PIEAuthFilter.scala:89)
>         org.spark_project.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193)
>         org.spark_project.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1601)
>         org.apache.spark.deploy.history.ApplicationCacheCheckFilter.doFilter(ApplicationCache.scala:405)
>         org.spark_project.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193)
>         org.spark_project.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1601)
>         org.spark_project.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:548)
>         org.spark_project.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
>         org.spark_project.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1435)
>         org.spark_project.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188)
>         org.spark_project.jetty.servlet.ServletHandler.doScope(ServletHandler.java:501)
>         org.spark_project.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186)
>         org.spark_project.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1350)
>         org.spark_project.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)
>         org.spark_project.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:234)
>         org.spark_project.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
>         org.spark_project.jetty.server.Server.handle(Server.java:516)
>         org.spark_project.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:388)
>         org.spark_project.jetty.server.HttpChannel.dispatch(HttpChannel.java:633)
>         org.spark_project.jetty.server.HttpChannel.handle(HttpChannel.java:380)
>         org.spark_project.jetty.server.HttpConnection.onFillable(HttpConnection.java:277)
>         org.spark_project.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311)
>         org.spark_project.jetty.io.FillInterest.fillable(FillInterest.java:105)
>         org.spark_project.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104)
>         org.spark_project.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336)
>         org.spark_project.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313)
>         org.spark_project.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171)
>         org.spark_project.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129)
>         org.spark_project.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:383)
>         org.spark_project.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:882)
>         org.spark_project.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:1036)
>         java.lang.Thread.run(Thread.java:748)
>       ]
>     }
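>
> To make the failure mode concrete, here is a minimal, self-contained Scala 
> sketch. The local DeterministicLevel enumeration and the label helper are 
> hypothetical stand-ins (the real match lives in 
> RDDOperationGraph.makeDotNode); the sketch assumes the field deserializes 
> to null from event logs written before SPARK-34592, and shows a wildcard 
> default as one possible backward-compatible fix:
>
>     object DeterministicLevelRepro {
>       // Hypothetical stand-in for org.apache.spark.rdd.DeterministicLevel,
>       // defined locally so the sketch compiles without a Spark dependency.
>       object DeterministicLevel extends Enumeration {
>         val DETERMINATE, UNORDERED, INDETERMINATE = Value
>       }
>
>       // Roughly the shape of the match at RDDOperationGraph.scala:242.
>       // Without the trailing wildcard, a null input throws
>       // scala.MatchError: null, as in the stack trace above.
>       def label(level: DeterministicLevel.Value): String = level match {
>         case DeterministicLevel.INDETERMINATE => " [Indeterminate]"
>         case DeterministicLevel.UNORDERED     => " [Unordered]"
>         case _                                => "" // tolerate a missing field
>       }
>
>       def main(args: Array[String]): Unit = {
>         // Event logs written before SPARK-34592 carry no DeterministicLevel,
>         // so the history server deserializes the field as null.
>         val fromOldEventLog: DeterministicLevel.Value = null
>         println(label(fromOldEventLog)) // prints "" instead of crashing
>       }
>     }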


