[
https://issues.apache.org/jira/browse/SPARK-31929?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
zhli updated SPARK-31929:
-------------------------
    Summary: local cache size exceeding "spark.history.store.maxDiskUsage" triggered "java.io.IOException" in history server on Windows
    (was: local cache exceeding "spark.history.store.maxDiskUsage" triggered "java.io.IOException" in history server on Windows)
> local cache size exceeding "spark.history.store.maxDiskUsage" triggered
> "java.io.IOException" in history server on Windows
> --------------------------------------------------------------------------------------------------------------------------
>
> Key: SPARK-31929
> URL: https://issues.apache.org/jira/browse/SPARK-31929
> Project: Spark
> Issue Type: Bug
> Components: Web UI
> Affects Versions: 2.4.4, 3.0.0
> Environment: System: Windows
> Config:
> spark.history.retainedApplications 200
> spark.history.store.maxDiskUsage 2g
> spark.history.store.path d://cache_hs
> Reporter: zhli
> Priority: Minor
>
> h2. HTTP ERROR 500
> Problem accessing /history/app-20190711215551-0001/stages/. Reason:
> Server Error
>
> h3. Caused by:
> java.io.IOException: Unable to delete file: d:\cache_hs\apps\app-20190711215551-0001.ldb\MANIFEST-000007
>     at org.apache.commons.io.FileUtils.forceDelete(FileUtils.java:2381)
>     at org.apache.commons.io.FileUtils.cleanDirectory(FileUtils.java:1679)
>     at org.apache.commons.io.FileUtils.deleteDirectory(FileUtils.java:1575)
>     at org.apache.spark.deploy.history.HistoryServerDiskManager.org$apache$spark$deploy$history$HistoryServerDiskManager$$deleteStore(HistoryServerDiskManager.scala:198)
>     at org.apache.spark.deploy.history.HistoryServerDiskManager.$anonfun$release$1(HistoryServerDiskManager.scala:161)
>     at scala.runtime.java8.JFunction1$mcVJ$sp.apply(JFunction1$mcVJ$sp.java:23)
>     at scala.Option.foreach(Option.scala:407)
>     at org.apache.spark.deploy.history.HistoryServerDiskManager.release(HistoryServerDiskManager.scala:156)
>     at org.apache.spark.deploy.history.FsHistoryProvider.$anonfun$loadDiskStore$1(FsHistoryProvider.scala:1163)
>     at org.apache.spark.deploy.history.FsHistoryProvider.$anonfun$loadDiskStore$1$adapted(FsHistoryProvider.scala:1157)
>     at scala.Option.foreach(Option.scala:407)
>     at org.apache.spark.deploy.history.FsHistoryProvider.loadDiskStore(FsHistoryProvider.scala:1157)
>     at org.apache.spark.deploy.history.FsHistoryProvider.getAppUI(FsHistoryProvider.scala:363)
>     at org.apache.spark.deploy.history.HistoryServer.getAppUI(HistoryServer.scala:191)
>     at org.apache.spark.deploy.history.ApplicationCache.$anonfun$loadApplicationEntry$2(ApplicationCache.scala:163)
>     at org.apache.spark.deploy.history.ApplicationCache.time(ApplicationCache.scala:135)
>     at org.apache.spark.deploy.history.ApplicationCache.org$apache$spark$deploy$history$ApplicationCache$$loadApplicationEntry(ApplicationCache.scala:161)
>     at org.apache.spark.deploy.history.ApplicationCache$$anon$1.load(ApplicationCache.scala:56)
>     at org.apache.spark.deploy.history.ApplicationCache$$anon$1.load(ApplicationCache.scala:52)
>     at org.sparkproject.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
>     at org.sparkproject.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
>     at org.sparkproject.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
>     at org.sparkproject.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
>     at org.sparkproject.guava.cache.LocalCache.get(LocalCache.java:4000)
>     at org.sparkproject.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
>     at org.sparkproject.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
>     at org.apache.spark.deploy.history.ApplicationCache.get(ApplicationCache.scala:89)
>     at org.apache.spark.deploy.history.ApplicationCache.withSparkUI(ApplicationCache.scala:101)
>     at org.apache.spark.deploy.history.HistoryServer.org$apache$spark$deploy$history$HistoryServer$$loadAppUi(HistoryServer.scala:248)
>     at org.apache.spark.deploy.history.HistoryServer$$anon$1.doGet(HistoryServer.scala:101)
>     at javax.servlet.http.HttpServlet.service(HttpServlet.java:687)
>     at javax.servlet.http.HttpServlet.service(HttpServlet.java:790)
>     at org.sparkproject.jetty.servlet.ServletHolder.handle(ServletHolder.java:873)
>     at org.sparkproject.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1623)
>     at org.apache.spark.ui.HttpSecurityFilter.doFilter(HttpSecurityFilter.scala:95)
>     at org.sparkproject.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1610)
>     at org.sparkproject.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:540)
>     at org.sparkproject.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)
>     at org.sparkproject.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1345)
>     at org.sparkproject.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)
>     at org.sparkproject.jetty.servlet.ServletHandler.doScope(ServletHandler.java:480)
>     at org.sparkproject.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)
>     at org.sparkproject.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1247)
>     at org.sparkproject.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)
>     at org.sparkproject.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:753)
>     at org.sparkproject.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:220)
>     at org.sparkproject.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)
>     at org.sparkproject.jetty.server.Server.handle(Server.java:505)
>     at org.sparkproject.jetty.server.HttpChannel.handle(HttpChannel.java:370)
>     at org.sparkproject.jetty.server.HttpConnection.onFillable(HttpConnection.java:267)
>     at org.sparkproject.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:305)
>     at org.sparkproject.jetty.io.FillInterest.fillable(FillInterest.java:103)
>     at org.sparkproject.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:117)
>     at org.sparkproject.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:333)
>     at org.sparkproject.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:310)
>     at org.sparkproject.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)
>     at org.sparkproject.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:126)
>     at org.sparkproject.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:366)
>     at org.sparkproject.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:698)
>     at org.sparkproject.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:804)
>     at java.lang.Thread.run(Thread.java:748)
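>
> The delete likely fails because Windows, unlike POSIX systems, refuses to remove a file that still has an open handle, and LevelDB keeps its MANIFEST file open until the store is closed. A minimal sketch of that OS-level behavior (the diagnosis and the class name {{OpenHandleDeleteSketch}} are assumptions for illustration, not part of the original report):
> {code:java}
> import java.io.File;
> import java.io.FileOutputStream;
> import java.io.IOException;
>
> public class OpenHandleDeleteSketch {
>     public static void main(String[] args) throws IOException {
>         File f = File.createTempFile("MANIFEST-", ".tmp");
>         // Hold the file open, as LevelDB does with its MANIFEST.
>         try (FileOutputStream out = new FileOutputStream(f)) {
>             out.write(0);
>             // On Windows this prints false: the OS will not delete a
>             // file with an open handle. On Linux/macOS the unlink succeeds.
>             System.out.println("delete while open: " + f.delete());
>         }
>         // Once the handle is closed the file can be deleted; on POSIX
>         // the first call already removed it, so this prints false there.
>         System.out.println("delete after close: " + f.delete());
>     }
> }
> {code}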
> ----
> [Powered by Jetty:// 9.4.z-SNAPSHOT|http://eclipse.org/jetty]
> ----
--
This message was sent by Atlassian Jira
(v8.3.4#803005)