Github user vanzin commented on a diff in the pull request:
https://github.com/apache/spark/pull/22504#discussion_r226740098
--- Diff: core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala ---
@@ -798,14 +815,52 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
       stale.foreach { log =>
         if (log.appId.isEmpty) {
           logInfo(s"Deleting invalid / corrupt event log ${log.logPath}")
-          deleteLog(new Path(log.logPath))
+          deleteLog(fs, new Path(log.logPath))
           listing.delete(classOf[LogInfo], log.logPath)
         }
       }
       // Clean the blacklist from the expired entries.
       clearBlacklist(CLEAN_INTERVAL_S)
     }
+  /**
+   * Delete driver logs from the configured spark dfs dir that exceed the
+   * configured max age.
+   */
+  private[history] def cleanDriverLogs(): Unit = Utils.tryLog {
+    val driverLogDir = conf.get(DRIVER_LOG_DFS_DIR).get
+    val currentTime = clock.getTimeMillis()
+    val maxTime = currentTime - conf.get(MAX_DRIVER_LOG_AGE_S) * 1000
+    val appDirs = driverLogFs.get.listLocatedStatus(new Path(driverLogDir))
+    while (appDirs.hasNext()) {
+      val appDirStatus = appDirs.next()
+      if (appDirStatus.isDirectory()) {
+        val logFiles = driverLogFs.get.listStatus(appDirStatus.getPath())
+        var deleteDir = true
+        logFiles.foreach { f =>
+          try {
+            val info = listing.read(classOf[LogInfo], f.getPath().toString())
--- End diff ---
This block probably deserves a comment.
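
For context, the loop walks each per-application directory under the driver log dir and decides whether it can be deleted once everything in it is older than the configured cutoff. A minimal self-contained sketch of that age-based cleanup pattern, using only the Hadoop `FileSystem` API (the `cleanOldDriverLogs` name and its parameters are hypothetical, and it substitutes file modification time for the `listing` database lookup the PR actually performs):

```scala
import org.apache.hadoop.fs.{FileSystem, Path}

object DriverLogCleanerSketch {
  // Delete an application's driver-log directory only when every file in it
  // is older than the cutoff; a single fresh file keeps the directory alive.
  def cleanOldDriverLogs(fs: FileSystem, logDir: Path, maxAgeMs: Long, nowMs: Long): Unit = {
    val cutoff = nowMs - maxAgeMs
    val appDirs = fs.listLocatedStatus(logDir)
    while (appDirs.hasNext()) {
      val appDirStatus = appDirs.next()
      if (appDirStatus.isDirectory()) {
        val allStale = fs.listStatus(appDirStatus.getPath())
          .forall(_.getModificationTime < cutoff)
        if (allStale) {
          // Recursive delete of the whole app directory.
          fs.delete(appDirStatus.getPath(), true)
        }
      }
    }
  }
}
```

The point the suggested comment would capture is exactly the part the sketch glosses over: why the block consults the listing DB per file before flipping the delete decision.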