Github user vanzin commented on a diff in the pull request:
https://github.com/apache/spark/pull/22504#discussion_r226457291
--- Diff: core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala ---
@@ -800,14 +817,33 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
     stale.foreach { log =>
       if (log.appId.isEmpty) {
         logInfo(s"Deleting invalid / corrupt event log ${log.logPath}")
-        deleteLog(new Path(log.logPath))
+        deleteLog(fs, new Path(log.logPath))
         listing.delete(classOf[LogInfo], log.logPath)
       }
     }
     // Clean the blacklist from the expired entries.
     clearBlacklist(CLEAN_INTERVAL_S)
   }
+  /**
+   * Delete driver logs from the configured spark dfs dir that exceed the configured max age
+   */
+  private[history] def cleanDriverLogs(): Unit = Utils.tryLog {
+    val driverLogDir = conf.get(DRIVER_LOG_DFS_DIR)
+    driverLogDir.foreach { dl =>
+      val maxTime = clock.getTimeMillis() -
+        conf.get(MAX_DRIVER_LOG_AGE_S) * 1000
--- End diff ---
Fits in previous line.
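
For illustration, a minimal sketch of what the comment appears to suggest: the wrapped expression from the last two quoted lines joined onto a single line, with no other change implied.

    // The maxTime computation from the diff, merged onto one line as the comment suggests
    val maxTime = clock.getTimeMillis() - conf.get(MAX_DRIVER_LOG_AGE_S) * 1000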
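
Since the quoted hunk is truncated right after the maxTime computation, the following is a hedged sketch of how such an age cutoff is typically applied when cleaning a DFS directory of driver logs (per the cleanDriverLogs scaladoc above). The FileSystem handle, the Hadoop Configuration, and the deletion loop are assumptions for illustration only, not the code from this pull request; logInfo is assumed to come from the surrounding Logging trait.

    import org.apache.hadoop.conf.Configuration
    import org.apache.hadoop.fs.{FileSystem, Path}

    // Hypothetical continuation: remove driver logs whose modification time predates maxTime.
    // `dl` and `maxTime` come from the quoted diff; the rest is assumed for illustration.
    val driverLogPath = new Path(dl)
    val driverLogFs: FileSystem = driverLogPath.getFileSystem(new Configuration())
    driverLogFs.listStatus(driverLogPath).foreach { status =>
      if (status.getModificationTime() < maxTime) {
        logInfo(s"Deleting expired driver log ${status.getPath()}")
        driverLogFs.delete(status.getPath(), true)
      }
    }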