Github user vanzin commented on a diff in the pull request:
https://github.com/apache/spark/pull/1218#discussion_r16210962
--- Diff: core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala ---
@@ -117,84 +145,88 @@ private[history] class FsHistoryProvider(conf: SparkConf) extends ApplicationHis
     try {
       val logStatus = fs.listStatus(new Path(resolvedLogDir))
       val logDirs = if (logStatus != null) logStatus.filter(_.isDir).toSeq else Seq[FileStatus]()
-      val logInfos = logDirs.filter { dir =>
-        fs.isFile(new Path(dir.getPath, EventLoggingListener.APPLICATION_COMPLETE))
-      }
-      val currentApps = Map[String, ApplicationHistoryInfo](
-        appList.map(app => app.id -> app):_*)
-
-      // For any application that either (i) is not listed or (ii) has changed since the last time
-      // the listing was created (defined by the log dir's modification time), load the app's info.
-      // Otherwise just reuse what's already in memory.
-      val newApps = new mutable.ArrayBuffer[ApplicationHistoryInfo](logInfos.size)
-      for (dir <- logInfos) {
-        val curr = currentApps.getOrElse(dir.getPath().getName(), null)
-        if (curr == null || curr.lastUpdated < getModificationTime(dir)) {
+      // Load all new logs from the log directory. Only directories that have a modification time
+      // later than the last known log directory will be loaded.
+      var newMostRecentModTime = mostRecentLogModTime
+      val logInfos = logDirs
+        .filter { dir =>
+          if (fs.isFile(new Path(dir.getPath(), EventLoggingListener.APPLICATION_COMPLETE))) {
+            val modTime = getModificationTime(dir)
+            newMostRecentModTime = math.max(newMostRecentModTime, modTime)
+            modTime > mostRecentLogModTime
+          } else {
+            false
+          }
+        }
+        .map { dir =>
           try {
-            val (app, _) = loadAppInfo(dir, renderUI = false)
-            newApps += app
+            val (replayBus, appListener) = createReplayBus(dir)
+            replayBus.replay()
+            new FsApplicationHistoryInfo(
+              dir.getPath().getName(),
+              appListener.appId.getOrElse(dir.getPath().getName()),
+              appListener.appName.getOrElse(NOT_STARTED),
+              appListener.startTime.getOrElse(-1L),
+              appListener.endTime.getOrElse(-1L),
+              getModificationTime(dir),
+              appListener.sparkUser.getOrElse(NOT_STARTED))
           } catch {
-            case e: Exception => logError(s"Failed to load app info from directory $dir.")
+            case e: Exception =>
+              logInfo(s"Failed to load application log data from $dir.", e)
+              null
           }
-        } else {
-          newApps += curr
         }
-      }
+        .sortBy { info => if (info != null) -info.endTime else -1 }
+
+      mostRecentLogModTime = newMostRecentModTime
+
+      if (!logInfos.isEmpty) {
+        var newAppList = new mutable.LinkedHashMap[String, FsApplicationHistoryInfo]()
+
+        // Merge the new apps with the existing ones, discarding any duplicates. The new map
+        // is created in descending end time order.
+        val currentApps = appList.values.iterator
+        var current = if (currentApps.hasNext) currentApps.next else null
+        def addOldInfo(oldInfo: FsApplicationHistoryInfo) = {
+          if (!newAppList.contains(oldInfo.id)) {
+            newAppList += (oldInfo.id -> oldInfo)
+          }
+        }
+
-      appList = newApps.sortBy { info => -info.endTime }
+        logInfos.foreach { info =>
+          if (info != null) {
+            while (current != null && current.endTime > info.endTime) {
+              addOldInfo(current)
+              current = if (currentApps.hasNext) currentApps.next else null
+            }
+
+            newAppList += (info.id -> info)
+          }
+        }
+
+        if (current != null) {
+          addOldInfo(current)
+        }
+        currentApps.foreach { oldInfo =>
+          addOldInfo(oldInfo)
+        }
+
+        appList = newAppList
--- End diff --
Actually that's not true. It creates a new map only when there are new or
updated logs.
The new map is needed because we want to list apps in order (decreasing end time). Since the listing is kept in a map, that means using a linked hash map (so that iteration order is predictable).
You could manually keep a map and a list, I guess, which is kinda what
LinkedHashMap does anyway. But that sounds like more work for no gain that I
can see.
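
To make the merge concrete, here is a minimal, self-contained sketch of the idea. It is not the actual patch: `AppInfo` and `merge` are hypothetical stand-ins for `FsApplicationHistoryInfo` and the loop in the diff above. Both the existing listing and the new entries are already ordered by decreasing end time, so a single pass can zip them into a new `LinkedHashMap`, letting new entries win over stale ones with the same id.

import scala.collection.mutable

// Hypothetical stand-in for FsApplicationHistoryInfo: just the fields the merge needs.
case class AppInfo(id: String, endTime: Long)

// Merge `newInfos` (sorted by decreasing end time) into `existing` (a
// LinkedHashMap already ordered by decreasing end time), producing a new
// LinkedHashMap in decreasing end time order. New entries take precedence
// over old entries with the same id.
def merge(
    existing: mutable.LinkedHashMap[String, AppInfo],
    newInfos: Seq[AppInfo]): mutable.LinkedHashMap[String, AppInfo] = {
  val merged = new mutable.LinkedHashMap[String, AppInfo]()
  val old = existing.values.iterator
  var current: AppInfo = if (old.hasNext) old.next() else null

  def addOld(info: AppInfo): Unit = {
    if (!merged.contains(info.id)) {
      merged += (info.id -> info)
    }
  }

  newInfos.foreach { info =>
    // Drain old entries that ended later than this new entry, so the
    // result stays sorted by decreasing end time.
    while (current != null && current.endTime > info.endTime) {
      addOld(current)
      current = if (old.hasNext) old.next() else null
    }
    merged += (info.id -> info)
  }

  // Append whatever is left of the old listing.
  if (current != null) {
    addOld(current)
  }
  old.foreach(addOld)
  merged
}

// Example: existing apps ended at 300 and 100; the new entries ended at 200
// and 150 (the latter an update of an existing id). The result iterates as
// 300, 200, 150.
val existing = mutable.LinkedHashMap("a" -> AppInfo("a", 300L), "b" -> AppInfo("b", 100L))
val updated = merge(existing, Seq(AppInfo("c", 200L), AppInfo("b", 150L)))

That single pass is all the linked hash map is buying here: a map lookup for duplicates plus a stable, pre-sorted iteration order, without maintaining a separate list.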