GitHub user andrewor14 commented on a diff in the pull request:
https://github.com/apache/spark/pull/204#discussion_r11469925
--- Diff: core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala ---
@@ -90,9 +97,115 @@ private[spark] class EventLoggingListener(appName: String, conf: SparkConf)
     logEvent(event, flushLogger = true)
   override def onUnpersistRDD(event: SparkListenerUnpersistRDD) =
     logEvent(event, flushLogger = true)
+  override def onApplicationStart(event: SparkListenerApplicationStart) =
+    logEvent(event, flushLogger = true)
+  override def onApplicationEnd(event: SparkListenerApplicationEnd) =
+    logEvent(event, flushLogger = true)
+
-  def stop() = logger.stop()
+  /**
+   * Stop logging events.
+   * In addition, create an empty special file to indicate application completion.
+   */
+  def stop() = {
+    logger.newFile(APPLICATION_COMPLETE)
+    logger.stop()
+  }
+}
+
+private[spark] object EventLoggingListener extends Logging {
+  val LOG_PREFIX = "EVENT_LOG_"
+  val SPARK_VERSION_PREFIX = "SPARK_VERSION_"
+  val COMPRESSION_CODEC_PREFIX = "COMPRESSION_CODEC_"
+  val APPLICATION_COMPLETE = "APPLICATION_COMPLETE"
+
+  def isEventLogFile(fileName: String): Boolean = {
+    fileName.startsWith(LOG_PREFIX)
+  }
+
+  def isSparkVersionFile(fileName: String): Boolean = {
+    fileName.startsWith(SPARK_VERSION_PREFIX)
+  }
+
+  def isCompressionCodecFile(fileName: String): Boolean = {
+    fileName.startsWith(COMPRESSION_CODEC_PREFIX)
+  }
+
+  def isApplicationCompleteFile(fileName: String): Boolean = {
+    fileName == APPLICATION_COMPLETE
+  }
+
+  def parseSparkVersion(fileName: String): String = {
+    if (isSparkVersionFile(fileName)) {
+      fileName.replaceAll(SPARK_VERSION_PREFIX, "")
+    } else ""
+  }
+
+  def parseCompressionCodec(fileName: String): String = {
+    if (isCompressionCodecFile(fileName)) {
+      fileName.replaceAll(COMPRESSION_CODEC_PREFIX, "")
+    } else ""
+  }
+
+  /**
+   * Parse the event logging information associated with the logs in the given directory.
+   *
+   * Specifically, this looks for event log files, the Spark version file, the compression
+   * codec file (if event logs are compressed), and the application completion file (if the
+   * application has run to completion).
+   */
+  def parseLoggingInfo(logDir: Path, fileSystem: FileSystem): EventLoggingInfo = {
+    try {
+      val fileStatuses = fileSystem.listStatus(logDir)
+      val filePaths =
+        if (fileStatuses != null) {
+          fileStatuses.filter(!_.isDir).map(_.getPath).toSeq
+        } else {
+          Seq[Path]()
+        }
+      if (filePaths.isEmpty) {
+        logWarning("No files found in logging directory %s".format(logDir))
+      }
+      EventLoggingInfo(
+        logPaths = filePaths.filter { path => isEventLogFile(path.getName) },
+        sparkVersion = filePaths
+          .find { path => isSparkVersionFile(path.getName) }
+          .map { path => parseSparkVersion(path.getName) }
+          .getOrElse("<Unknown>"),
+        compressionCodec = filePaths
+          .find { path => isCompressionCodecFile(path.getName) }
+          .map { path =>
+            val codec = EventLoggingListener.parseCompressionCodec(path.getName)
+            val conf = new SparkConf
+            conf.set("spark.io.compression.codec", codec)
+            CompressionCodec.createCodec(conf)
--- End diff ---
Good point
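
For context, here is a minimal self-contained sketch of how the prefix-based marker names in the diff round-trip (the object name and the `main` harness are illustrative only; the prefix constants are copied from the diff). It uses `stripPrefix` rather than `replaceAll`: `replaceAll` treats its first argument as a regex and rewrites every occurrence, whereas `stripPrefix` removes only a leading literal match.

```scala
// Illustrative sketch only -- prefix names copied from the diff above.
object EventLogFileNames {
  val SPARK_VERSION_PREFIX = "SPARK_VERSION_"

  // A file is a Spark version marker iff its name starts with the prefix.
  def isSparkVersionFile(fileName: String): Boolean =
    fileName.startsWith(SPARK_VERSION_PREFIX)

  // stripPrefix removes only a leading literal match, so a version string
  // that happened to contain the prefix elsewhere would be left intact.
  def parseSparkVersion(fileName: String): String =
    if (isSparkVersionFile(fileName)) fileName.stripPrefix(SPARK_VERSION_PREFIX)
    else ""

  def main(args: Array[String]): Unit = {
    val name = SPARK_VERSION_PREFIX + "1.0.0" // i.e. "SPARK_VERSION_1.0.0"
    assert(isSparkVersionFile(name))
    assert(parseSparkVersion(name) == "1.0.0")
    println(parseSparkVersion(name)) // prints 1.0.0
  }
}
```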