Github user andrewor14 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/1222#discussion_r21727246
  
    --- Diff: core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala ---
    @@ -58,36 +61,78 @@ private[spark] class EventLoggingListener(
       private val shouldOverwrite = sparkConf.getBoolean("spark.eventLog.overwrite", false)
       private val testing = sparkConf.getBoolean("spark.eventLog.testing", false)
       private val outputBufferSize = sparkConf.getInt("spark.eventLog.buffer.kb", 100) * 1024
    -  val logDir = EventLoggingListener.getLogDirPath(logBaseDir, appId)
    -  val logDirName: String = logDir.split("/").last
    -  protected val logger = new FileLogger(logDir, sparkConf, hadoopConf, outputBufferSize,
    -    shouldCompress, shouldOverwrite, Some(LOG_FILE_PERMISSIONS))
    +  private val fileSystem = Utils.getHadoopFileSystem(new URI(logBaseDir), hadoopConf)
    +
    +  // Only defined if the file system scheme is not local
    +  private var hadoopDataStream: Option[FSDataOutputStream] = None
    +
    +  // The Hadoop APIs have changed over time, so we use reflection to figure out
    +  // the correct method to use to flush a hadoop data stream. See SPARK-1518
    +  // for details.
    +  private val hadoopFlushMethod = {
    +    val cls = classOf[FSDataOutputStream]
    +    scala.util.Try(cls.getMethod("hflush")).getOrElse(cls.getMethod("sync"))
    +  }
    +
    +  private var writer: Option[PrintWriter] = None
     
       // For testing. Keep track of all JSON serialized events that have been logged.
       private[scheduler] val loggedEvents = new ArrayBuffer[JValue]
     
    +  // Visible for tests only.
    +  private[scheduler] val logPath = getLogPath(logBaseDir, appId)
    +
       /**
    -   * Begin logging events.
    -   * If compression is used, log a file that indicates which compression library is used.
    +   * Creates the log file in the configured log directory.
        */
       def start() {
    -    logger.start()
    -    logInfo("Logging events to %s".format(logDir))
    -    if (shouldCompress) {
    -      val codec =
    -        sparkConf.get("spark.io.compression.codec", CompressionCodec.DEFAULT_COMPRESSION_CODEC)
    -      logger.newFile(COMPRESSION_CODEC_PREFIX + codec)
    +    if (!fileSystem.isDirectory(new Path(logBaseDir))) {
    +      throw new IllegalArgumentException(s"Log directory $logBaseDir does not exist.");
    --- End diff --
    
    semicolon!
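    
    To spell it out: the trailing `;` is redundant in Scala, so the line presumably just becomes:
    
        throw new IllegalArgumentException(s"Log directory $logBaseDir does not exist.")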

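    As a side note on the `hadoopFlushMethod` reflection above: the resolved
    `java.lang.reflect.Method` still has to be invoked on the stream later. A minimal
    self-contained sketch of the technique (the `flushStream` helper is a name chosen
    here for illustration, not part of this patch):
    
        import org.apache.hadoop.fs.FSDataOutputStream
    
        // Hadoop 2.x exposes hflush() while Hadoop 1.x only has sync() (see SPARK-1518),
        // so resolve whichever method exists once, up front.
        val hadoopFlushMethod = {
          val cls = classOf[FSDataOutputStream]
          scala.util.Try(cls.getMethod("hflush")).getOrElse(cls.getMethod("sync"))
        }
    
        // Flush a stream regardless of which Hadoop version is on the classpath.
        def flushStream(stream: FSDataOutputStream): Unit = {
          hadoopFlushMethod.invoke(stream)
        }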
