Github user vanzin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/10860#discussion_r50738479
  
    --- Diff: 
core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala ---
    @@ -562,90 +544,18 @@ private[history] class FsHistoryProvider(conf: 
SparkConf, clock: Clock)
       }
     
       /**
    -   * Loads a legacy log directory. This assumes that the log directory 
contains a single event
    -   * log file (along with other metadata files), which is the case for 
directories generated by
    -   * the code in previous releases.
    -   *
    -   * @return input stream that holds one JSON record per line.
    -   */
    -  private[history] def openLegacyEventLog(dir: Path): InputStream = {
    -    val children = fs.listStatus(dir)
    -    var eventLogPath: Path = null
    -    var codecName: Option[String] = None
    -
    -    children.foreach { child =>
    -      child.getPath().getName() match {
    -        case name if name.startsWith(LOG_PREFIX) =>
    -          eventLogPath = child.getPath()
    -        case codec if codec.startsWith(COMPRESSION_CODEC_PREFIX) =>
    -          codecName = 
Some(codec.substring(COMPRESSION_CODEC_PREFIX.length()))
    -        case _ =>
    -      }
    -    }
    -
    -    if (eventLogPath == null) {
    -      throw new IllegalArgumentException(s"$dir is not a Spark application 
log directory.")
    -    }
    -
    -    val codec = try {
    -        codecName.map { c => CompressionCodec.createCodec(conf, c) }
    -      } catch {
    -        case e: Exception =>
    -          throw new IllegalArgumentException(s"Unknown compression codec 
$codecName.")
    -      }
    -
    -    val in = new BufferedInputStream(fs.open(eventLogPath))
    -    codec.map(_.compressedInputStream(in)).getOrElse(in)
    -  }
    -
    -  /**
    -   * Return whether the specified event log path contains an old 
directory-based event log.
    -   * Previously, the event log of an application comprised multiple 
files in a directory.
    -   * As of Spark 1.3, these files are consolidated into a single one that 
replaces the directory.
    -   * See SPARK-2261 for more detail.
    -   */
    -  private def isLegacyLogDirectory(entry: FileStatus): Boolean = 
entry.isDirectory
    -
    -  /**
        * Returns the modification time of the given event log. If the status 
points at an empty
    --- End diff --
    
    Can this method just return `Long` now? The comment mentions that it would 
return `None` in case the directory was empty, but there are no more 
directories involved now.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to