dongjoon-hyun commented on a change in pull request #25385: optimization spark
event log that resulting in history server is too …
URL: https://github.com/apache/spark/pull/25385#discussion_r311884174
##########
File path:
core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala
##########
@@ -148,6 +152,59 @@ private[spark] class EventLoggingListener(
}
}
+ /** Roll (cut) the current event log file over to a new one. */
+ private def logReload(): Unit = {
+ val tlogPath = getLogPath(logBaseDir, appId, appAttemptId,
compressionCodecName, cutType)
+ // If the resolved log path has not changed, skip regenerating the event log file.
+ if (tlogPath == logPath) {
+ return
+ }
+
+ if (!fileSystem.getFileStatus(new Path(logBaseDir)).isDirectory) {
+ throw new IllegalArgumentException(s"Log directory $logBaseDir is not a
directory.")
+ }
+
+ val workingPath = tlogPath + IN_PROGRESS
+ val path = new Path(workingPath)
+ val uri = path.toUri
+ val defaultFs = FileSystem.getDefaultUri(hadoopConf).getScheme
+ val isDefaultLocal = defaultFs == null || defaultFs == "file"
+
+ if (shouldOverwrite && fileSystem.delete(path, true)) {
+ logWarning(s"Event log $path already exists. Overwriting...")
+ }
+
+ /* The Hadoop LocalFileSystem (r1.0.4) has known issues with syncing
(HADOOP-7844).
+ * Therefore, for local files, use FileOutputStream instead. */
+ val dstream =
+ if ((isDefaultLocal && uri.getScheme == null) || uri.getScheme ==
"file") {
+ new FileOutputStream(uri.getPath)
+ } else {
+ hadoopDataStream = Some(fileSystem.create(path))
+ hadoopDataStream.get
+ }
Review comment:
From line 163 to line 199, this seems to be a blind copy of the body of `def
start()`.
Please try to reduce this kind of copy&paste.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]