hadoop git commit: MAPREDUCE-7027: HadoopArchiveLogs shouldn't delete the original logs if the HAR creation fails. Contributed by Gergely Novák

2018-10-29 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.9 f84beefb4 -> f79470e69


MAPREDUCE-7027: HadoopArchiveLogs shouldn't delete the original logs if the HAR 
creation fails. Contributed by Gergely Novák

(cherry picked from commit 68ce193efcb595f75d7addf751559c806a5aa399)
(cherry picked from commit a0b55392d4385b623f6a8c0af17e58e2a9468be5)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f79470e6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f79470e6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f79470e6

Branch: refs/heads/branch-2.9
Commit: f79470e69666875cd43f171e438f7764796ce535
Parents: f84beef
Author: Xuan Gong 
Authored: Fri Feb 23 14:37:26 2018 -0800
Committer: Akira Ajisaka 
Committed: Tue Oct 30 10:04:48 2018 +0900

----------------------------------------------------------------------
 .../hadoop/tools/HadoopArchiveLogsRunner.java   |  26 ++-
 .../tools/TestHadoopArchiveLogsRunner.java  | 204 +++
 2 files changed, 141 insertions(+), 89 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f79470e6/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java
index b3c2de6..b736694 100644
--- a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java
+++ b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.tools;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.GnuParser;
@@ -65,6 +66,9 @@ public class HadoopArchiveLogsRunner implements Tool {
 
   private JobConf conf;
 
+  @VisibleForTesting
+  HadoopArchives hadoopArchives;
+
   private static final FsPermission HAR_DIR_PERM =
   new FsPermission(FsAction.ALL, FsAction.READ_EXECUTE, FsAction.NONE);
   private static final FsPermission HAR_INNER_FILES_PERM =
@@ -72,6 +76,7 @@ public class HadoopArchiveLogsRunner implements Tool {
 
   public HadoopArchiveLogsRunner(Configuration conf) {
 setConf(conf);
+hadoopArchives = new HadoopArchives(conf);
   }
 
   public static void main(String[] args) {
@@ -132,10 +137,10 @@ public class HadoopArchiveLogsRunner implements Tool {
 conf.set("mapreduce.framework.name", "local");
 // Set the umask so we get 640 files and 750 dirs
 conf.set("fs.permissions.umask-mode", "027");
-HadoopArchives ha = new HadoopArchives(conf);
+String harName = appId + ".har";
 String[] haArgs = {
 "-archiveName",
-appId + ".har",
+harName,
 "-p",
 remoteAppLogDir,
 "*",
@@ -146,15 +151,26 @@ public class HadoopArchiveLogsRunner implements Tool {
   sb.append("\n\t").append(haArg);
 }
 LOG.info(sb.toString());
-ha.run(haArgs);
+int exitCode = hadoopArchives.run(haArgs);
+if (exitCode != 0) {
+  LOG.warn("Failed to create archives for " + appId);
+  return -1;
+}
 
 FileSystem fs = null;
 // Move har file to correct location and delete original logs
 try {
   fs = FileSystem.get(conf);
-  Path harDest = new Path(remoteAppLogDir, appId + ".har");
+  Path harPath = new Path(workingDir, harName);
+  if (!fs.exists(harPath) ||
+  fs.listStatus(harPath).length == 0) {
+LOG.warn("The created archive \"" + harName +
+"\" is missing or empty.");
+return -1;
+  }
+  Path harDest = new Path(remoteAppLogDir, harName);
   LOG.info("Moving har to original location");
-  fs.rename(new Path(workingDir, appId + ".har"), harDest);
+  fs.rename(harPath, harDest);
   LOG.info("Deleting original logs");
   for (FileStatus original : fs.listStatus(new Path(remoteAppLogDir),
   new PathFilter() {
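
The hunk above is the whole fix: the return value of HadoopArchives.run() is now checked, and the freshly written HAR must exist and be non-empty before the original aggregated logs are touched. A minimal standalone sketch of that pattern follows; the class and method names are illustrative only and not part of the patch, while the Hadoop calls it makes are the same ones visible in the diff.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.tools.HadoopArchives;

public class ArchiveThenDeleteSketch {

  /**
   * Archive the aggregated logs for one application and delete the
   * originals only once the archive is known to be good.
   * Returns 0 on success, -1 if the archive step failed.
   */
  static int archiveAndClean(Configuration conf, String appId,
      String remoteAppLogDir, String workingDir) throws Exception {
    String harName = appId + ".har";
    String[] haArgs = {
        "-archiveName", harName,
        "-p", remoteAppLogDir,
        "*",
        workingDir
    };

    // 1. Bail out if the archiving job itself reports failure.
    if (new HadoopArchives(conf).run(haArgs) != 0) {
      return -1;
    }

    FileSystem fs = FileSystem.get(conf);
    Path harPath = new Path(workingDir, harName);
    // 2. A missing or empty HAR directory also counts as a failure.
    if (!fs.exists(harPath) || fs.listStatus(harPath).length == 0) {
      return -1;
    }

    // 3. Only now is it safe to move the archive into place and remove
    //    the original per-application log files.
    fs.rename(harPath, new Path(remoteAppLogDir, harName));
    for (FileStatus original : fs.listStatus(new Path(remoteAppLogDir))) {
      if (!original.getPath().getName().equals(harName)) {
        fs.delete(original.getPath(), false);
      }
    }
    return 0;
  }
}

Because a HAR is laid out as a directory containing _index, _masterindex and part-* files, an empty directory listing is a reliable sign that archive creation did not complete, so the sketch (like the patch) refuses to delete anything in that case.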

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f79470e6/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java
index fad9b97..5369338 100644
--- a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java
+++ b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java

[52/59] [abbrv] hadoop git commit: MAPREDUCE-7027: HadoopArchiveLogs shouldn't delete the original logs if the HAR creation fails. Contributed by Gergely Novák

2018-02-26 Thread xyao
MAPREDUCE-7027: HadoopArchiveLogs shouldn't delete the original logs if the HAR 
creation fails. Contributed by Gergely Novák


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/68ce193e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/68ce193e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/68ce193e

Branch: refs/heads/HDFS-7240
Commit: 68ce193efcb595f75d7addf751559c806a5aa399
Parents: 51088d3
Author: Xuan Gong 
Authored: Fri Feb 23 14:37:26 2018 -0800
Committer: Xuan Gong 
Committed: Fri Feb 23 14:37:26 2018 -0800

----------------------------------------------------------------------
 .../hadoop/tools/HadoopArchiveLogsRunner.java   |  26 ++-
 .../tools/TestHadoopArchiveLogsRunner.java  | 204 +++
 2 files changed, 141 insertions(+), 89 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/68ce193e/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java
index b3c2de6..b736694 100644
--- a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java
+++ b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.tools;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.GnuParser;
@@ -65,6 +66,9 @@ public class HadoopArchiveLogsRunner implements Tool {
 
   private JobConf conf;
 
+  @VisibleForTesting
+  HadoopArchives hadoopArchives;
+
   private static final FsPermission HAR_DIR_PERM =
   new FsPermission(FsAction.ALL, FsAction.READ_EXECUTE, FsAction.NONE);
   private static final FsPermission HAR_INNER_FILES_PERM =
@@ -72,6 +76,7 @@ public class HadoopArchiveLogsRunner implements Tool {
 
   public HadoopArchiveLogsRunner(Configuration conf) {
 setConf(conf);
+hadoopArchives = new HadoopArchives(conf);
   }
 
   public static void main(String[] args) {
@@ -132,10 +137,10 @@ public class HadoopArchiveLogsRunner implements Tool {
 conf.set("mapreduce.framework.name", "local");
 // Set the umask so we get 640 files and 750 dirs
 conf.set("fs.permissions.umask-mode", "027");
-HadoopArchives ha = new HadoopArchives(conf);
+String harName = appId + ".har";
 String[] haArgs = {
 "-archiveName",
-appId + ".har",
+harName,
 "-p",
 remoteAppLogDir,
 "*",
@@ -146,15 +151,26 @@ public class HadoopArchiveLogsRunner implements Tool {
   sb.append("\n\t").append(haArg);
 }
 LOG.info(sb.toString());
-ha.run(haArgs);
+int exitCode = hadoopArchives.run(haArgs);
+if (exitCode != 0) {
+  LOG.warn("Failed to create archives for " + appId);
+  return -1;
+}
 
 FileSystem fs = null;
 // Move har file to correct location and delete original logs
 try {
   fs = FileSystem.get(conf);
-  Path harDest = new Path(remoteAppLogDir, appId + ".har");
+  Path harPath = new Path(workingDir, harName);
+  if (!fs.exists(harPath) ||
+  fs.listStatus(harPath).length == 0) {
+LOG.warn("The created archive \"" + harName +
+"\" is missing or empty.");
+return -1;
+  }
+  Path harDest = new Path(remoteAppLogDir, harName);
   LOG.info("Moving har to original location");
-  fs.rename(new Path(workingDir, appId + ".har"), harDest);
+  fs.rename(harPath, harDest);
   LOG.info("Deleting original logs");
   for (FileStatus original : fs.listStatus(new Path(remoteAppLogDir),
   new PathFilter() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/68ce193e/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java
index fad9b97..5369338 100644
--- a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java
+++ b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java
@@ -32,112 +32,148 @@ import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import 

hadoop git commit: MAPREDUCE-7027: HadoopArchiveLogs shouldn't delete the original logs if the HAR creation fails. Contributed by Gergely Novák

2018-02-23 Thread xgong
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 80f716537 -> a0b55392d


MAPREDUCE-7027: HadoopArchiveLogs shouldn't delete the original logs if the HAR 
creation fails. Contributed by Gergely Novák

(cherry picked from commit 68ce193efcb595f75d7addf751559c806a5aa399)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a0b55392
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a0b55392
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a0b55392

Branch: refs/heads/branch-2
Commit: a0b55392d4385b623f6a8c0af17e58e2a9468be5
Parents: 80f7165
Author: Xuan Gong 
Authored: Fri Feb 23 14:37:26 2018 -0800
Committer: Xuan Gong 
Committed: Fri Feb 23 14:40:51 2018 -0800

----------------------------------------------------------------------
 .../hadoop/tools/HadoopArchiveLogsRunner.java   |  26 ++-
 .../tools/TestHadoopArchiveLogsRunner.java  | 204 +++
 2 files changed, 141 insertions(+), 89 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a0b55392/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java
index b3c2de6..b736694 100644
--- a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java
+++ b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.tools;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.GnuParser;
@@ -65,6 +66,9 @@ public class HadoopArchiveLogsRunner implements Tool {
 
   private JobConf conf;
 
+  @VisibleForTesting
+  HadoopArchives hadoopArchives;
+
   private static final FsPermission HAR_DIR_PERM =
   new FsPermission(FsAction.ALL, FsAction.READ_EXECUTE, FsAction.NONE);
   private static final FsPermission HAR_INNER_FILES_PERM =
@@ -72,6 +76,7 @@ public class HadoopArchiveLogsRunner implements Tool {
 
   public HadoopArchiveLogsRunner(Configuration conf) {
 setConf(conf);
+hadoopArchives = new HadoopArchives(conf);
   }
 
   public static void main(String[] args) {
@@ -132,10 +137,10 @@ public class HadoopArchiveLogsRunner implements Tool {
 conf.set("mapreduce.framework.name", "local");
 // Set the umask so we get 640 files and 750 dirs
 conf.set("fs.permissions.umask-mode", "027");
-HadoopArchives ha = new HadoopArchives(conf);
+String harName = appId + ".har";
 String[] haArgs = {
 "-archiveName",
-appId + ".har",
+harName,
 "-p",
 remoteAppLogDir,
 "*",
@@ -146,15 +151,26 @@ public class HadoopArchiveLogsRunner implements Tool {
   sb.append("\n\t").append(haArg);
 }
 LOG.info(sb.toString());
-ha.run(haArgs);
+int exitCode = hadoopArchives.run(haArgs);
+if (exitCode != 0) {
+  LOG.warn("Failed to create archives for " + appId);
+  return -1;
+}
 
 FileSystem fs = null;
 // Move har file to correct location and delete original logs
 try {
   fs = FileSystem.get(conf);
-  Path harDest = new Path(remoteAppLogDir, appId + ".har");
+  Path harPath = new Path(workingDir, harName);
+  if (!fs.exists(harPath) ||
+  fs.listStatus(harPath).length == 0) {
+LOG.warn("The created archive \"" + harName +
+"\" is missing or empty.");
+return -1;
+  }
+  Path harDest = new Path(remoteAppLogDir, harName);
   LOG.info("Moving har to original location");
-  fs.rename(new Path(workingDir, appId + ".har"), harDest);
+  fs.rename(harPath, harDest);
   LOG.info("Deleting original logs");
   for (FileStatus original : fs.listStatus(new Path(remoteAppLogDir),
   new PathFilter() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a0b55392/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java
index fad9b97..5369338 100644
--- a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java
+++ b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java

hadoop git commit: MAPREDUCE-7027: HadoopArchiveLogs shouldn't delete the original logs if the HAR creation fails. Contributed by Gergely Novák

2018-02-23 Thread xgong
Repository: hadoop
Updated Branches:
  refs/heads/trunk 51088d323 -> 68ce193ef


MAPREDUCE-7027: HadoopArchiveLogs shouldn't delete the original logs if the HAR 
creation fails. Contributed by Gergely Novák


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/68ce193e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/68ce193e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/68ce193e

Branch: refs/heads/trunk
Commit: 68ce193efcb595f75d7addf751559c806a5aa399
Parents: 51088d3
Author: Xuan Gong 
Authored: Fri Feb 23 14:37:26 2018 -0800
Committer: Xuan Gong 
Committed: Fri Feb 23 14:37:26 2018 -0800

----------------------------------------------------------------------
 .../hadoop/tools/HadoopArchiveLogsRunner.java   |  26 ++-
 .../tools/TestHadoopArchiveLogsRunner.java  | 204 +++
 2 files changed, 141 insertions(+), 89 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/68ce193e/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java
index b3c2de6..b736694 100644
--- a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java
+++ b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogsRunner.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.tools;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.GnuParser;
@@ -65,6 +66,9 @@ public class HadoopArchiveLogsRunner implements Tool {
 
   private JobConf conf;
 
+  @VisibleForTesting
+  HadoopArchives hadoopArchives;
+
   private static final FsPermission HAR_DIR_PERM =
   new FsPermission(FsAction.ALL, FsAction.READ_EXECUTE, FsAction.NONE);
   private static final FsPermission HAR_INNER_FILES_PERM =
@@ -72,6 +76,7 @@ public class HadoopArchiveLogsRunner implements Tool {
 
   public HadoopArchiveLogsRunner(Configuration conf) {
 setConf(conf);
+hadoopArchives = new HadoopArchives(conf);
   }
 
   public static void main(String[] args) {
@@ -132,10 +137,10 @@ public class HadoopArchiveLogsRunner implements Tool {
 conf.set("mapreduce.framework.name", "local");
 // Set the umask so we get 640 files and 750 dirs
 conf.set("fs.permissions.umask-mode", "027");
-HadoopArchives ha = new HadoopArchives(conf);
+String harName = appId + ".har";
 String[] haArgs = {
 "-archiveName",
-appId + ".har",
+harName,
 "-p",
 remoteAppLogDir,
 "*",
@@ -146,15 +151,26 @@ public class HadoopArchiveLogsRunner implements Tool {
   sb.append("\n\t").append(haArg);
 }
 LOG.info(sb.toString());
-ha.run(haArgs);
+int exitCode = hadoopArchives.run(haArgs);
+if (exitCode != 0) {
+  LOG.warn("Failed to create archives for " + appId);
+  return -1;
+}
 
 FileSystem fs = null;
 // Move har file to correct location and delete original logs
 try {
   fs = FileSystem.get(conf);
-  Path harDest = new Path(remoteAppLogDir, appId + ".har");
+  Path harPath = new Path(workingDir, harName);
+  if (!fs.exists(harPath) ||
+  fs.listStatus(harPath).length == 0) {
+LOG.warn("The created archive \"" + harName +
+"\" is missing or empty.");
+return -1;
+  }
+  Path harDest = new Path(remoteAppLogDir, harName);
   LOG.info("Moving har to original location");
-  fs.rename(new Path(workingDir, appId + ".har"), harDest);
+  fs.rename(harPath, harDest);
   LOG.info("Deleting original logs");
   for (FileStatus original : fs.listStatus(new Path(remoteAppLogDir),
   new PathFilter() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/68ce193e/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java
index fad9b97..5369338 100644
--- a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java
+++ b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogsRunner.java
@@ -32,112 +32,148 @@ import
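
The test-side diff is truncated in this archive, but the main-file change already shows the seam the new tests rely on: hadoopArchives became a package-private, @VisibleForTesting field, so a test in the same package can substitute a HadoopArchives whose run() reports failure and then check that the original logs survive. A rough illustration of that injection; the helper class and method below are hypothetical, not the actual TestHadoopArchiveLogsRunner code:

package org.apache.hadoop.tools;

import org.apache.hadoop.conf.Configuration;

/**
 * Illustrative only: simulating a failed HAR creation through the
 * package-private, @VisibleForTesting hadoopArchives field.
 */
public class FailingArchivesSketch {

  static HadoopArchiveLogsRunner runnerWithFailingArchives(Configuration conf) {
    HadoopArchiveLogsRunner runner = new HadoopArchiveLogsRunner(conf);
    // Swap in a HadoopArchives whose run() always reports failure, so the
    // runner takes the new early-return path instead of deleting the
    // original aggregated logs.
    runner.hadoopArchives = new HadoopArchives(conf) {
      @Override
      public int run(String[] args) {
        return -1;
      }
    };
    return runner;
  }
}

A test built on such a runner can drive it through ToolRunner (already imported by the test, as the fuller copies of this hunk earlier in the thread show) and verify that every pre-existing log file is still present after the run returns non-zero.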