[04/50] [abbrv] ambari git commit: AMBARI-21569. Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View (Venkata Sairam)
AMBARI-21569.Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8e36662a Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8e36662a Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8e36662a Branch: refs/heads/branch-3.0-perf Commit: 8e36662ae1dffe7cb637b3d1edb38278f0111012 Parents: 5136021 Author: Venkata SairamAuthored: Thu Nov 23 15:48:52 2017 +0530 Committer: Venkata Sairam Committed: Thu Nov 23 15:48:52 2017 +0530 -- .../org/apache/ambari/view/utils/hdfs/HdfsApi.java | 16 +++- .../apache/ambari/view/utils/hdfs/HdfsUtil.java| 17 - 2 files changed, 27 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/8e36662a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java index 3db2081..812cd54 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java @@ -485,7 +485,20 @@ public class HdfsApi { * @throws IOException * @throws InterruptedException */ - public T execute(PrivilegedExceptionAction action) + public T execute(PrivilegedExceptionAction action) throws IOException, InterruptedException { +return this.execute(action, false); + } + + + /** + * Executes action on HDFS using doAs + * @param action strategy object + * @param result type + * @return result of operation + * @throws IOException + * @throws InterruptedException + */ + public T execute(PrivilegedExceptionAction action, boolean alwaysRetry) throws IOException, InterruptedException { T result = null; @@ -508,6 +521,7 @@ public class HdfsApi { } 
LOG.info("HDFS threw 'IOException: Cannot obtain block length' exception. " + "Retrying... Try #" + (tryNumber + 1)); +LOG.error("Retrying: " + ex.getMessage(),ex); Thread.sleep(1000); //retry after 1 second } } while (!succeeded); http://git-wip-us.apache.org/repos/asf/ambari/blob/8e36662a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java index 0670f1a..810129b 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java @@ -27,6 +27,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; +import java.security.PrivilegedExceptionAction; import java.util.Map; public class HdfsUtil { @@ -38,13 +39,19 @@ public class HdfsUtil { * @param filePath path to file * @param content new content of file */ - public static void putStringToFile(HdfsApi hdfs, String filePath, String content) throws HdfsApiException { -FSDataOutputStream stream; + public static void putStringToFile(final HdfsApi hdfs,final String filePath, final String content) throws HdfsApiException { + try { synchronized (hdfs) { -stream = hdfs.create(filePath, true); -stream.write(content.getBytes()); -stream.close(); +hdfs.execute(new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { +final FSDataOutputStream stream = hdfs.create(filePath, true); +stream.write(content.getBytes()); +stream.close(); +return null; + } +}, true); } } catch (IOException e) { throw new HdfsApiException("HDFS020 Could not write file " + filePath, e);
[07/50] [abbrv] ambari git commit: AMBARI-21569. Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View (Venkata Sairam)
AMBARI-21569.Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8e36662a Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8e36662a Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8e36662a Branch: refs/heads/branch-feature-AMBARI-21674 Commit: 8e36662ae1dffe7cb637b3d1edb38278f0111012 Parents: 5136021 Author: Venkata SairamAuthored: Thu Nov 23 15:48:52 2017 +0530 Committer: Venkata Sairam Committed: Thu Nov 23 15:48:52 2017 +0530 -- .../org/apache/ambari/view/utils/hdfs/HdfsApi.java | 16 +++- .../apache/ambari/view/utils/hdfs/HdfsUtil.java| 17 - 2 files changed, 27 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/8e36662a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java index 3db2081..812cd54 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java @@ -485,7 +485,20 @@ public class HdfsApi { * @throws IOException * @throws InterruptedException */ - public T execute(PrivilegedExceptionAction action) + public T execute(PrivilegedExceptionAction action) throws IOException, InterruptedException { +return this.execute(action, false); + } + + + /** + * Executes action on HDFS using doAs + * @param action strategy object + * @param result type + * @return result of operation + * @throws IOException + * @throws InterruptedException + */ + public T execute(PrivilegedExceptionAction action, boolean alwaysRetry) throws IOException, InterruptedException { T result = null; @@ -508,6 +521,7 @@ public class HdfsApi { } 
LOG.info("HDFS threw 'IOException: Cannot obtain block length' exception. " + "Retrying... Try #" + (tryNumber + 1)); +LOG.error("Retrying: " + ex.getMessage(),ex); Thread.sleep(1000); //retry after 1 second } } while (!succeeded); http://git-wip-us.apache.org/repos/asf/ambari/blob/8e36662a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java index 0670f1a..810129b 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java @@ -27,6 +27,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; +import java.security.PrivilegedExceptionAction; import java.util.Map; public class HdfsUtil { @@ -38,13 +39,19 @@ public class HdfsUtil { * @param filePath path to file * @param content new content of file */ - public static void putStringToFile(HdfsApi hdfs, String filePath, String content) throws HdfsApiException { -FSDataOutputStream stream; + public static void putStringToFile(final HdfsApi hdfs,final String filePath, final String content) throws HdfsApiException { + try { synchronized (hdfs) { -stream = hdfs.create(filePath, true); -stream.write(content.getBytes()); -stream.close(); +hdfs.execute(new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { +final FSDataOutputStream stream = hdfs.create(filePath, true); +stream.write(content.getBytes()); +stream.close(); +return null; + } +}, true); } } catch (IOException e) { throw new HdfsApiException("HDFS020 Could not write file " + filePath, e);
[35/49] ambari git commit: AMBARI-21569. Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View (Venkata Sairam)
AMBARI-21569.Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8e36662a Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8e36662a Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8e36662a Branch: refs/heads/branch-feature-AMBARI-20859 Commit: 8e36662ae1dffe7cb637b3d1edb38278f0111012 Parents: 5136021 Author: Venkata SairamAuthored: Thu Nov 23 15:48:52 2017 +0530 Committer: Venkata Sairam Committed: Thu Nov 23 15:48:52 2017 +0530 -- .../org/apache/ambari/view/utils/hdfs/HdfsApi.java | 16 +++- .../apache/ambari/view/utils/hdfs/HdfsUtil.java| 17 - 2 files changed, 27 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/8e36662a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java index 3db2081..812cd54 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java @@ -485,7 +485,20 @@ public class HdfsApi { * @throws IOException * @throws InterruptedException */ - public T execute(PrivilegedExceptionAction action) + public T execute(PrivilegedExceptionAction action) throws IOException, InterruptedException { +return this.execute(action, false); + } + + + /** + * Executes action on HDFS using doAs + * @param action strategy object + * @param result type + * @return result of operation + * @throws IOException + * @throws InterruptedException + */ + public T execute(PrivilegedExceptionAction action, boolean alwaysRetry) throws IOException, InterruptedException { T result = null; @@ -508,6 +521,7 @@ public class HdfsApi { } 
LOG.info("HDFS threw 'IOException: Cannot obtain block length' exception. " + "Retrying... Try #" + (tryNumber + 1)); +LOG.error("Retrying: " + ex.getMessage(),ex); Thread.sleep(1000); //retry after 1 second } } while (!succeeded); http://git-wip-us.apache.org/repos/asf/ambari/blob/8e36662a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java index 0670f1a..810129b 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java @@ -27,6 +27,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; +import java.security.PrivilegedExceptionAction; import java.util.Map; public class HdfsUtil { @@ -38,13 +39,19 @@ public class HdfsUtil { * @param filePath path to file * @param content new content of file */ - public static void putStringToFile(HdfsApi hdfs, String filePath, String content) throws HdfsApiException { -FSDataOutputStream stream; + public static void putStringToFile(final HdfsApi hdfs,final String filePath, final String content) throws HdfsApiException { + try { synchronized (hdfs) { -stream = hdfs.create(filePath, true); -stream.write(content.getBytes()); -stream.close(); +hdfs.execute(new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { +final FSDataOutputStream stream = hdfs.create(filePath, true); +stream.write(content.getBytes()); +stream.close(); +return null; + } +}, true); } } catch (IOException e) { throw new HdfsApiException("HDFS020 Could not write file " + filePath, e);
[08/16] ambari git commit: AMBARI-21569. Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View (Venkata Sairam)
AMBARI-21569.Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c57e243d Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c57e243d Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c57e243d Branch: refs/heads/branch-feature-AMBARI-22457 Commit: c57e243d2d0c2f480b56693a39e97bb81e258da6 Parents: 099e018 Author: Venkata SairamAuthored: Thu Nov 23 15:52:55 2017 +0530 Committer: Venkata Sairam Committed: Thu Nov 23 15:52:55 2017 +0530 -- .../org/apache/ambari/view/utils/hdfs/HdfsApi.java | 16 +++- .../apache/ambari/view/utils/hdfs/HdfsUtil.java| 17 - 2 files changed, 27 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/c57e243d/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java index 3db2081..812cd54 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java @@ -485,7 +485,20 @@ public class HdfsApi { * @throws IOException * @throws InterruptedException */ - public T execute(PrivilegedExceptionAction action) + public T execute(PrivilegedExceptionAction action) throws IOException, InterruptedException { +return this.execute(action, false); + } + + + /** + * Executes action on HDFS using doAs + * @param action strategy object + * @param result type + * @return result of operation + * @throws IOException + * @throws InterruptedException + */ + public T execute(PrivilegedExceptionAction action, boolean alwaysRetry) throws IOException, InterruptedException { T result = null; @@ -508,6 +521,7 @@ public class HdfsApi { } 
LOG.info("HDFS threw 'IOException: Cannot obtain block length' exception. " + "Retrying... Try #" + (tryNumber + 1)); +LOG.error("Retrying: " + ex.getMessage(),ex); Thread.sleep(1000); //retry after 1 second } } while (!succeeded); http://git-wip-us.apache.org/repos/asf/ambari/blob/c57e243d/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java index 0670f1a..810129b 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java @@ -27,6 +27,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; +import java.security.PrivilegedExceptionAction; import java.util.Map; public class HdfsUtil { @@ -38,13 +39,19 @@ public class HdfsUtil { * @param filePath path to file * @param content new content of file */ - public static void putStringToFile(HdfsApi hdfs, String filePath, String content) throws HdfsApiException { -FSDataOutputStream stream; + public static void putStringToFile(final HdfsApi hdfs,final String filePath, final String content) throws HdfsApiException { + try { synchronized (hdfs) { -stream = hdfs.create(filePath, true); -stream.write(content.getBytes()); -stream.close(); +hdfs.execute(new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { +final FSDataOutputStream stream = hdfs.create(filePath, true); +stream.write(content.getBytes()); +stream.close(); +return null; + } +}, true); } } catch (IOException e) { throw new HdfsApiException("HDFS020 Could not write file " + filePath, e);
ambari git commit: AMBARI-21569. Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View (Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.6 099e0185d -> c57e243d2 AMBARI-21569.Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c57e243d Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c57e243d Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c57e243d Branch: refs/heads/branch-2.6 Commit: c57e243d2d0c2f480b56693a39e97bb81e258da6 Parents: 099e018 Author: Venkata SairamAuthored: Thu Nov 23 15:52:55 2017 +0530 Committer: Venkata Sairam Committed: Thu Nov 23 15:52:55 2017 +0530 -- .../org/apache/ambari/view/utils/hdfs/HdfsApi.java | 16 +++- .../apache/ambari/view/utils/hdfs/HdfsUtil.java| 17 - 2 files changed, 27 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/c57e243d/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java index 3db2081..812cd54 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java @@ -485,7 +485,20 @@ public class HdfsApi { * @throws IOException * @throws InterruptedException */ - public T execute(PrivilegedExceptionAction action) + public T execute(PrivilegedExceptionAction action) throws IOException, InterruptedException { +return this.execute(action, false); + } + + + /** + * Executes action on HDFS using doAs + * @param action strategy object + * @param result type + * @return result of operation + * @throws IOException + * @throws InterruptedException + */ + public T execute(PrivilegedExceptionAction action, boolean alwaysRetry) throws IOException, 
InterruptedException { T result = null; @@ -508,6 +521,7 @@ public class HdfsApi { } LOG.info("HDFS threw 'IOException: Cannot obtain block length' exception. " + "Retrying... Try #" + (tryNumber + 1)); +LOG.error("Retrying: " + ex.getMessage(),ex); Thread.sleep(1000); //retry after 1 second } } while (!succeeded); http://git-wip-us.apache.org/repos/asf/ambari/blob/c57e243d/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java index 0670f1a..810129b 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java @@ -27,6 +27,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; +import java.security.PrivilegedExceptionAction; import java.util.Map; public class HdfsUtil { @@ -38,13 +39,19 @@ public class HdfsUtil { * @param filePath path to file * @param content new content of file */ - public static void putStringToFile(HdfsApi hdfs, String filePath, String content) throws HdfsApiException { -FSDataOutputStream stream; + public static void putStringToFile(final HdfsApi hdfs,final String filePath, final String content) throws HdfsApiException { + try { synchronized (hdfs) { -stream = hdfs.create(filePath, true); -stream.write(content.getBytes()); -stream.close(); +hdfs.execute(new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { +final FSDataOutputStream stream = hdfs.create(filePath, true); +stream.write(content.getBytes()); +stream.close(); +return null; + } +}, true); } } catch (IOException e) { throw new HdfsApiException("HDFS020 Could not write file " + filePath, e);
ambari git commit: AMBARI-21569. Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View (Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/branch-2.5 603c3fd74 -> adc1fed15 AMBARI-21569.Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/adc1fed1 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/adc1fed1 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/adc1fed1 Branch: refs/heads/branch-2.5 Commit: adc1fed15f57b427ee6fbaeb4b747004492f49e4 Parents: 603c3fd Author: Venkata SairamAuthored: Thu Nov 23 15:50:22 2017 +0530 Committer: Venkata Sairam Committed: Thu Nov 23 15:50:22 2017 +0530 -- .../apache/ambari/view/utils/hdfs/HdfsApi.java| 18 -- .../apache/ambari/view/utils/hdfs/HdfsUtil.java | 17 - 2 files changed, 28 insertions(+), 7 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/adc1fed1/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java index 90fa483..5bce7ba 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java @@ -386,7 +386,20 @@ public class HdfsApi { * @throws IOException * @throws InterruptedException */ - public T execute(PrivilegedExceptionAction action) + public T execute(PrivilegedExceptionAction action) throws IOException, InterruptedException { +return this.execute(action, false); + } + + + /** + * Executes action on HDFS using doAs + * @param action strategy object + * @param result type + * @return result of operation + * @throws IOException + * @throws InterruptedException + */ + public T execute(PrivilegedExceptionAction action, boolean alwaysRetry) throws IOException, InterruptedException { T 
result = null; @@ -401,7 +414,7 @@ public class HdfsApi { result = ugi.doAs(action); succeeded = true; } catch (IOException ex) { -if (!ex.getMessage().contains("Cannot obtain block length for")) { +if (!alwaysRetry && !ex.getMessage().contains("Cannot obtain block length for")) { throw ex; } if (tryNumber >= 3) { @@ -409,6 +422,7 @@ public class HdfsApi { } LOG.info("HDFS threw 'IOException: Cannot obtain block length' exception. " + "Retrying... Try #" + (tryNumber + 1)); +LOG.error("Retrying: " + ex.getMessage(),ex); Thread.sleep(1000); //retry after 1 second } } while (!succeeded); http://git-wip-us.apache.org/repos/asf/ambari/blob/adc1fed1/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java index 0670f1a..810129b 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java @@ -27,6 +27,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; +import java.security.PrivilegedExceptionAction; import java.util.Map; public class HdfsUtil { @@ -38,13 +39,19 @@ public class HdfsUtil { * @param filePath path to file * @param content new content of file */ - public static void putStringToFile(HdfsApi hdfs, String filePath, String content) throws HdfsApiException { -FSDataOutputStream stream; + public static void putStringToFile(final HdfsApi hdfs,final String filePath, final String content) throws HdfsApiException { + try { synchronized (hdfs) { -stream = hdfs.create(filePath, true); -stream.write(content.getBytes()); -stream.close(); +hdfs.execute(new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { +final FSDataOutputStream stream = hdfs.create(filePath, true); 
+stream.write(content.getBytes()); +stream.close(); +return null; + } +}, true); } } catch (IOException e) { throw new HdfsApiException("HDFS020 Could not write file " + filePath, e);
ambari git commit: AMBARI-21569. Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View (Venkata Sairam)
Repository: ambari Updated Branches: refs/heads/trunk 513602198 -> 8e36662ae AMBARI-21569.Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View(Venkata Sairam) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8e36662a Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8e36662a Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8e36662a Branch: refs/heads/trunk Commit: 8e36662ae1dffe7cb637b3d1edb38278f0111012 Parents: 5136021 Author: Venkata SairamAuthored: Thu Nov 23 15:48:52 2017 +0530 Committer: Venkata Sairam Committed: Thu Nov 23 15:48:52 2017 +0530 -- .../org/apache/ambari/view/utils/hdfs/HdfsApi.java | 16 +++- .../apache/ambari/view/utils/hdfs/HdfsUtil.java| 17 - 2 files changed, 27 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/8e36662a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java index 3db2081..812cd54 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java @@ -485,7 +485,20 @@ public class HdfsApi { * @throws IOException * @throws InterruptedException */ - public T execute(PrivilegedExceptionAction action) + public T execute(PrivilegedExceptionAction action) throws IOException, InterruptedException { +return this.execute(action, false); + } + + + /** + * Executes action on HDFS using doAs + * @param action strategy object + * @param result type + * @return result of operation + * @throws IOException + * @throws InterruptedException + */ + public T execute(PrivilegedExceptionAction action, boolean alwaysRetry) throws IOException, InterruptedException { T 
result = null; @@ -508,6 +521,7 @@ public class HdfsApi { } LOG.info("HDFS threw 'IOException: Cannot obtain block length' exception. " + "Retrying... Try #" + (tryNumber + 1)); +LOG.error("Retrying: " + ex.getMessage(),ex); Thread.sleep(1000); //retry after 1 second } } while (!succeeded); http://git-wip-us.apache.org/repos/asf/ambari/blob/8e36662a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java -- diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java index 0670f1a..810129b 100644 --- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java +++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java @@ -27,6 +27,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; +import java.security.PrivilegedExceptionAction; import java.util.Map; public class HdfsUtil { @@ -38,13 +39,19 @@ public class HdfsUtil { * @param filePath path to file * @param content new content of file */ - public static void putStringToFile(HdfsApi hdfs, String filePath, String content) throws HdfsApiException { -FSDataOutputStream stream; + public static void putStringToFile(final HdfsApi hdfs,final String filePath, final String content) throws HdfsApiException { + try { synchronized (hdfs) { -stream = hdfs.create(filePath, true); -stream.write(content.getBytes()); -stream.close(); +hdfs.execute(new PrivilegedExceptionAction() { + @Override + public Void run() throws Exception { +final FSDataOutputStream stream = hdfs.create(filePath, true); +stream.write(content.getBytes()); +stream.close(); +return null; + } +}, true); } } catch (IOException e) { throw new HdfsApiException("HDFS020 Could not write file " + filePath, e);