Repository: ambari
Updated Branches:
  refs/heads/trunk d804f4451 -> 0a2eccd09


AMBARI-21569. Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0a2eccd0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0a2eccd0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0a2eccd0

Branch: refs/heads/trunk
Commit: 0a2eccd09aa7ec6bc4389603a2668f27d2d79900
Parents: d804f44
Author: Venkata Sairam <venkatasairam.la...@gmail.com>
Authored: Wed Aug 23 15:58:01 2017 +0530
Committer: Venkata Sairam <venkatasairam.la...@gmail.com>
Committed: Wed Aug 23 15:59:02 2017 +0530

----------------------------------------------------------------------
 .../hive20/src/main/resources/ui/yarn.lock      |  2 +-
 .../apache/ambari/view/utils/hdfs/HdfsApi.java  | 18 +++++++++++++--
 .../apache/ambari/view/utils/hdfs/HdfsUtil.java | 19 ++++++++++++++-----
 3 files changed, 31 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0a2eccd0/contrib/views/hive20/src/main/resources/ui/yarn.lock
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/yarn.lock b/contrib/views/hive20/src/main/resources/ui/yarn.lock
index 477a15c..607cf81 100644
--- a/contrib/views/hive20/src/main/resources/ui/yarn.lock
+++ b/contrib/views/hive20/src/main/resources/ui/yarn.lock
@@ -569,7 +569,7 @@ babel-plugin-transform-es2015-block-scoped-functions@^6.22.0:
   dependencies:
     babel-runtime "^6.22.0"
 
-babel-plugin-transform-es2015-block-scoping@^6.23.0:
+babel-plugin-transform-es2015-block-scoping@^6.23.0, babel-plugin-transform-es2015-block-scoping@^6.24.1:
   version "6.24.1"
   resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.24.1.tgz#76c295dc3a4741b1665adfd3167215dcff32a576"
   dependencies:

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a2eccd0/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
index 90fa483..5bce7ba 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
@@ -386,7 +386,20 @@ public class HdfsApi {
    * @throws IOException
    * @throws InterruptedException
    */
-  public <T> T execute(PrivilegedExceptionAction<T> action)
+  public <T> T execute(PrivilegedExceptionAction<T> action) throws IOException, InterruptedException {
+    return this.execute(action, false);
+  }
+
+  /**
+   * Executes action on HDFS using doAs
+   * @param action strategy object
+   * @param <T> result type
+   * @param alwaysRetry if true, retry the action on any IOException; if false, retry only on "Cannot obtain block length" errors
+   * @return result of operation
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public <T> T execute(PrivilegedExceptionAction<T> action, boolean alwaysRetry)
       throws IOException, InterruptedException {
     T result = null;
 
@@ -401,7 +414,7 @@ public class HdfsApi {
         result = ugi.doAs(action);
         succeeded = true;
       } catch (IOException ex) {
-        if (!ex.getMessage().contains("Cannot obtain block length for")) {
+        if (!alwaysRetry && (ex.getMessage() == null || !ex.getMessage().contains("Cannot obtain block length for"))) {
           throw ex;
         }
         if (tryNumber >= 3) {
@@ -409,6 +422,7 @@ public class HdfsApi {
         }
         LOG.info("HDFS threw 'IOException: Cannot obtain block length' 
exception. " +
             "Retrying... Try #" + (tryNumber + 1));
+        LOG.error("Retrying: " + ex.getMessage(), ex);
         Thread.sleep(1000);  //retry after 1 second
       }
     } while (!succeeded);
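
For illustration, a caller can opt into unconditional retries through the new
overload; a minimal sketch, where "hdfs" is an existing HdfsApi instance and
the helper name and action body are hypothetical (not part of this patch):

    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;

    static Boolean pingWithRetry(final HdfsApi hdfs) throws IOException, InterruptedException {
      return hdfs.execute(new PrivilegedExceptionAction<Boolean>() {
        @Override
        public Boolean run() throws Exception {
          // any HDFS operation that can fail transiently goes here
          return Boolean.TRUE;
        }
      }, true);  // alwaysRetry = true: retry on any IOException, up to the 3-try limit in execute()
    }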

http://git-wip-us.apache.org/repos/asf/ambari/blob/0a2eccd0/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
index 0670f1a..39958c3 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
@@ -28,6 +28,7 @@ import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.Map;
+import java.security.PrivilegedExceptionAction;
 
 public class HdfsUtil {
   private final static Logger LOG =
@@ -38,19 +39,27 @@ public class HdfsUtil {
    * @param filePath path to file
    * @param content new content of file
    */
-  public static void putStringToFile(HdfsApi hdfs, String filePath, String content) throws HdfsApiException {
-    FSDataOutputStream stream;
+  public static void putStringToFile(final HdfsApi hdfs, final String filePath, final String content) throws HdfsApiException {
     try {
       synchronized (hdfs) {
-        stream = hdfs.create(filePath, true);
-        stream.write(content.getBytes());
-        stream.close();
+        hdfs.execute(new PrivilegedExceptionAction<Void>() {
+          @Override
+          public Void run() throws Exception {
+            // The stream is declared inside the action: an anonymous class
+            // cannot capture a mutable local variable, and each retry of the
+            // action must open (and close) a fresh stream.
+            final FSDataOutputStream stream = hdfs.create(filePath, true);
+            stream.write(content.getBytes());
+            stream.close();
+            return null;
+          }
+        }, true);
       }
     } catch (IOException e) {
       throw new HdfsApiException("HDFS020 Could not write file " + filePath, e);
     } catch (InterruptedException e) {
       throw new HdfsApiException("HDFS021 Could not write file " + filePath, e);
     }
   }
 
   /**
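
As a side note, the write action above could equally use try-with-resources so
the stream is closed even when write() throws partway; this is only a sketch of
the same logic, reusing the patch's names (hdfs, filePath, content), and is not
part of the commit:

    hdfs.execute(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        // FSDataOutputStream is Closeable, so it is auto-closed here
        try (FSDataOutputStream stream = hdfs.create(filePath, true)) {
          stream.write(content.getBytes());
        }
        return null;
      }
    }, true);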
