Repository: ambari
Updated Branches:
  refs/heads/branch-2.6 22fe5ffae -> 63989a2d6


Revert "AMBARI-21569.Users randomly getting "HDFS020 Could not write file" 
exceptions while running query from Hive View"

This reverts commit 4b047c37a7771ffa2bf7a1f2ab1d09d1d464e34e.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/63989a2d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/63989a2d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/63989a2d

Branch: refs/heads/branch-2.6
Commit: 63989a2d635e9bcfcf5034d8dbbaaa644b42032a
Parents: 22fe5ff
Author: Sumit Mohanty <smoha...@hortonworks.com>
Authored: Tue Aug 29 14:17:20 2017 -0700
Committer: Sumit Mohanty <smoha...@hortonworks.com>
Committed: Tue Aug 29 14:17:20 2017 -0700

----------------------------------------------------------------------
 .../hive20/src/main/resources/ui/yarn.lock      |  2 +-
 .../apache/ambari/view/utils/hdfs/HdfsApi.java  | 18 ++-------------
 .../apache/ambari/view/utils/hdfs/HdfsUtil.java | 23 +++++---------------
 3 files changed, 9 insertions(+), 34 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/63989a2d/contrib/views/hive20/src/main/resources/ui/yarn.lock
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/yarn.lock b/contrib/views/hive20/src/main/resources/ui/yarn.lock
index 607cf81..477a15c 100644
--- a/contrib/views/hive20/src/main/resources/ui/yarn.lock
+++ b/contrib/views/hive20/src/main/resources/ui/yarn.lock
@@ -569,7 +569,7 @@ babel-plugin-transform-es2015-block-scoped-functions@^6.22.0:
   dependencies:
     babel-runtime "^6.22.0"
 
-babel-plugin-transform-es2015-block-scoping@^6.23.0, babel-plugin-transform-es2015-block-scoping@^6.24.1:
+babel-plugin-transform-es2015-block-scoping@^6.23.0:
   version "6.24.1"
   resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.24.1.tgz#76c295dc3a4741b1665adfd3167215dcff32a576"
   dependencies:

http://git-wip-us.apache.org/repos/asf/ambari/blob/63989a2d/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
index 5bce7ba..90fa483 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
@@ -386,20 +386,7 @@ public class HdfsApi {
    * @throws IOException
    * @throws InterruptedException
    */
-  public <T> T execute(PrivilegedExceptionAction<T> action) throws IOException, InterruptedException {
-    return this.execute(action, false);
-  }
-
-
-  /**
-   * Executes action on HDFS using doAs
-   * @param action strategy object
-   * @param <T> result type
-   * @return result of operation
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public <T> T execute(PrivilegedExceptionAction<T> action, boolean alwaysRetry)
+  public <T> T execute(PrivilegedExceptionAction<T> action)
       throws IOException, InterruptedException {
     T result = null;
 
@@ -414,7 +401,7 @@ public class HdfsApi {
         result = ugi.doAs(action);
         succeeded = true;
       } catch (IOException ex) {
-        if (!alwaysRetry && !ex.getMessage().contains("Cannot obtain block length for")) {
+        if (!ex.getMessage().contains("Cannot obtain block length for")) {
           throw ex;
         }
         if (tryNumber >= 3) {
@@ -422,7 +409,6 @@ public class HdfsApi {
         }
         LOG.info("HDFS threw 'IOException: Cannot obtain block length' exception. " +
             "Retrying... Try #" + (tryNumber + 1));
-        LOG.error("Retrying: " + ex.getMessage(),ex);
         Thread.sleep(1000);  //retry after 1 second
       }
     } while (!succeeded);
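
----------------------------------------------------------------------
Note: the hunks above restore the single execute(action) entry point; the
bounded retry loop it keeps only retries IOExceptions whose message contains
"Cannot obtain block length for". A minimal standalone sketch of that retained
pattern (RetrySketch and executeWithRetry are illustrative names, not Ambari
API; in HdfsApi the action actually runs via ugi.doAs):

    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;

    public class RetrySketch {
      // Retry only the specific "Cannot obtain block length for" IOException,
      // giving up after three attempts, sleeping one second between tries.
      static <T> T executeWithRetry(PrivilegedExceptionAction<T> action)
          throws Exception {
        int tryNumber = 0;
        while (true) {
          tryNumber += 1;
          try {
            return action.run();   // HdfsApi wraps this in ugi.doAs(action)
          } catch (IOException ex) {
            String msg = ex.getMessage();
            if (msg == null || !msg.contains("Cannot obtain block length for")) {
              throw ex;            // any other IOException propagates at once
            }
            if (tryNumber >= 3) {
              throw ex;            // exhausted the three allowed attempts
            }
            Thread.sleep(1000);    // retry after 1 second, as in the hunk
          }
        }
      }
    }
----------------------------------------------------------------------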

http://git-wip-us.apache.org/repos/asf/ambari/blob/63989a2d/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
index 39958c3..0670f1a 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
@@ -28,7 +28,6 @@ import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.Map;
-import java.security.PrivilegedExceptionAction;
 
 public class HdfsUtil {
   private final static Logger LOG =
@@ -39,29 +38,19 @@ public class HdfsUtil {
    * @param filePath path to file
    * @param content new content of file
    */
-  public static void putStringToFile(final HdfsApi hdfs,final String filePath, final String content) throws HdfsApiException {
+  public static void putStringToFile(HdfsApi hdfs, String filePath, String content) throws HdfsApiException {
     FSDataOutputStream stream;
-      try {
+    try {
       synchronized (hdfs) {
-        hdfs.execute(new PrivilegedExceptionAction<Void>() {
-          @Override
-          public Void run() throws Exception {
-            stream = hdfs.create(filePath, true);
-            stream.write(content.getBytes());
-            stream.close();
-            return null;
-          }
-        }, true);
+        stream = hdfs.create(filePath, true);
+        stream.write(content.getBytes());
+        stream.close();
       }
     } catch (IOException e) {
       throw new HdfsApiException("HDFS020 Could not write file " + filePath, 
e);
     } catch (InterruptedException e) {
       throw new HdfsApiException("HDFS021 Could not write file " + filePath, 
e);
-    } finally {
-      if(stream != null) {
-          stream.close()
-        }
-      }
+    }
   }
 
   /**
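
----------------------------------------------------------------------
Note: after the revert, putStringToFile writes inside synchronized (hdfs)
without the doAs/retry wrapper, and the stream is closed only on the success
path. A hedged sketch of the equivalent write expressed directly against
Hadoop's FileSystem API, using try-with-resources so the stream also closes
on failure (PutStringSketch is an illustrative name, not the committed code):

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class PutStringSketch {
      static void putStringToFile(FileSystem fs, String filePath, String content)
          throws IOException {
        // create(path, true) overwrites any existing file, mirroring
        // hdfs.create(filePath, true) in the diff above.
        try (FSDataOutputStream stream = fs.create(new Path(filePath), true)) {
          stream.write(content.getBytes(StandardCharsets.UTF_8));
        }
      }

      public static void main(String[] args) throws IOException {
        // The local filesystem stands in for HDFS so the sketch runs anywhere.
        FileSystem fs = FileSystem.getLocal(new Configuration());
        putStringToFile(fs, "/tmp/hdfs-sketch.txt", "hello");
      }
    }
----------------------------------------------------------------------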
