HADOOP-14982. Clients using FailoverOnNetworkExceptionRetry can go into a loop 
if they're used without authenticating with Kerberos in HA env (pbacsko via 
rkanter)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f2efaf01
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f2efaf01
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f2efaf01

Branch: refs/heads/YARN-6592
Commit: f2efaf013f7577948061abbb49c6d17c375e92cc
Parents: 6bf2c30
Author: Robert Kanter <rkan...@apache.org>
Authored: Thu Nov 16 11:11:19 2017 -0800
Committer: Robert Kanter <rkan...@apache.org>
Committed: Thu Nov 16 11:11:19 2017 -0800

----------------------------------------------------------------------
 .../apache/hadoop/io/retry/RetryPolicies.java   | 22 +++++++++++++++++++-
 .../apache/hadoop/io/retry/TestRetryProxy.java  | 22 ++++++++++++++++++++
 .../io/retry/UnreliableImplementation.java      | 10 +++++++++
 .../hadoop/io/retry/UnreliableInterface.java    |  6 +++++-
 4 files changed, 58 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f2efaf01/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
index fa0cb6e..adf23c0 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
@@ -32,11 +32,14 @@ import java.util.Map.Entry;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.TimeUnit;
 
+import javax.security.sasl.SaslException;
+
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.ipc.RetriableException;
 import org.apache.hadoop.ipc.StandbyException;
 import org.apache.hadoop.net.ConnectTimeoutException;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
+import org.ietf.jgss.GSSException;
 
 import com.google.common.annotations.VisibleForTesting;
 import org.slf4j.Logger;
@@ -663,6 +666,11 @@ public class RetryPolicies {
             + retries + ") exceeded maximum allowed (" + maxRetries + ")");
       }
 
+      if (isSaslFailure(e)) {
+          return new RetryAction(RetryAction.RetryDecision.FAIL, 0,
+                  "SASL failure");
+      }
+
       if (e instanceof ConnectException ||
           e instanceof EOFException ||
           e instanceof NoRouteToHostException ||
@@ -716,7 +724,7 @@ public class RetryPolicies {
   private static long calculateExponentialTime(long time, int retries) {
     return calculateExponentialTime(time, retries, Long.MAX_VALUE);
   }
-  
+
   private static boolean isWrappedStandbyException(Exception e) {
     if (!(e instanceof RemoteException)) {
       return false;
@@ -725,6 +733,18 @@ public class RetryPolicies {
         StandbyException.class);
     return unwrapped instanceof StandbyException;
   }
+
+  private static boolean isSaslFailure(Exception e) {
+      Throwable current = e;
+      do {
+          if (current instanceof SaslException) {
+            return true;
+          }
+          current = current.getCause();
+      } while (current != null);
+
+      return false;
+  }
   
   static RetriableException getWrappedRetriableException(Exception e) {
     if (!(e instanceof RemoteException)) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f2efaf01/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java
index 649af89..1accb0a0 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java
@@ -39,6 +39,8 @@ import java.util.concurrent.*;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 
+import javax.security.sasl.SaslException;
+
 import static org.apache.hadoop.io.retry.RetryPolicies.*;
 import static org.junit.Assert.*;
 import static org.mockito.Matchers.any;
@@ -326,4 +328,24 @@ public class TestRetryProxy {
     assertEquals(InterruptedException.class, e.getCause().getClass());
     assertEquals("sleep interrupted", e.getCause().getMessage());
   }
+
+  @Test
+  public void testNoRetryOnSaslError() throws Exception {
+    RetryPolicy policy = mock(RetryPolicy.class);
+    RetryPolicy realPolicy = RetryPolicies.failoverOnNetworkException(5);
+    setupMockPolicy(policy, realPolicy);
+
+    UnreliableInterface unreliable = (UnreliableInterface) RetryProxy.create(
+        UnreliableInterface.class, unreliableImpl, policy);
+
+    try {
+      unreliable.failsWithSASLExceptionTenTimes();
+      fail("Should fail");
+    } catch (SaslException e) {
+      // expected
+      verify(policy, times(1)).shouldRetry(any(Exception.class), anyInt(),
+          anyInt(), anyBoolean());
+      assertEquals(RetryDecision.FAIL, caughtRetryAction.action);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f2efaf01/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java
index 7ce8abc..465dc6f 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java
@@ -19,6 +19,8 @@ package org.apache.hadoop.io.retry;
 
 import java.io.IOException;
 
+import javax.security.sasl.SaslException;
+
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.ipc.StandbyException;
 
@@ -29,6 +31,7 @@ class UnreliableImplementation implements UnreliableInterface 
{
     failsOnceIOExceptionInvocationCount,
     failsOnceRemoteExceptionInvocationCount,
     failsTenTimesInvocationCount,
+    failsWithSASLExceptionTenTimesInvocationCount,
     succeedsOnceThenFailsCount,
     succeedsOnceThenFailsIdempotentCount,
     succeedsTenTimesThenFailsCount;
@@ -114,6 +117,13 @@ class UnreliableImplementation implements 
UnreliableInterface {
   }
 
   @Override
+  public void failsWithSASLExceptionTenTimes() throws SaslException {
+    if (failsWithSASLExceptionTenTimesInvocationCount ++ < 10) {
+      throw new SaslException();
+    }
+  }
+
+  @Override
   public String succeedsOnceThenFailsReturningString()
       throws UnreliableException, IOException, StandbyException {
     if (succeedsOnceThenFailsCount++ < 1) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f2efaf01/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java
index 6c9c153..d334542 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.io.retry;
 
 import java.io.IOException;
 
+import javax.security.sasl.SaslException;
+
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.ipc.StandbyException;
 
@@ -61,7 +63,9 @@ public interface UnreliableInterface {
   boolean failsOnceThenSucceedsWithReturnValue() throws UnreliableException;
 
   void failsTenTimesThenSucceeds() throws UnreliableException;
-  
+
+  void failsWithSASLExceptionTenTimes() throws SaslException;
+
   public String succeedsOnceThenFailsReturningString()
       throws UnreliableException, StandbyException, IOException;
   @Idempotent


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org

Reply via email to