HDFS-12865. RequestHedgingProxyProvider should handle case when none of the proxies are available. Contributed by Mukul Kumar Singh.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c30a26ab
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c30a26ab
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c30a26ab

Branch: refs/heads/HDFS-7240
Commit: c30a26abc54df669a77e0219fd9b48a47c179a99
Parents: 1e84e46
Author: Arpit Agarwal <[email protected]>
Authored: Sat Feb 24 14:25:56 2018 -0800
Committer: Arpit Agarwal <[email protected]>
Committed: Sat Feb 24 14:25:56 2018 -0800

----------------------------------------------------------------------
 .../ha/RequestHedgingProxyProvider.java         |  6 +++
 .../ha/TestRequestHedgingProxyProvider.java     | 45 ++++++++++++++++++++
 2 files changed, 51 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c30a26ab/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java
index 010e9e5..7b9cd64 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.namenode.ha;
 
+import java.io.IOException;
 import java.lang.reflect.InvocationHandler;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
@@ -87,6 +88,11 @@ public class RequestHedgingProxyProvider<T> extends
       // Optimization : if only 2 proxies are configured and one had failed
       // over, then we dont need to create a threadpool etc.
       targetProxies.remove(toIgnore);
+      if (targetProxies.size() == 0) {
+        LOG.trace("No valid proxies left");
+        throw new RemoteException(IOException.class.getName(),
+            "No valid proxies left. All NameNode proxies have failed over.");
+      }
       if (targetProxies.size() == 1) {
         ProxyInfo<T> proxyInfo = targetProxies.values().iterator().next();
         try {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c30a26ab/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java
index 65fbbf8..8d6b02d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java
@@ -28,6 +28,7 @@ import java.util.Iterator;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import org.apache.commons.lang.RandomStringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
@@ -290,6 +291,50 @@ public class TestRequestHedgingProxyProvider {
   }
 
   @Test
+  public void testSingleProxyFailover() throws Exception {
+    String singleNS = "mycluster-" + Time.monotonicNow();
+    URI singleNNUri = new URI("hdfs://" + singleNS);
+    Configuration singleConf = new Configuration();
+    singleConf.set(HdfsClientConfigKeys.DFS_NAMESERVICES, singleNS);
+    singleConf.set(HdfsClientConfigKeys.
+        DFS_HA_NAMENODES_KEY_PREFIX + "." + singleNS, "nn1");
+
+    singleConf.set(HdfsClientConfigKeys.
+        DFS_NAMENODE_RPC_ADDRESS_KEY + "." + singleNS + ".nn1",
+        RandomStringUtils.randomAlphabetic(8) + ".foo.bar:9820");
+    ClientProtocol active = Mockito.mock(ClientProtocol.class);
+    Mockito
+        .when(active.getBlockLocations(Matchers.anyString(),
+            Matchers.anyLong(), Matchers.anyLong()))
+        .thenThrow(new RemoteException("java.io.FileNotFoundException",
+            "File does not exist!"));
+
+    RequestHedgingProxyProvider<ClientProtocol> provider =
+        new RequestHedgingProxyProvider<>(singleConf, singleNNUri,
+            ClientProtocol.class, createFactory(active));
+    try {
+      provider.getProxy().proxy.getBlockLocations("/tmp/test.file", 0L, 20L);
+      Assert.fail("Should fail since the active namenode throws"
+          + " FileNotFoundException!");
+    } catch (RemoteException ex) {
+      Exception rEx = ex.unwrapRemoteException();
+      Assert.assertTrue(rEx instanceof FileNotFoundException);
+    }
+    //Perform failover now, there will be no active proxies now
+    provider.performFailover(active);
+    try {
+      provider.getProxy().proxy.getBlockLocations("/tmp/test.file", 0L, 20L);
+      Assert.fail("Should fail since the active namenode throws"
+          + " FileNotFoundException!");
+    } catch (RemoteException ex) {
+      Exception rEx = ex.unwrapRemoteException();
+      Assert.assertTrue(rEx instanceof IOException);
+      Assert.assertTrue(rEx.getMessage().equals("No valid proxies left."
+          + " All NameNode proxies have failed over."));
+    }
+  }
+
+  @Test
   public void testPerformFailoverWith3Proxies() throws Exception {
     conf.set(HdfsClientConfigKeys.DFS_HA_NAMENODES_KEY_PREFIX + "."
         + ns, "nn1,nn2,nn3");
