[hadoop] 42/50: HDFS-14160. [SBN read] ObserverReadInvocationHandler should implement RpcInvocationHandler. Contributed by Konstantin V Shvachko.

2019-07-25 Thread cliang
This is an automated email from the ASF dual-hosted git repository.

cliang pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/hadoop.git

commit 451b8b8f1675366de3f17762385867eda307f904
Author: Konstantin V Shvachko 
AuthorDate: Wed Dec 19 12:39:57 2018 -0800

HDFS-14160. [SBN read] ObserverReadInvocationHandler should implement RpcInvocationHandler. Contributed by Konstantin V Shvachko.
---
 .../hdfs/server/namenode/ha/ObserverReadProxyProvider.java | 13 +++--
 .../org/apache/hadoop/hdfs/server/namenode/TestFsck.java   |  2 +-
 .../apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java  | 10 +++---
 .../hadoop/hdfs/server/namenode/ha/TestObserverNode.java   | 13 +
 4 files changed, 32 insertions(+), 6 deletions(-)
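
For context on the interface swap below: org.apache.hadoop.ipc.RpcInvocationHandler
extends java.lang.reflect.InvocationHandler with Closeable plus a getConnectionId()
method, which is why the first hunk adds exactly those two overrides. A minimal
sketch of a conforming handler follows; it is not part of this patch, and the class
name DelegatingReadHandler and its single delegate field are illustrative assumptions.

    import java.io.IOException;
    import java.lang.reflect.Method;

    import org.apache.hadoop.ipc.Client.ConnectionId;
    import org.apache.hadoop.ipc.RPC;
    import org.apache.hadoop.ipc.RpcInvocationHandler;

    // Illustrative only: satisfies RpcInvocationHandler by delegating every
    // reflective call to one underlying NameNode proxy object.
    class DelegatingReadHandler implements RpcInvocationHandler {
      private final Object delegate;  // hypothetical underlying RPC proxy

      DelegatingReadHandler(Object delegate) {
        this.delegate = delegate;
      }

      @Override
      public Object invoke(Object proxy, Method method, Object[] args)
          throws Throwable {
        // Forward the call to the wrapped proxy.
        return method.invoke(delegate, args);
      }

      @Override
      public void close() throws IOException {
        // Nothing to release in this sketch.
      }

      @Override
      public ConnectionId getConnectionId() {
        // Expose the wrapped proxy's connection id so RPC helpers that unwrap
        // proxy chains can see through this wrapper.
        return RPC.getConnectionIdForProxy(delegate);
      }
    }

Delegating getConnectionId() to the live proxy, as the patch does with
getCurrentProxy().proxy, is what keeps utilities such as RPC.getConnectionIdForProxy
working when the observer-read wrapper sits in front of the real NameNode proxy.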

diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/ObserverReadProxyProvider.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/ObserverReadProxyProvider.java
index e9d53f6..96932a7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/ObserverReadProxyProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/ObserverReadProxyProvider.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hdfs.server.namenode.ha;
 
 import java.io.Closeable;
 import java.io.IOException;
-import java.lang.reflect.InvocationHandler;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
@@ -39,9 +38,11 @@ import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
 import org.apache.hadoop.ipc.AlignmentContext;
+import org.apache.hadoop.ipc.Client.ConnectionId;
 import org.apache.hadoop.ipc.ObserverRetryOnActiveException;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.ipc.RpcInvocationHandler;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -239,7 +240,7 @@ public class ObserverReadProxyProvider
*
* Write requests are always forwarded to the active.
*/
-  private class ObserverReadInvocationHandler implements InvocationHandler {
+  private class ObserverReadInvocationHandler implements RpcInvocationHandler {
 
 @Override
 public Object invoke(Object proxy, final Method method, final Object[] args)
@@ -322,6 +323,14 @@ public class ObserverReadProxyProvider
   lastProxy = activeProxy;
   return retVal;
 }
+
+@Override
+public void close() throws IOException {}
+
+@Override
+public ConnectionId getConnectionId() {
+  return RPC.getConnectionIdForProxy(getCurrentProxy().proxy);
+}
   }
 
   @Override
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java
index 2b5d762..3ef8587 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java
@@ -166,7 +166,7 @@ public class TestFsck {
   private static final String LINE_SEPARATOR =
   System.getProperty("line.separator");
 
-  static String runFsck(Configuration conf, int expectedErrCode, 
+  public static String runFsck(Configuration conf, int expectedErrCode,
 boolean checkErrorCode, String... path)
 throws Exception {
 ByteArrayOutputStream bStream = new ByteArrayOutputStream();
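
Making runFsck public suggests it is now called from outside TestFsck (the diffstat
lists TestObserverNode.java, whose hunks are not shown here). A hedged sketch of such
a call; the configuration object and the asserted "HEALTHY" substring of fsck's
summary output are assumptions, not lines taken from this commit:

    // Hypothetical caller in another test class, e.g. an observer-read test:
    String output = TestFsck.runFsck(conf, 0, true, "/");
    // fsck over an intact namespace is expected to report a healthy filesystem.
    assertTrue(output.contains("HEALTHY"));
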
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java
index 1e9418b..16aa924 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java
@@ -175,10 +175,14 @@ public abstract class HATestUtil {
   MiniDFSCluster cluster, Configuration conf,
   Class classFPP, boolean isObserverReadEnabled)
   throws IOException, URISyntaxException {
-conf = new Configuration(conf);
-setupHAConfiguration(cluster, conf, 0, classFPP);
+String logicalName = conf.get(DFSConfigKeys.DFS_NAMESERVICES);
+URI nnUri = new URI(HdfsConstants.HDFS_URI_SCHEME + "://" + logicalName);
+conf.set(HdfsClientConfigKeys.Failover.PROXY_PROVIDER_KEY_PREFIX
++ "." + logicalName, classFPP.getName());
+conf.set("fs.defaultFS", nnUri.toString());
+
 DistributedFileSystem dfs = (DistributedFileSystem)
-

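The HATestUtil hunk above replaces the setupHAConfiguration call with in-place client
settings: it derives the logical hdfs:// URI from dfs.nameservices, binds the failover
proxy provider class (classFPP) for that nameservice, and points fs.defaultFS at it.
A minimal standalone sketch of the same settings, assuming an illustrative nameservice
name "mycluster" and assuming the per-NameNode keys (dfs.ha.namenodes.* and the RPC
addresses) are already present, as the MiniDFSCluster conf provides them in the test:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
    import org.apache.hadoop.hdfs.protocol.HdfsConstants;
    import org.apache.hadoop.hdfs.server.namenode.ha.ObserverReadProxyProvider;

    // Illustrative only: client-side configuration for observer reads.
    public class ObserverReadClientConfigSketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        String logicalName = "mycluster";  // assumed nameservice id
        conf.set(DFSConfigKeys.DFS_NAMESERVICES, logicalName);
        // Select the observer-read proxy provider for this nameservice,
        // as the hunk does with classFPP.getName().
        conf.set(HdfsClientConfigKeys.Failover.PROXY_PROVIDER_KEY_PREFIX
            + "." + logicalName, ObserverReadProxyProvider.class.getName());
        conf.set("fs.defaultFS",
            HdfsConstants.HDFS_URI_SCHEME + "://" + logicalName);
        // dfs.ha.namenodes.mycluster and the NameNode RPC addresses must be
        // set elsewhere for this to resolve against a real cluster.
        FileSystem fs = FileSystem.get(conf);  // reads may be served by Observers
        System.out.println(fs.getUri());
      }
    }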