This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch branch-2.4
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2.4 by this push:
     new 7d8603be17a HBASE-28312 The bad auth exception can not be passed to client rpc calls properly (#5629)
7d8603be17a is described below

commit 7d8603be17a3279eed9b4dbfd22a8abb7be0a7f1
Author: Duo Zhang <zhang...@apache.org>
AuthorDate: Wed Jan 17 11:20:14 2024 +0800

    HBASE-28312 The bad auth exception can not be passed to client rpc calls properly (#5629)
    
    Signed-off-by: Bryan Beaudreault <bbeaudrea...@apache.org>
    (cherry picked from commit 6017937ced115b1ad367faf9f836c3112f6b6f9d)
---
 .../hadoop/hbase/ipc/BlockingRpcConnection.java    | 77 ++++++++++++----------
 .../java/org/apache/hadoop/hbase/ipc/IPCUtil.java  | 19 +++++-
 .../org/apache/hadoop/hbase/ipc/RpcConnection.java |  3 +-
 .../apache/hadoop/hbase/ipc/DummyException.java    | 27 ++++++++
 .../hbase/ipc/DummyFatalConnectionException.java   | 27 ++++++++
 .../org/apache/hadoop/hbase/ipc/TestIPCUtil.java   | 22 +++++++
 .../hbase/ipc/NettyRpcServerPreambleHandler.java   |  9 +++
 .../apache/hadoop/hbase/ipc/AbstractTestIPC.java   | 24 +++++++
 .../hbase/ipc/BadAuthNettyRpcConnection.java       | 36 ++++++++++
 .../apache/hadoop/hbase/ipc/TestBlockingIPC.java   | 20 ++++++
 .../org/apache/hadoop/hbase/ipc/TestNettyIPC.java  | 11 ++++
 11 files changed, 236 insertions(+), 39 deletions(-)

diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
index df46322afd2..977a969cd47 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
@@ -18,9 +18,7 @@
 package org.apache.hadoop.hbase.ipc;
 
 import static org.apache.hadoop.hbase.ipc.IPCUtil.buildRequestHeader;
-import static org.apache.hadoop.hbase.ipc.IPCUtil.createRemoteException;
 import static 
org.apache.hadoop.hbase.ipc.IPCUtil.getTotalSizeWhenWrittenDelimited;
-import static org.apache.hadoop.hbase.ipc.IPCUtil.isFatalConnectionException;
 import static org.apache.hadoop.hbase.ipc.IPCUtil.setCancelled;
 import static org.apache.hadoop.hbase.ipc.IPCUtil.write;
 
@@ -69,8 +67,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.com.google.protobuf.Message;
-import org.apache.hbase.thirdparty.com.google.protobuf.Message.Builder;
 import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
+import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;
 import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;
 import org.apache.hbase.thirdparty.io.netty.buffer.PooledByteBufAllocator;
 
@@ -711,6 +709,25 @@ class BlockingRpcConnection extends RpcConnection 
implements Runnable {
       // Read the header
       ResponseHeader responseHeader = ResponseHeader.parseDelimitedFrom(in);
       int id = responseHeader.getCallId();
+      if (LOG.isTraceEnabled()) {
+        LOG.trace("got response header " + 
TextFormat.shortDebugString(responseHeader)
+          + ", totalSize: " + totalSize + " bytes");
+      }
+      RemoteException remoteExc;
+      if (responseHeader.hasException()) {
+        ExceptionResponse exceptionResponse = responseHeader.getException();
+        remoteExc = IPCUtil.createRemoteException(exceptionResponse);
+        if (IPCUtil.isFatalConnectionException(exceptionResponse)) {
+          // Here we will cleanup all calls so do not need to fall back, just 
return.
+          synchronized (this) {
+            closeConn(remoteExc);
+          }
+          return;
+        }
+      } else {
+        remoteExc = null;
+      }
+
       call = calls.remove(id); // call.done have to be set before leaving this 
method
       expectedCall = (call != null && !call.isDone());
       if (!expectedCall) {
@@ -721,46 +738,34 @@ class BlockingRpcConnection extends RpcConnection 
implements Runnable {
         // this connection.
         int readSoFar = getTotalSizeWhenWrittenDelimited(responseHeader);
         int whatIsLeftToRead = totalSize - readSoFar;
+        LOG.debug("Unknown callId: " + id + ", skipping over this response of 
" + whatIsLeftToRead
+          + " bytes");
         IOUtils.skipFully(in, whatIsLeftToRead);
         if (call != null) {
           call.callStats.setResponseSizeBytes(totalSize);
-          call.callStats
-            .setCallTimeMs(EnvironmentEdgeManager.currentTime() - 
call.callStats.getStartTime());
         }
         return;
       }
-      if (responseHeader.hasException()) {
-        ExceptionResponse exceptionResponse = responseHeader.getException();
-        RemoteException re = createRemoteException(exceptionResponse);
-        call.setException(re);
-        call.callStats.setResponseSizeBytes(totalSize);
-        call.callStats
-          .setCallTimeMs(EnvironmentEdgeManager.currentTime() - 
call.callStats.getStartTime());
-        if (isFatalConnectionException(exceptionResponse)) {
-          synchronized (this) {
-            closeConn(re);
-          }
-        }
-      } else {
-        Message value = null;
-        if (call.responseDefaultType != null) {
-          Builder builder = call.responseDefaultType.newBuilderForType();
-          ProtobufUtil.mergeDelimitedFrom(builder, in);
-          value = builder.build();
-        }
-        CellScanner cellBlockScanner = null;
-        if (responseHeader.hasCellBlockMeta()) {
-          int size = responseHeader.getCellBlockMeta().getLength();
-          byte[] cellBlock = new byte[size];
-          IOUtils.readFully(this.in, cellBlock, 0, cellBlock.length);
-          cellBlockScanner = 
this.rpcClient.cellBlockBuilder.createCellScanner(this.codec,
-            this.compressor, cellBlock);
-        }
-        call.setResponse(value, cellBlockScanner);
-        call.callStats.setResponseSizeBytes(totalSize);
-        call.callStats
-          .setCallTimeMs(EnvironmentEdgeManager.currentTime() - 
call.callStats.getStartTime());
+      call.callStats.setResponseSizeBytes(totalSize);
+      if (remoteExc != null) {
+        call.setException(remoteExc);
+        return;
+      }
+      Message value = null;
+      if (call.responseDefaultType != null) {
+        Message.Builder builder = call.responseDefaultType.newBuilderForType();
+        ProtobufUtil.mergeDelimitedFrom(builder, in);
+        value = builder.build();
+      }
+      CellScanner cellBlockScanner = null;
+      if (responseHeader.hasCellBlockMeta()) {
+        int size = responseHeader.getCellBlockMeta().getLength();
+        byte[] cellBlock = new byte[size];
+        IOUtils.readFully(this.in, cellBlock, 0, cellBlock.length);
+        cellBlockScanner =
+          this.rpcClient.cellBlockBuilder.createCellScanner(this.codec, 
this.compressor, cellBlock);
       }
+      call.setResponse(value, cellBlockScanner);
     } catch (IOException e) {
       if (expectedCall) {
         call.setException(e);
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
index 86be0584329..6a439754183 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
@@ -38,6 +38,8 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 import org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream;
@@ -56,6 +58,8 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader
 @InterfaceAudience.Private
 class IPCUtil {
 
+  private static final Logger LOG = LoggerFactory.getLogger(IPCUtil.class);
+
   /**
    * Write out header, param, and cell block if there is one.
    * @param dos       Stream to write into
@@ -145,8 +149,19 @@ class IPCUtil {
   }
 
   /** Returns True if the exception is a fatal connection exception. */
-  static boolean isFatalConnectionException(final ExceptionResponse e) {
-    return 
e.getExceptionClassName().equals(FatalConnectionException.class.getName());
+  static boolean isFatalConnectionException(ExceptionResponse e) {
+    if 
(e.getExceptionClassName().equals(FatalConnectionException.class.getName())) {
+      return true;
+    }
+    // try our best to check for sub classes of FatalConnectionException
+    try {
+      return e.getExceptionClassName() != null && 
FatalConnectionException.class.isAssignableFrom(
+        Class.forName(e.getExceptionClassName(), false, 
IPCUtil.class.getClassLoader()));
+      // Class.forName may throw ExceptionInInitializerError so we have to 
catch Throwable here
+    } catch (Throwable t) {
+      LOG.debug("Can not get class object for {}", e.getExceptionClassName(), 
t);
+      return false;
+    }
   }
 
   static IOException toIOE(Throwable t) {
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java
index 912fa4fb065..7cccc56dbe6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java
@@ -140,7 +140,8 @@ abstract class RpcConnection {
     }
   }
 
-  protected final byte[] getConnectionHeaderPreamble() {
+  // will be overridden in tests
+  protected byte[] getConnectionHeaderPreamble() {
     // Assemble the preamble up in a buffer first and then send it. Writing 
individual elements,
     // they are getting sent across piecemeal according to wireshark and then 
server is messing
     // up the reading on occasion (the passed in stream is not buffered yet).
diff --git 
a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/DummyException.java 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/DummyException.java
new file mode 100644
index 00000000000..407c1248a98
--- /dev/null
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/DummyException.java
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.ipc;
+
+/**
+ * Just a dummy exception for testing IPCUtil.isFatalConnectionException.
+ */
+public class DummyException extends Exception {
+
+  private static final long serialVersionUID = 215191975455115118L;
+
+}
diff --git 
a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/DummyFatalConnectionException.java
 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/DummyFatalConnectionException.java
new file mode 100644
index 00000000000..437b60b031b
--- /dev/null
+++ 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/DummyFatalConnectionException.java
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.ipc;
+
+/**
+ * Just a dummy exception for testing IPCUtil.isFatalConnectionException.
+ */
+public class DummyFatalConnectionException extends FatalConnectionException {
+
+  private static final long serialVersionUID = -1966815615846798490L;
+
+}
diff --git 
a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java
index c327896f72a..ea49389a158 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.ipc;
 
 import static org.hamcrest.CoreMatchers.instanceOf;
 import static org.hamcrest.MatcherAssert.assertThat;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
@@ -44,6 +45,8 @@ import org.junit.experimental.categories.Category;
 import org.apache.hbase.thirdparty.io.netty.channel.DefaultEventLoop;
 import org.apache.hbase.thirdparty.io.netty.channel.EventLoop;
 
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse;
+
 @Category({ ClientTests.class, SmallTests.class })
 public class TestIPCUtil {
 
@@ -159,4 +162,23 @@ public class TestIPCUtil {
       eventLoop.shutdownGracefully();
     }
   }
+
+  @Test
+  public void testIsFatalConnectionException() {
+    // intentionally not reference the class object directly, so here we will 
not load the class, to
+    // make sure that in isFatalConnectionException, we can use initialized = 
false when calling
+    // Class.forName
+    ExceptionResponse resp = ExceptionResponse.newBuilder()
+      
.setExceptionClassName("org.apache.hadoop.hbase.ipc.DummyFatalConnectionException").build();
+    assertTrue(IPCUtil.isFatalConnectionException(resp));
+
+    resp = ExceptionResponse.newBuilder()
+      
.setExceptionClassName("org.apache.hadoop.hbase.ipc.DummyException").build();
+    assertFalse(IPCUtil.isFatalConnectionException(resp));
+
+    // class not found
+    resp = ExceptionResponse.newBuilder()
+      .setExceptionClassName("org.apache.hadoop.hbase.ipc.WhatEver").build();
+    assertFalse(IPCUtil.isFatalConnectionException(resp));
+  }
 }
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServerPreambleHandler.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServerPreambleHandler.java
index cf2551e1c08..be6619a00b0 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServerPreambleHandler.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServerPreambleHandler.java
@@ -34,6 +34,7 @@ import 
org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler;
 class NettyRpcServerPreambleHandler extends 
SimpleChannelInboundHandler<ByteBuf> {
 
   private final NettyRpcServer rpcServer;
+  private boolean processPreambleError;
 
   public NettyRpcServerPreambleHandler(NettyRpcServer rpcServer) {
     this.rpcServer = rpcServer;
@@ -41,11 +42,19 @@ class NettyRpcServerPreambleHandler extends 
SimpleChannelInboundHandler<ByteBuf>
 
   @Override
   protected void channelRead0(ChannelHandlerContext ctx, ByteBuf msg) throws 
Exception {
+    if (processPreambleError) {
+      // if we failed to process preamble, we will close the connection 
immediately, but it is
+      // possible that we have already received some bytes after the 
'preamble' so when closing, the
+      // netty framework will still pass them here. So we set a flag here to 
just skip processing
+      // these broken messages.
+      return;
+    }
     NettyServerRpcConnection conn = 
createNettyServerRpcConnection(ctx.channel());
     ByteBuffer buf = ByteBuffer.allocate(msg.readableBytes());
     msg.readBytes(buf);
     buf.flip();
     if (!conn.processPreamble(buf)) {
+      processPreambleError = true;
       conn.close();
       return;
     }
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java
index 2de02d4bf7c..7b058bf4526 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java
@@ -20,10 +20,14 @@ package org.apache.hadoop.hbase.ipc;
 import static org.apache.hadoop.hbase.ipc.TestProtobufRpcServiceImpl.SERVICE;
 import static 
org.apache.hadoop.hbase.ipc.TestProtobufRpcServiceImpl.newBlockingStub;
 import static org.apache.hadoop.hbase.ipc.TestProtobufRpcServiceImpl.newStub;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.instanceOf;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertThrows;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 import static org.mockito.ArgumentMatchers.anyObject;
@@ -34,6 +38,7 @@ import static 
org.mockito.internal.verification.VerificationModeFactory.times;
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
@@ -414,4 +419,23 @@ public abstract class AbstractTestIPC {
     }
   }
 
+  protected abstract AbstractRpcClient<?> createBadAuthRpcClient(Configuration 
conf);
+
+  @Test
+  public void testBadPreambleHeader() throws IOException, ServiceException {
+    Configuration clientConf = new Configuration(CONF);
+    RpcServer rpcServer = createRpcServer(null, "testRpcServer", 
Collections.emptyList(),
+      new InetSocketAddress("localhost", 0), CONF, new FifoRpcScheduler(CONF, 
1));
+    try (AbstractRpcClient<?> client = createBadAuthRpcClient(clientConf)) {
+      rpcServer.start();
+      BlockingInterface stub = newBlockingStub(client, 
rpcServer.getListenerAddress());
+      ServiceException se = assertThrows(ServiceException.class,
+        () -> stub.echo(null, 
EchoRequestProto.newBuilder().setMessage("hello").build()));
+      IOException ioe = ProtobufUtil.handleRemoteException(se);
+      assertThat(ioe, instanceOf(BadAuthException.class));
+      assertThat(ioe.getMessage(), containsString("authName=unknown"));
+    } finally {
+      rpcServer.stop();
+    }
+  }
 }
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/BadAuthNettyRpcConnection.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/BadAuthNettyRpcConnection.java
new file mode 100644
index 00000000000..63554421dfb
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/BadAuthNettyRpcConnection.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.ipc;
+
+import java.io.IOException;
+
+public class BadAuthNettyRpcConnection extends NettyRpcConnection {
+
+  public BadAuthNettyRpcConnection(NettyRpcClient rpcClient, ConnectionId 
remoteId)
+    throws IOException {
+    super(rpcClient, remoteId);
+  }
+
+  @Override
+  protected byte[] getConnectionHeaderPreamble() {
+    byte[] header = super.getConnectionHeaderPreamble();
+    // set an invalid auth code
+    header[header.length - 1] = -10;
+    return header;
+  }
+}
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestBlockingIPC.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestBlockingIPC.java
index bb71ac2611a..9c9a6d5d608 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestBlockingIPC.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestBlockingIPC.java
@@ -107,4 +107,24 @@ public class TestBlockingIPC extends AbstractTestIPC {
     Configuration conf, RpcScheduler scheduler) throws IOException {
     return new TestFailingRpcServer(server, name, services, bindAddress, conf, 
scheduler);
   }
+
+  @Override
+  protected AbstractRpcClient<?> createBadAuthRpcClient(Configuration conf) {
+    return new BlockingRpcClient(conf) {
+
+      @Override
+      protected BlockingRpcConnection createConnection(ConnectionId remoteId) 
throws IOException {
+        return new BlockingRpcConnection(this, remoteId) {
+          @Override
+          protected byte[] getConnectionHeaderPreamble() {
+            byte[] header = super.getConnectionHeaderPreamble();
+            // set an invalid auth code
+            header[header.length - 1] = -10;
+            return header;
+          }
+        };
+      }
+
+    };
+  }
 }
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java
index 1cffa7aaf6c..40fb61e23df 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java
@@ -182,4 +182,15 @@ public class TestNettyIPC extends AbstractTestIPC {
     Configuration conf, RpcScheduler scheduler) throws IOException {
     return new TestFailingRpcServer(server, name, services, bindAddress, conf, 
scheduler);
   }
+
+  @Override
+  protected AbstractRpcClient<?> createBadAuthRpcClient(Configuration conf) {
+    return new NettyRpcClient(conf) {
+
+      @Override
+      protected NettyRpcConnection createConnection(ConnectionId remoteId) 
throws IOException {
+        return new BadAuthNettyRpcConnection(this, remoteId);
+      }
+    };
+  }
 }

Reply via email to