This is an automated email from the ASF dual-hosted git repository.
zhangduo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/master by this push:
new 19c9d33cf2b HBASE-29144 Client request fails for KERBEROS with
RpcConnectionRegistry (#7588)
19c9d33cf2b is described below
commit 19c9d33cf2bac83bfe1416ee0364e7b4c9a5d03b
Author: Duo Zhang <[email protected]>
AuthorDate: Wed Jan 14 22:00:43 2026 +0800
HBASE-29144 Client request fails for KERBEROS with RpcConnectionRegistry
(#7588)
Signed-off-by: Junegunn Choi <[email protected]>
---
.../apache/hadoop/hbase/ipc/AbstractRpcClient.java | 5 +
.../hadoop/hbase/ipc/BlockingRpcConnection.java | 5 +-
.../hadoop/hbase/ipc/NettyRpcConnection.java | 5 +-
.../org/apache/hadoop/hbase/ipc/RpcConnection.java | 8 +-
.../provider/AuthenticationProviderSelector.java | 7 +-
.../SaslClientAuthenticationProviders.java | 78 +++++------
.../TestSaslClientAuthenticationProviders.java | 48 ++-----
.../hbase/HBaseParameterizedInvocationContext.java | 3 +-
.../hbase/HBaseParameterizedParameterResolver.java | 18 ++-
.../hbase/mapreduce/TestTableMapReduceUtil.java | 7 -
.../org/apache/hadoop/hbase/ipc/RpcServer.java | 3 +
.../hadoop/hbase/ipc/ServerRpcConnection.java | 7 +-
.../SaslServerAuthenticationProviders.java | 103 +++++++-------
...WithDifferentConnectionRegistriesTestBase.java} | 93 ++++---------
...ReadWriteWithDifferentConnectionRegistries.java | 154 +++------------------
...ReadWriteWithDifferentConnectionRegistries.java | 84 +++++++++++
.../hadoop/hbase/ipc/TestRpcConnectionHeader.java | 16 ++-
.../hbase/security/AbstractTestSecureIPC.java | 152 +++++---------------
.../hadoop/hbase/security/TestSaslTlsIPC.java | 74 +++++-----
.../hadoop/hbase/security/TestSecureIPC.java | 49 +++----
.../TestSaslServerAuthenticationProviders.java | 62 ++-------
21 files changed, 398 insertions(+), 583 deletions(-)
diff --git
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
index 3742eb8118a..60f4b7932bf 100644
---
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
+++
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.codec.KeyValueCodec;
import org.apache.hadoop.hbase.net.Address;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
+import
org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProviders;
import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.PoolMap;
@@ -116,6 +117,8 @@ public abstract class AbstractRpcClient<T extends
RpcConnection> implements RpcC
protected final UserProvider userProvider;
protected final CellBlockBuilder cellBlockBuilder;
+ protected final SaslClientAuthenticationProviders providers;
+
protected final int minIdleTimeBeforeClose; // if the connection is idle for
more than this
// time (in ms), it will be closed at any moment.
protected final int maxRetries; // the max. no. of retries for socket
connections
@@ -184,6 +187,8 @@ public abstract class AbstractRpcClient<T extends
RpcConnection> implements RpcC
conf.getInt(HConstants.HBASE_CLIENT_PERSERVER_REQUESTS_THRESHOLD,
HConstants.DEFAULT_HBASE_CLIENT_PERSERVER_REQUESTS_THRESHOLD);
+ this.providers = new SaslClientAuthenticationProviders(conf);
+
this.connections = new PoolMap<>(getPoolType(conf), getPoolSize(conf));
this.cleanupIdleConnectionTask = IDLE_CONN_SWEEPER.scheduleAtFixedRate(new
Runnable() {
diff --git
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
index 4b3d2de466b..964bbed749a 100644
---
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
+++
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
@@ -212,8 +212,9 @@ class BlockingRpcConnection extends RpcConnection
implements Runnable {
BlockingRpcConnection(BlockingRpcClient rpcClient, ConnectionId remoteId)
throws IOException {
super(rpcClient.conf, AbstractRpcClient.WHEEL_TIMER, remoteId,
rpcClient.clusterId,
- rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.codec,
rpcClient.compressor,
- rpcClient.cellBlockBuilder, rpcClient.metrics,
rpcClient.connectionAttributes);
+ rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.providers,
rpcClient.codec,
+ rpcClient.compressor, rpcClient.cellBlockBuilder, rpcClient.metrics,
+ rpcClient.connectionAttributes);
this.rpcClient = rpcClient;
this.connectionHeaderPreamble = getConnectionHeaderPreamble();
ConnectionHeader header = getConnectionHeader();
diff --git
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java
index 85f7c0a3e61..7445578f460 100644
---
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java
+++
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java
@@ -106,8 +106,9 @@ class NettyRpcConnection extends RpcConnection {
NettyRpcConnection(NettyRpcClient rpcClient, ConnectionId remoteId) throws
IOException {
super(rpcClient.conf, AbstractRpcClient.WHEEL_TIMER, remoteId,
rpcClient.clusterId,
- rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.codec,
rpcClient.compressor,
- rpcClient.cellBlockBuilder, rpcClient.metrics,
rpcClient.connectionAttributes);
+ rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.providers,
rpcClient.codec,
+ rpcClient.compressor, rpcClient.cellBlockBuilder, rpcClient.metrics,
+ rpcClient.connectionAttributes);
this.rpcClient = rpcClient;
this.eventLoop = rpcClient.group.next();
byte[] connectionHeaderPreamble = getConnectionHeaderPreamble();
diff --git
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java
index bee5719d455..56b64d0f5ba 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java
@@ -121,9 +121,9 @@ abstract class RpcConnection {
private String lastSucceededServerPrincipal;
protected RpcConnection(Configuration conf, HashedWheelTimer timeoutTimer,
ConnectionId remoteId,
- String clusterId, boolean isSecurityEnabled, Codec codec, CompressionCodec
compressor,
- CellBlockBuilder cellBlockBuilder, MetricsConnection metrics,
- Map<String, byte[]> connectionAttributes) throws IOException {
+ String clusterId, boolean isSecurityEnabled,
SaslClientAuthenticationProviders providers,
+ Codec codec, CompressionCodec compressor, CellBlockBuilder
cellBlockBuilder,
+ MetricsConnection metrics, Map<String, byte[]> connectionAttributes)
throws IOException {
this.timeoutTimer = timeoutTimer;
this.codec = codec;
this.compressor = compressor;
@@ -136,8 +136,6 @@ abstract class RpcConnection {
this.useSasl = isSecurityEnabled;
// Choose the correct Token and AuthenticationProvider for this client to
use
- SaslClientAuthenticationProviders providers =
- SaslClientAuthenticationProviders.getInstance(conf);
Pair<SaslClientAuthenticationProvider, Token<? extends TokenIdentifier>>
pair;
if (useSasl && securityInfo != null) {
pair = providers.selectProvider(clusterId, ticket);
diff --git
a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/AuthenticationProviderSelector.java
b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/AuthenticationProviderSelector.java
index cdd1fdb381f..4653217b90d 100644
---
a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/AuthenticationProviderSelector.java
+++
b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/AuthenticationProviderSelector.java
@@ -32,9 +32,10 @@ import org.apache.yetus.audience.InterfaceStability;
public interface AuthenticationProviderSelector {
/**
- * Initializes the implementation with configuration and a set of providers
available. This method
- * should be called exactly once per implementation prior to calling
- * {@link #selectProvider(String, User)}.
+ * Initializes the implementation with configuration and a set of providers
available.
+ * <p>
+ * This method is called once upon construction, before {@link
#selectProvider(String, User)} is
+ * invoked.
*/
void configure(Configuration conf,
Collection<SaslClientAuthenticationProvider> availableProviders);
diff --git
a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/SaslClientAuthenticationProviders.java
b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/SaslClientAuthenticationProviders.java
index a78ff3386a4..a0ad1b405d0 100644
---
a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/SaslClientAuthenticationProviders.java
+++
b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/SaslClientAuthenticationProviders.java
@@ -48,16 +48,36 @@ public final class SaslClientAuthenticationProviders {
public static final String SELECTOR_KEY = "hbase.client.sasl.provider.class";
public static final String EXTRA_PROVIDERS_KEY =
"hbase.client.sasl.provider.extras";
- private static final AtomicReference<SaslClientAuthenticationProviders>
providersRef =
+ private static final AtomicReference<SaslClientAuthenticationProviders>
PROVIDER_REF =
new AtomicReference<>();
private final Collection<SaslClientAuthenticationProvider> providers;
private final AuthenticationProviderSelector selector;
- private
SaslClientAuthenticationProviders(Collection<SaslClientAuthenticationProvider>
providers,
- AuthenticationProviderSelector selector) {
- this.providers = providers;
- this.selector = selector;
+ /**
+ * Creates a new instance of SaslClientAuthenticationProviders.
+ * @param conf the configuration to use for loading providers and selector
+ */
+ public SaslClientAuthenticationProviders(Configuration conf) {
+ ServiceLoader<SaslClientAuthenticationProvider> loader =
+ ServiceLoader.load(SaslClientAuthenticationProvider.class,
+ SaslClientAuthenticationProviders.class.getClassLoader());
+ HashMap<Byte, SaslClientAuthenticationProvider> providerMap = new
HashMap<>();
+ for (SaslClientAuthenticationProvider provider : loader) {
+ addProviderIfNotExists(provider, providerMap);
+ }
+
+ addExplicitProviders(conf, providerMap);
+
+ providers = Collections.unmodifiableCollection(providerMap.values());
+
+ if (LOG.isTraceEnabled()) {
+ String loadedProviders = providers.stream().map((provider) ->
provider.getClass().getName())
+ .collect(Collectors.joining(", "));
+ LOG.trace("Found SaslClientAuthenticationProviders {}", loadedProviders);
+ }
+
+ selector = instantiateSelector(conf, providers);
}
/**
@@ -69,12 +89,16 @@ public final class SaslClientAuthenticationProviders {
/**
* Returns a singleton instance of {@link SaslClientAuthenticationProviders}.
+ * @deprecated Since 2.5.14 and 2.6.4, will be removed in newer minor
release lines. This class
+ * should not be a singleton; please do not use it anymore. See
HBASE-29144 for more
+ * details.
*/
+ @Deprecated
public static synchronized SaslClientAuthenticationProviders
getInstance(Configuration conf) {
- SaslClientAuthenticationProviders providers = providersRef.get();
+ SaslClientAuthenticationProviders providers = PROVIDER_REF.get();
if (providers == null) {
- providers = instantiate(conf);
- providersRef.set(providers);
+ providers = new SaslClientAuthenticationProviders(conf);
+ PROVIDER_REF.set(providers);
}
return providers;
@@ -82,9 +106,13 @@ public final class SaslClientAuthenticationProviders {
/**
* Removes the cached singleton instance of {@link
SaslClientAuthenticationProviders}.
+ * @deprecated Since 2.5.14 and 2.6.4, will be removed in newer minor
release lines. This class
+ * should not be a singleton; please do not use it anymore. See
HBASE-29144 for more
+ * details.
*/
+ @Deprecated
public static synchronized void reset() {
- providersRef.set(null);
+ PROVIDER_REF.set(null);
}
/**
@@ -93,7 +121,7 @@ public final class SaslClientAuthenticationProviders {
*/
static void addProviderIfNotExists(SaslClientAuthenticationProvider provider,
HashMap<Byte, SaslClientAuthenticationProvider> providers) {
- Byte code = provider.getSaslAuthMethod().getCode();
+ byte code = provider.getSaslAuthMethod().getCode();
SaslClientAuthenticationProvider existingProvider = providers.get(code);
if (existingProvider != null) {
throw new RuntimeException("Already registered authentication provider
with " + code + " "
@@ -105,7 +133,7 @@ public final class SaslClientAuthenticationProviders {
/**
* Instantiates the ProviderSelector implementation from the provided
configuration.
*/
- static AuthenticationProviderSelector instantiateSelector(Configuration conf,
+ private static AuthenticationProviderSelector
instantiateSelector(Configuration conf,
Collection<SaslClientAuthenticationProvider> providers) {
Class<? extends AuthenticationProviderSelector> clz =
conf.getClass(SELECTOR_KEY,
BuiltInProviderSelector.class, AuthenticationProviderSelector.class);
@@ -161,34 +189,6 @@ public final class SaslClientAuthenticationProviders {
}
}
- /**
- * Instantiates all client authentication providers and returns an instance
of
- * {@link SaslClientAuthenticationProviders}.
- */
- static SaslClientAuthenticationProviders instantiate(Configuration conf) {
- ServiceLoader<SaslClientAuthenticationProvider> loader =
- ServiceLoader.load(SaslClientAuthenticationProvider.class,
- SaslClientAuthenticationProviders.class.getClassLoader());
- HashMap<Byte, SaslClientAuthenticationProvider> providerMap = new
HashMap<>();
- for (SaslClientAuthenticationProvider provider : loader) {
- addProviderIfNotExists(provider, providerMap);
- }
-
- addExplicitProviders(conf, providerMap);
-
- Collection<SaslClientAuthenticationProvider> providers =
- Collections.unmodifiableCollection(providerMap.values());
-
- if (LOG.isTraceEnabled()) {
- String loadedProviders = providers.stream().map((provider) ->
provider.getClass().getName())
- .collect(Collectors.joining(", "));
- LOG.trace("Found SaslClientAuthenticationProviders {}", loadedProviders);
- }
-
- AuthenticationProviderSelector selector = instantiateSelector(conf,
providers);
- return new SaslClientAuthenticationProviders(providers, selector);
- }
-
/**
* Returns the provider and token pair for SIMPLE authentication. This
method is a "hack" while
* SIMPLE authentication for HBase does not flow through the SASL codepath.
diff --git
a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/provider/TestSaslClientAuthenticationProviders.java
b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/provider/TestSaslClientAuthenticationProviders.java
index 029c880600b..9b4fdef0613 100644
---
a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/provider/TestSaslClientAuthenticationProviders.java
+++
b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/provider/TestSaslClientAuthenticationProviders.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hbase.security.provider;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotSame;
-import static org.junit.Assert.assertSame;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertSame;
+import static org.junit.jupiter.api.Assertions.assertThrows;
import java.io.IOException;
import java.net.InetAddress;
@@ -27,7 +27,6 @@ import java.util.HashMap;
import java.util.Map;
import javax.security.sasl.SaslClient;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.security.SecurityInfo;
import org.apache.hadoop.hbase.security.User;
@@ -36,19 +35,15 @@ import
org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation;
-@Category({ SmallTests.class, SecurityTests.class })
+@Tag(SmallTests.TAG)
+@Tag(SecurityTests.TAG)
public class TestSaslClientAuthenticationProviders {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestSaslClientAuthenticationProviders.class);
-
@Test
public void testCannotAddTheSameProviderTwice() {
HashMap<Byte, SaslClientAuthenticationProvider> registeredProviders = new
HashMap<>();
@@ -58,38 +53,19 @@ public class TestSaslClientAuthenticationProviders {
SaslClientAuthenticationProviders.addProviderIfNotExists(p1,
registeredProviders);
assertEquals(1, registeredProviders.size());
- try {
- SaslClientAuthenticationProviders.addProviderIfNotExists(p2,
registeredProviders);
- } catch (RuntimeException e) {
- }
+ assertThrows(RuntimeException.class,
+ () -> SaslClientAuthenticationProviders.addProviderIfNotExists(p2,
registeredProviders));
- assertSame("Expected the original provider to be present", p1,
- registeredProviders.entrySet().iterator().next().getValue());
+ assertSame(p1, registeredProviders.entrySet().iterator().next().getValue(),
+ "Expected the original provider to be present");
}
@Test
- public void testInstanceIsCached() {
- Configuration conf = HBaseConfiguration.create();
- SaslClientAuthenticationProviders providers1 =
- SaslClientAuthenticationProviders.getInstance(conf);
- SaslClientAuthenticationProviders providers2 =
- SaslClientAuthenticationProviders.getInstance(conf);
- assertSame(providers1, providers2);
-
- SaslClientAuthenticationProviders.reset();
-
- SaslClientAuthenticationProviders providers3 =
- SaslClientAuthenticationProviders.getInstance(conf);
- assertNotSame(providers1, providers3);
- assertEquals(providers1.getNumRegisteredProviders(),
providers3.getNumRegisteredProviders());
- }
-
- @Test(expected = RuntimeException.class)
public void testDifferentConflictingImplementationsFail() {
Configuration conf = HBaseConfiguration.create();
conf.setStrings(SaslClientAuthenticationProviders.EXTRA_PROVIDERS_KEY,
ConflictingProvider1.class.getName(),
ConflictingProvider2.class.getName());
- SaslClientAuthenticationProviders.getInstance(conf);
+ assertThrows(RuntimeException.class, () -> new
SaslClientAuthenticationProviders(conf));
}
static class ConflictingProvider1 implements
SaslClientAuthenticationProvider {
diff --git
a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedInvocationContext.java
b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedInvocationContext.java
index 602b06bf9cb..e13c7e58c80 100644
---
a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedInvocationContext.java
+++
b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedInvocationContext.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase;
+import java.util.Collections;
import java.util.List;
import org.junit.jupiter.api.extension.Extension;
import org.junit.jupiter.api.extension.TestTemplateInvocationContext;
@@ -49,6 +50,6 @@ public class HBaseParameterizedInvocationContext implements
TestTemplateInvocati
@Override
public List<Extension> getAdditionalExtensions() {
- return List.of(new HBaseParameterizedParameterResolver(arguments));
+ return Collections.singletonList(new
HBaseParameterizedParameterResolver(arguments));
}
}
diff --git
a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java
b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java
index de17960d151..1df5b22a702 100644
---
a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java
+++
b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java
@@ -23,6 +23,8 @@ import
org.junit.jupiter.api.extension.ParameterResolutionException;
import org.junit.jupiter.api.extension.ParameterResolver;
import org.junit.jupiter.params.provider.Arguments;
+import org.apache.hbase.thirdparty.com.google.common.primitives.Primitives;
+
/**
* @see HBaseParameterizedTestTemplate
*/
@@ -38,8 +40,20 @@ public class HBaseParameterizedParameterResolver implements
ParameterResolver {
public boolean supportsParameter(ParameterContext pc, ExtensionContext ec)
throws ParameterResolutionException {
int index = pc.getIndex();
- return index < values.length
- &&
pc.getParameter().getType().isAssignableFrom(values[index].getClass());
+ if (index >= values.length) {
+ return false;
+ }
+ Object value = values[index];
+ Class<?> expectedType = pc.getParameter().getType();
+ if (expectedType.isPrimitive()) {
+ // primitive type can not accept null value
+ if (value == null) {
+ return false;
+ }
+ // test with wrapper type, otherwise it will always return false
+ return Primitives.wrap(expectedType).isAssignableFrom(value.getClass());
+ }
+ return expectedType.isAssignableFrom(value.getClass());
}
@Override
diff --git
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java
index 17a94c99956..32f8d635ab4 100644
---
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java
+++
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java
@@ -30,7 +30,6 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.client.Scan;
-import
org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProviders;
import org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -44,7 +43,6 @@ import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
-import org.junit.After;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -60,11 +58,6 @@ public class TestTableMapReduceUtil {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestTableMapReduceUtil.class);
- @After
- public void after() {
- SaslClientAuthenticationProviders.reset();
- }
-
/*
* initTableSnapshotMapperJob is tested in {@link
TestTableSnapshotInputFormat} because the method
* depends on an online cluster.
diff --git
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
index 189803f9e01..3e702541acb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
@@ -55,6 +55,7 @@ import org.apache.hadoop.hbase.security.SaslUtil;
import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
+import
org.apache.hadoop.hbase.security.provider.SaslServerAuthenticationProviders;
import org.apache.hadoop.hbase.security.token.AuthenticationTokenSecretManager;
import org.apache.hadoop.hbase.util.CoprocessorConfigurationUtil;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -100,6 +101,7 @@ public abstract class RpcServer implements
RpcServerInterface, ConfigurationObse
private final boolean authorize;
private volatile boolean isOnlineLogProviderEnabled;
protected boolean isSecurityEnabled;
+ protected final SaslServerAuthenticationProviders saslProviders;
public static final byte CURRENT_VERSION = 0;
@@ -314,6 +316,7 @@ public abstract class RpcServer implements
RpcServerInterface, ConfigurationObse
saslProps = Collections.emptyMap();
serverPrincipal = HConstants.EMPTY_STRING;
}
+ this.saslProviders = new SaslServerAuthenticationProviders(conf);
this.isOnlineLogProviderEnabled = getIsOnlineLogProviderEnabled(conf);
this.scheduler = scheduler;
diff --git
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java
index 7339f678d37..aaea74d5933 100644
---
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java
+++
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java
@@ -55,7 +55,6 @@ import org.apache.hadoop.hbase.security.SaslStatus;
import org.apache.hadoop.hbase.security.SaslUtil;
import org.apache.hadoop.hbase.security.User;
import
org.apache.hadoop.hbase.security.provider.SaslServerAuthenticationProvider;
-import
org.apache.hadoop.hbase.security.provider.SaslServerAuthenticationProviders;
import
org.apache.hadoop.hbase.security.provider.SimpleSaslServerAuthenticationProvider;
import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
@@ -137,13 +136,11 @@ abstract class ServerRpcConnection implements Closeable {
protected User user = null;
protected UserGroupInformation ugi = null;
- protected SaslServerAuthenticationProviders saslProviders = null;
protected X509Certificate[] clientCertificateChain = null;
public ServerRpcConnection(RpcServer rpcServer) {
this.rpcServer = rpcServer;
this.callCleanup = null;
- this.saslProviders =
SaslServerAuthenticationProviders.getInstance(rpcServer.getConf());
}
@Override
@@ -784,7 +781,7 @@ abstract class ServerRpcConnection implements Closeable {
return PreambleResponse.CLOSE;
}
- this.provider = this.saslProviders.selectProvider(authByte);
+ this.provider = rpcServer.saslProviders.selectProvider(authByte);
if (this.provider == null) {
String msg = getFatalConnectionString(version, authByte);
doBadPreambleHandling(msg, new BadAuthException(msg));
@@ -804,7 +801,7 @@ abstract class ServerRpcConnection implements Closeable {
if (!this.rpcServer.isSecurityEnabled && !isSimpleAuthentication()) {
doRawSaslReply(SaslStatus.SUCCESS, new
IntWritable(SaslUtil.SWITCH_TO_SIMPLE_AUTH), null,
null);
- provider = saslProviders.getSimpleProvider();
+ provider = rpcServer.saslProviders.getSimpleProvider();
// client has already sent the initial Sasl message and we
// should ignore it. Both client and server should fall back
// to simple auth from now on.
diff --git
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/provider/SaslServerAuthenticationProviders.java
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/provider/SaslServerAuthenticationProviders.java
index 17480cb4659..385b2fb79f4 100644
---
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/provider/SaslServerAuthenticationProviders.java
+++
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/provider/SaslServerAuthenticationProviders.java
@@ -19,7 +19,9 @@ package org.apache.hadoop.hbase.security.provider;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
+import java.util.Collections;
import java.util.HashMap;
+import java.util.Map;
import java.util.Optional;
import java.util.ServiceLoader;
import java.util.concurrent.atomic.AtomicReference;
@@ -35,14 +37,46 @@ public final class SaslServerAuthenticationProviders {
LoggerFactory.getLogger(SaslClientAuthenticationProviders.class);
public static final String EXTRA_PROVIDERS_KEY =
"hbase.server.sasl.provider.extras";
- private static final AtomicReference<SaslServerAuthenticationProviders>
holder =
+
+ private static final AtomicReference<SaslServerAuthenticationProviders>
HOLDER =
new AtomicReference<>();
- private final HashMap<Byte, SaslServerAuthenticationProvider> providers;
+ private final Map<Byte, SaslServerAuthenticationProvider> providers;
- private SaslServerAuthenticationProviders(Configuration conf,
- HashMap<Byte, SaslServerAuthenticationProvider> providers) {
- this.providers = providers;
+ /**
+ * Creates a new instance of SaslServerAuthenticationProviders.
+ * @param conf the configuration to use for loading providers
+ */
+ public SaslServerAuthenticationProviders(Configuration conf) {
+ ServiceLoader<SaslServerAuthenticationProvider> loader =
+ ServiceLoader.load(SaslServerAuthenticationProvider.class);
+ HashMap<Byte, SaslServerAuthenticationProvider> providerMap = new
HashMap<>();
+ for (SaslServerAuthenticationProvider provider : loader) {
+ addProviderIfNotExists(provider, providerMap);
+ }
+
+ addExtraProviders(conf, providerMap);
+
+ if (LOG.isTraceEnabled()) {
+ String loadedProviders = providerMap.values().stream()
+ .map((provider) ->
provider.getClass().getName()).collect(Collectors.joining(", "));
+ if (loadedProviders.isEmpty()) {
+ loadedProviders = "None!";
+ }
+ LOG.trace("Found SaslServerAuthenticationProviders {}", loadedProviders);
+ }
+
+ // Initialize the providers once, before we get into the RPC path.
+ providerMap.forEach((b, provider) -> {
+ try {
+ // Give them a copy, just to make sure there is no funny-business
going on.
+ provider.init(new Configuration(conf));
+ } catch (IOException e) {
+ LOG.error("Failed to initialize {}", provider.getClass(), e);
+ throw new RuntimeException("Failed to initialize " +
provider.getClass().getName(), e);
+ }
+ });
+ this.providers = Collections.unmodifiableMap(providerMap);
}
/**
@@ -54,19 +88,23 @@ public final class SaslServerAuthenticationProviders {
/**
* Returns a singleton instance of {@link SaslServerAuthenticationProviders}.
+ * @deprecated Since 2.5.14 and 2.6.4, will be removed in newer minor
release lines. This class
+ * should not be a singleton; please do not use it anymore. See
HBASE-29144 for more
+ * details.
*/
+ @Deprecated
public static SaslServerAuthenticationProviders getInstance(Configuration
conf) {
- SaslServerAuthenticationProviders providers = holder.get();
+ SaslServerAuthenticationProviders providers = HOLDER.get();
if (null == providers) {
- synchronized (holder) {
+ synchronized (HOLDER) {
// Someone else beat us here
- providers = holder.get();
+ providers = HOLDER.get();
if (null != providers) {
return providers;
}
- providers = createProviders(conf);
- holder.set(providers);
+ providers = new SaslServerAuthenticationProviders(conf);
+ HOLDER.set(providers);
}
}
return providers;
@@ -74,10 +112,14 @@ public final class SaslServerAuthenticationProviders {
/**
* Removes the cached singleton instance of {@link
SaslServerAuthenticationProviders}.
+ * @deprecated Since 2.5.14 and 2.6.4, will be removed in newer minor
release lines. This class
+ * should not be a singleton; please do not use it anymore. See
HBASE-29144 for more
+ * details.
*/
+ @Deprecated
public static void reset() {
- synchronized (holder) {
- holder.set(null);
+ synchronized (HOLDER) {
+ HOLDER.set(null);
}
}
@@ -129,43 +171,6 @@ public final class SaslServerAuthenticationProviders {
}
}
- /**
- * Loads server authentication providers from the classpath and
configuration, and then creates
- * the SaslServerAuthenticationProviders instance.
- */
- static SaslServerAuthenticationProviders createProviders(Configuration conf)
{
- ServiceLoader<SaslServerAuthenticationProvider> loader =
- ServiceLoader.load(SaslServerAuthenticationProvider.class);
- HashMap<Byte, SaslServerAuthenticationProvider> providers = new
HashMap<>();
- for (SaslServerAuthenticationProvider provider : loader) {
- addProviderIfNotExists(provider, providers);
- }
-
- addExtraProviders(conf, providers);
-
- if (LOG.isTraceEnabled()) {
- String loadedProviders = providers.values().stream()
- .map((provider) ->
provider.getClass().getName()).collect(Collectors.joining(", "));
- if (loadedProviders.isEmpty()) {
- loadedProviders = "None!";
- }
- LOG.trace("Found SaslServerAuthenticationProviders {}", loadedProviders);
- }
-
- // Initialize the providers once, before we get into the RPC path.
- providers.forEach((b, provider) -> {
- try {
- // Give them a copy, just to make sure there is no funny-business
going on.
- provider.init(new Configuration(conf));
- } catch (IOException e) {
- LOG.error("Failed to initialize {}", provider.getClass(), e);
- throw new RuntimeException("Failed to initialize " +
provider.getClass().getName(), e);
- }
- });
-
- return new SaslServerAuthenticationProviders(conf, providers);
- }
-
/**
* Selects the appropriate SaslServerAuthenticationProvider from those
available. If there is no
* matching provider for the given {@code authByte}, this method will return
null.
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBasicReadWriteWithDifferentConnectionRegistries.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/BasicReadWriteWithDifferentConnectionRegistriesTestBase.java
similarity index 63%
copy from
hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBasicReadWriteWithDifferentConnectionRegistries.java
copy to
hbase-server/src/test/java/org/apache/hadoop/hbase/client/BasicReadWriteWithDifferentConnectionRegistriesTestBase.java
index 5746ffa67f6..ba15df7ea88 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBasicReadWriteWithDifferentConnectionRegistries.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/BasicReadWriteWithDifferentConnectionRegistriesTestBase.java
@@ -20,49 +20,27 @@ package org.apache.hadoop.hbase.client;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertFalse;
+import java.io.IOException;
import java.net.URI;
-import java.util.ArrayList;
-import java.util.List;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.TableNameTestRule;
-import org.apache.hadoop.hbase.testclassification.ClientTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.TestInfo;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.EnumSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-/**
- * Test basic read write operation with different {@link ConnectionRegistry}
implementations.
- */
-@RunWith(Parameterized.class)
-@Category({ MediumTests.class, ClientTests.class })
-public class TestBasicReadWriteWithDifferentConnectionRegistries {
-
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
-
HBaseClassTestRule.forClass(TestBasicReadWriteWithDifferentConnectionRegistries.class);
+public abstract class BasicReadWriteWithDifferentConnectionRegistriesTestBase {
private static final Logger LOG =
LoggerFactory.getLogger(TestBasicReadWriteWithDifferentConnectionRegistries.class);
- private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
+ protected static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
public enum RegistryImpl {
ZK,
@@ -71,40 +49,20 @@ public class
TestBasicReadWriteWithDifferentConnectionRegistries {
RPC_URI
}
- @Parameter
- public RegistryImpl impl;
-
- @Rule
- public final TableNameTestRule name = new TableNameTestRule();
+ private TableName tableName;
private byte[] FAMILY = Bytes.toBytes("family");
private Connection conn;
- @Parameters(name = "{index}: impl={0}")
- public static List<Object[]> data() {
- List<Object[]> data = new ArrayList<Object[]>();
- for (RegistryImpl impl : RegistryImpl.values()) {
- data.add(new Object[] { impl });
- }
- return data;
- }
-
- @BeforeClass
- public static void setUpBeforeClass() throws Exception {
- UTIL.startMiniCluster();
- }
+ protected abstract Configuration getConf();
- @AfterClass
- public static void tearDownAfterClass() throws Exception {
- UTIL.shutdownMiniCluster();
- }
+ protected abstract Connection createConn(URI uri) throws IOException;
- @Before
- public void setUp() throws Exception {
+ private void init(RegistryImpl impl) throws Exception {
switch (impl) {
case ZK: {
- Configuration conf = HBaseConfiguration.create();
+ Configuration conf = getConf();
conf.setClass(HConstants.CLIENT_CONNECTION_REGISTRY_IMPL_CONF_KEY,
ZKConnectionRegistry.class, ConnectionRegistry.class);
String quorum = UTIL.getZkCluster().getAddress().toString();
@@ -116,7 +74,7 @@ public class
TestBasicReadWriteWithDifferentConnectionRegistries {
break;
}
case RPC: {
- Configuration conf = HBaseConfiguration.create();
+ Configuration conf = getConf();
conf.setClass(HConstants.CLIENT_CONNECTION_REGISTRY_IMPL_CONF_KEY,
RpcConnectionRegistry.class, ConnectionRegistry.class);
String bootstrapServers =
@@ -131,28 +89,33 @@ public class
TestBasicReadWriteWithDifferentConnectionRegistries {
String path =
UTIL.getConfiguration().get(HConstants.ZOOKEEPER_ZNODE_PARENT);
URI connectionUri = new URI("hbase+zk://" + quorum + path);
LOG.info("connect to cluster through connection url: {}",
connectionUri);
- conn = ConnectionFactory.createConnection(connectionUri);
+ conn = createConn(connectionUri);
break;
}
case RPC_URI: {
URI connectionUri = new URI("hbase+rpc://"
+
UTIL.getMiniHBaseCluster().getMaster().getServerName().getAddress().toString());
LOG.info("connect to cluster through connection url: {}",
connectionUri);
- conn = ConnectionFactory.createConnection(connectionUri);
+ conn = createConn(connectionUri);
break;
}
default:
throw new IllegalArgumentException("Unknown impl: " + impl);
}
try (Admin admin = conn.getAdmin()) {
- admin.createTable(TableDescriptorBuilder.newBuilder(name.getTableName())
+ admin.createTable(TableDescriptorBuilder.newBuilder(tableName)
.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).build());
}
}
- @After
+ @BeforeEach
+ public void setUp(TestInfo testInfo) {
+ String displayName = testInfo.getTestMethod().get().getName() +
testInfo.getDisplayName();
+ tableName = TableName.valueOf(displayName.replaceAll("[ \\[\\]]", "_"));
+ }
+
+ @AfterEach
public void tearDown() throws Exception {
- TableName tableName = name.getTableName();
try (Admin admin = conn.getAdmin()) {
admin.disableTable(tableName);
admin.deleteTable(tableName);
@@ -160,12 +123,14 @@ public class
TestBasicReadWriteWithDifferentConnectionRegistries {
conn.close();
}
- @Test
- public void testReadWrite() throws Exception {
+ @ParameterizedTest
+ @EnumSource(value = RegistryImpl.class)
+ public void testReadWrite(RegistryImpl impl) throws Exception {
+ init(impl);
byte[] row = Bytes.toBytes("row");
byte[] qualifier = Bytes.toBytes("qualifier");
byte[] value = Bytes.toBytes("value");
- try (Table table = conn.getTable(name.getTableName())) {
+ try (Table table = conn.getTable(tableName)) {
Put put = new Put(row).addColumn(FAMILY, qualifier, value);
table.put(put);
Result result = table.get(new Get(row));
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBasicReadWriteWithDifferentConnectionRegistries.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBasicReadWriteWithDifferentConnectionRegistries.java
index 5746ffa67f6..d431c50b003 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBasicReadWriteWithDifferentConnectionRegistries.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBasicReadWriteWithDifferentConnectionRegistries.java
@@ -17,161 +17,43 @@
*/
package org.apache.hadoop.hbase.client;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertFalse;
-
+import java.io.IOException;
import java.net.URI;
-import java.util.ArrayList;
-import java.util.List;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HBaseTestingUtil;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.TableNameTestRule;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
/**
* Test basic read write operation with different {@link ConnectionRegistry}
implementations.
*/
-@RunWith(Parameterized.class)
-@Category({ MediumTests.class, ClientTests.class })
-public class TestBasicReadWriteWithDifferentConnectionRegistries {
-
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
-
HBaseClassTestRule.forClass(TestBasicReadWriteWithDifferentConnectionRegistries.class);
-
- private static final Logger LOG =
-
LoggerFactory.getLogger(TestBasicReadWriteWithDifferentConnectionRegistries.class);
-
- private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
-
- public enum RegistryImpl {
- ZK,
- RPC,
- ZK_URI,
- RPC_URI
- }
-
- @Parameter
- public RegistryImpl impl;
+@Tag(MediumTests.TAG)
+@Tag(ClientTests.TAG)
+public class TestBasicReadWriteWithDifferentConnectionRegistries
+ extends BasicReadWriteWithDifferentConnectionRegistriesTestBase {
- @Rule
- public final TableNameTestRule name = new TableNameTestRule();
-
- private byte[] FAMILY = Bytes.toBytes("family");
-
- private Connection conn;
-
- @Parameters(name = "{index}: impl={0}")
- public static List<Object[]> data() {
- List<Object[]> data = new ArrayList<Object[]>();
- for (RegistryImpl impl : RegistryImpl.values()) {
- data.add(new Object[] { impl });
- }
- return data;
- }
-
- @BeforeClass
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
UTIL.startMiniCluster();
}
- @AfterClass
+ @AfterAll
public static void tearDownAfterClass() throws Exception {
UTIL.shutdownMiniCluster();
}
- @Before
- public void setUp() throws Exception {
- switch (impl) {
- case ZK: {
- Configuration conf = HBaseConfiguration.create();
- conf.setClass(HConstants.CLIENT_CONNECTION_REGISTRY_IMPL_CONF_KEY,
- ZKConnectionRegistry.class, ConnectionRegistry.class);
- String quorum = UTIL.getZkCluster().getAddress().toString();
- String path =
UTIL.getConfiguration().get(HConstants.ZOOKEEPER_ZNODE_PARENT);
- conf.set(HConstants.CLIENT_ZOOKEEPER_QUORUM, quorum);
- conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, path);
- LOG.info("connect to cluster through zk quorum={} and parent={}",
quorum, path);
- conn = ConnectionFactory.createConnection(conf);
- break;
- }
- case RPC: {
- Configuration conf = HBaseConfiguration.create();
- conf.setClass(HConstants.CLIENT_CONNECTION_REGISTRY_IMPL_CONF_KEY,
- RpcConnectionRegistry.class, ConnectionRegistry.class);
- String bootstrapServers =
-
UTIL.getMiniHBaseCluster().getMaster().getServerName().getAddress().toString();
- conf.set(RpcConnectionRegistry.BOOTSTRAP_NODES, bootstrapServers);
- LOG.info("connect to cluster through rpc bootstrap servers={}",
bootstrapServers);
- conn = ConnectionFactory.createConnection(conf);
- break;
- }
- case ZK_URI: {
- String quorum = UTIL.getZkCluster().getAddress().toString();
- String path =
UTIL.getConfiguration().get(HConstants.ZOOKEEPER_ZNODE_PARENT);
- URI connectionUri = new URI("hbase+zk://" + quorum + path);
- LOG.info("connect to cluster through connection url: {}",
connectionUri);
- conn = ConnectionFactory.createConnection(connectionUri);
- break;
- }
- case RPC_URI: {
- URI connectionUri = new URI("hbase+rpc://"
- +
UTIL.getMiniHBaseCluster().getMaster().getServerName().getAddress().toString());
- LOG.info("connect to cluster through connection url: {}",
connectionUri);
- conn = ConnectionFactory.createConnection(connectionUri);
- break;
- }
- default:
- throw new IllegalArgumentException("Unknown impl: " + impl);
- }
- try (Admin admin = conn.getAdmin()) {
- admin.createTable(TableDescriptorBuilder.newBuilder(name.getTableName())
- .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).build());
- }
- }
-
- @After
- public void tearDown() throws Exception {
- TableName tableName = name.getTableName();
- try (Admin admin = conn.getAdmin()) {
- admin.disableTable(tableName);
- admin.deleteTable(tableName);
- }
- conn.close();
+ // here we want to test connecting to the HBase cluster with connection
registry only configuration or
+ // only a connection URI, so in the methods below we just use an empty
configuration
+ @Override
+ protected Configuration getConf() {
+ return HBaseConfiguration.create();
}
- @Test
- public void testReadWrite() throws Exception {
- byte[] row = Bytes.toBytes("row");
- byte[] qualifier = Bytes.toBytes("qualifier");
- byte[] value = Bytes.toBytes("value");
- try (Table table = conn.getTable(name.getTableName())) {
- Put put = new Put(row).addColumn(FAMILY, qualifier, value);
- table.put(put);
- Result result = table.get(new Get(row));
- assertArrayEquals(value, result.getValue(FAMILY, qualifier));
- table.delete(new Delete(row));
- assertFalse(table.exists(new Get(row)));
- }
+ @Override
+ protected Connection createConn(URI uri) throws IOException {
+ return ConnectionFactory.createConnection(uri);
}
}
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSecureBasicReadWriteWithDifferentConnectionRegistries.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSecureBasicReadWriteWithDifferentConnectionRegistries.java
new file mode 100644
index 00000000000..a1b31f0cc36
--- /dev/null
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSecureBasicReadWriteWithDifferentConnectionRegistries.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URI;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.ipc.RpcClient;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+
+/**
+ * Test basic read write operation with different {@link ConnectionRegistry}
implementations when
+ * security is enabled.
+ */
+@Tag(MediumTests.TAG)
+@Tag(ClientTests.TAG)
+public class TestSecureBasicReadWriteWithDifferentConnectionRegistries
+ extends BasicReadWriteWithDifferentConnectionRegistriesTestBase {
+
+ protected static final File KEYTAB_FILE =
+ new File(UTIL.getDataTestDir("keytab").toUri().getPath());
+
+ private static MiniKdc KDC;
+ private static String HOST = "localhost";
+ private static String PRINCIPAL;
+ private static String HTTP_PRINCIPAL;
+
+ protected static void stopKDC() {
+ if (KDC != null) {
+ KDC.stop();
+ }
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ KDC = UTIL.setupMiniKdc(KEYTAB_FILE);
+ PRINCIPAL = "hbase/" + HOST;
+ HTTP_PRINCIPAL = "HTTP/" + HOST;
+ KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL, HTTP_PRINCIPAL);
+ // set a smaller timeout and retry to speed up tests
+ UTIL.getConfiguration().setInt(RpcClient.SOCKET_TIMEOUT_READ, 2000);
+ UTIL.getConfiguration().setInt("hbase.security.relogin.maxretries", 1);
+ UTIL.getConfiguration().setInt("hbase.security.relogin.maxbackoff", 100);
+ UTIL.startSecureMiniCluster(KDC, PRINCIPAL, HTTP_PRINCIPAL);
+ }
+
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ UTIL.shutdownMiniCluster();
+ }
+
+ // for connecting to a secure HBase cluster, we need to get some information
from Configuration, so
+ // here we need to use UTIL.getConfiguration to get the security-related
information
+ @Override
+ protected Configuration getConf() {
+ return new Configuration(UTIL.getConfiguration());
+ }
+
+ @Override
+ protected Connection createConn(URI uri) throws IOException {
+ return ConnectionFactory.createConnection(uri, UTIL.getConfiguration());
+ }
+}
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcConnectionHeader.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcConnectionHeader.java
index 512501be82a..c86411f8c1b 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcConnectionHeader.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcConnectionHeader.java
@@ -20,12 +20,14 @@ package org.apache.hadoop.hbase.ipc;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
+import java.util.Collections;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.net.Address;
import org.apache.hadoop.hbase.security.User;
+import
org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProviders;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.ClassRule;
@@ -34,6 +36,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap;
+
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;
@@ -57,10 +61,11 @@ public class TestRpcConnectionHeader {
}
private class MyRpcConnection extends RpcConnection {
+
protected MyRpcConnection(Configuration conf, Map<String, byte[]>
connectionAttributes)
throws IOException {
- super(conf, null, connectionId, "cluster-id", false, null, null, null,
null,
- connectionAttributes);
+ super(conf, null, connectionId, "cluster-id", false,
+ new SaslClientAuthenticationProviders(conf), null, null, null, null,
connectionAttributes);
}
@Override
@@ -89,7 +94,7 @@ public class TestRpcConnectionHeader {
public void testEmptyHeaders() throws IOException {
Configuration configuration = HBaseConfiguration.create();
- MyRpcConnection connection = new MyRpcConnection(configuration, Map.of());
+ MyRpcConnection connection = new MyRpcConnection(configuration,
Collections.emptyMap());
RPCProtos.ConnectionHeader connectionHeader =
connection.getConnectionHeader();
assertEquals(0, connectionHeader.getAttributeCount());
@@ -100,7 +105,7 @@ public class TestRpcConnectionHeader {
Configuration configuration = HBaseConfiguration.create();
configuration.set("hbase.client.header.test", "true");
- MyRpcConnection connection = new MyRpcConnection(configuration, Map.of());
+ MyRpcConnection connection = new MyRpcConnection(configuration,
Collections.emptyMap());
RPCProtos.ConnectionHeader connectionHeader =
connection.getConnectionHeader();
assertEquals(1, connectionHeader.getAttributeCount());
@@ -116,7 +121,8 @@ public class TestRpcConnectionHeader {
configuration.set("hbase.client.header.test", "true");
configuration.set("hbase.client.header.test2", "true");
- Map<String, byte[]> attributes = Map.of("hbase.client.header.test",
Bytes.toBytes("false"));
+ Map<String, byte[]> attributes =
+ ImmutableMap.of("hbase.client.header.test", Bytes.toBytes("false"));
MyRpcConnection connection = new MyRpcConnection(configuration,
attributes);
RPCProtos.ConnectionHeader connectionHeader =
connection.getConnectionHeader();
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java
index 31e01a98ad6..ad61b8131fe 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java
@@ -23,11 +23,11 @@ import static
org.apache.hadoop.hbase.security.HBaseKerberosUtils.getKeytabFileF
import static
org.apache.hadoop.hbase.security.HBaseKerberosUtils.getPrincipalForTesting;
import static
org.apache.hadoop.hbase.security.HBaseKerberosUtils.loginKerberosPrincipal;
import static
org.apache.hadoop.hbase.security.HBaseKerberosUtils.setSecuredConfiguration;
-import static
org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProviders.SELECTOR_KEY;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.either;
import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThrows;
@@ -42,8 +42,6 @@ import java.net.InetSocketAddress;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collections;
-import java.util.Map;
-import javax.security.sasl.SaslClient;
import javax.security.sasl.SaslException;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hadoop.conf.Configuration;
@@ -56,17 +54,10 @@ import org.apache.hadoop.hbase.ipc.RpcClient;
import org.apache.hadoop.hbase.ipc.RpcClientFactory;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.ipc.RpcServerFactory;
-import
org.apache.hadoop.hbase.security.provider.AuthenticationProviderSelector;
-import org.apache.hadoop.hbase.security.provider.BuiltInProviderSelector;
-import org.apache.hadoop.hbase.security.provider.SaslAuthMethod;
-import
org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProvider;
-import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
-import org.junit.Test;
+import org.junit.jupiter.api.TestTemplate;
import org.mockito.Mockito;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
@@ -74,7 +65,6 @@ import
org.apache.hbase.thirdparty.com.google.protobuf.BlockingService;
import org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos;
import
org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface;
-import
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation;
public class AbstractTestSecureIPC {
@@ -120,7 +110,7 @@ public class AbstractTestSecureIPC {
setSecuredConfiguration(serverConf);
}
- @Test
+ @TestTemplate
public void testRpcCallWithEnabledKerberosSaslAuth() throws Exception {
UserGroupInformation ugi2 = UserGroupInformation.getCurrentUser();
@@ -132,8 +122,16 @@ public class AbstractTestSecureIPC {
callRpcService(User.create(ugi2));
}
- @Test
- public void testRpcCallWithEnabledKerberosSaslAuthCanonicalHostname() throws
Exception {
+ private static void setCanonicalHostName(InetAddress addr, String
canonicalHostName)
+ throws Exception {
+ final Field field =
InetAddress.class.getDeclaredField("canonicalHostName");
+ field.setAccessible(true);
+ field.set(addr, canonicalHostName);
+
+ }
+
+ @TestTemplate
+ public void testRpcCallWithKerberosSaslAuthCanonicalHostname() throws
Exception {
UserGroupInformation ugi2 = UserGroupInformation.getCurrentUser();
// check that the login user is okay:
@@ -141,109 +139,33 @@ public class AbstractTestSecureIPC {
assertEquals(AuthenticationMethod.KERBEROS, ugi.getAuthenticationMethod());
assertEquals(krbPrincipal, ugi.getUserName());
- enableCanonicalHostnameTesting(clientConf, "localhost");
clientConf.setBoolean(
SecurityConstants.UNSAFE_HBASE_CLIENT_KERBEROS_HOSTNAME_DISABLE_REVERSEDNS,
false);
clientConf.set(HBaseKerberosUtils.KRB_PRINCIPAL, "hbase/_HOST@" +
KDC.getRealm());
- callRpcService(User.create(ugi2));
- }
-
- @Test
- public void testRpcCallWithEnabledKerberosSaslAuthNoCanonicalHostname()
throws Exception {
- UserGroupInformation ugi2 = UserGroupInformation.getCurrentUser();
-
- // check that the login user is okay:
- assertSame(ugi2, ugi);
- assertEquals(AuthenticationMethod.KERBEROS, ugi.getAuthenticationMethod());
- assertEquals(krbPrincipal, ugi.getUserName());
+ // The InetAddress for localhost is always the same, so here we just
modify it to simulate
+ // hostname mismatch
+ InetAddress addr = InetAddress.getByName(HOST);
+ String originalCanonicalHostname = addr.getCanonicalHostName();
+ assertNotEquals("127.0.0.1", originalCanonicalHostname);
+ setCanonicalHostName(addr, "127.0.0.1");
+ // should fail because the canonical hostname does not match the principal
name
+ assertThrows(Exception.class, () -> callRpcService(User.create(ugi2)));
- enableCanonicalHostnameTesting(clientConf, "127.0.0.1");
clientConf
.setBoolean(SecurityConstants.UNSAFE_HBASE_CLIENT_KERBEROS_HOSTNAME_DISABLE_REVERSEDNS,
true);
- clientConf.set(HBaseKerberosUtils.KRB_PRINCIPAL, "hbase/_HOST@" +
KDC.getRealm());
-
+ // should pass since we do not use canonical hostname
callRpcService(User.create(ugi2));
- }
-
- private static void enableCanonicalHostnameTesting(Configuration conf,
String canonicalHostname) {
- conf.setClass(SELECTOR_KEY,
CanonicalHostnameTestingAuthenticationProviderSelector.class,
- AuthenticationProviderSelector.class);
-
conf.set(CanonicalHostnameTestingAuthenticationProviderSelector.CANONICAL_HOST_NAME_KEY,
- canonicalHostname);
- }
-
- public static class CanonicalHostnameTestingAuthenticationProviderSelector
- extends BuiltInProviderSelector {
- private static final String CANONICAL_HOST_NAME_KEY =
-
"CanonicalHostnameTestingAuthenticationProviderSelector.canonicalHostName";
-
- @Override
- public Pair<SaslClientAuthenticationProvider, Token<? extends
TokenIdentifier>>
- selectProvider(String clusterId, User user) {
- final Pair<SaslClientAuthenticationProvider, Token<? extends
TokenIdentifier>> pair =
- super.selectProvider(clusterId, user);
- pair.setFirst(createCanonicalHostNameTestingProvider(pair.getFirst()));
- return pair;
- }
-
- SaslClientAuthenticationProvider
- createCanonicalHostNameTestingProvider(SaslClientAuthenticationProvider
delegate) {
- return new SaslClientAuthenticationProvider() {
- @Override
- public SaslClient createClient(Configuration conf, InetAddress
serverAddr,
- String serverPrincipal, Token<? extends TokenIdentifier> token,
boolean fallbackAllowed,
- Map<String, String> saslProps) throws IOException {
- final String s = conf.get(CANONICAL_HOST_NAME_KEY);
- if (s != null) {
- try {
- final Field canonicalHostName =
- InetAddress.class.getDeclaredField("canonicalHostName");
- canonicalHostName.setAccessible(true);
- canonicalHostName.set(serverAddr, s);
- } catch (NoSuchFieldException | IllegalAccessException e) {
- throw new RuntimeException(e);
- }
- }
-
- return delegate.createClient(conf, serverAddr, serverPrincipal,
token, fallbackAllowed,
- saslProps);
- }
-
- @Override
- public UserInformation getUserInfo(User user) {
- return delegate.getUserInfo(user);
- }
-
- @Override
- public UserGroupInformation getRealUser(User ugi) {
- return delegate.getRealUser(ugi);
- }
-
- @Override
- public boolean canRetry() {
- return delegate.canRetry();
- }
- @Override
- public void relogin() throws IOException {
- delegate.relogin();
- }
-
- @Override
- public SaslAuthMethod getSaslAuthMethod() {
- return delegate.getSaslAuthMethod();
- }
-
- @Override
- public String getTokenKind() {
- return delegate.getTokenKind();
- }
- };
- }
+ clientConf.setBoolean(
+
SecurityConstants.UNSAFE_HBASE_CLIENT_KERBEROS_HOSTNAME_DISABLE_REVERSEDNS,
false);
+ setCanonicalHostName(addr, originalCanonicalHostname);
+ // should pass since we set the canonical hostname back, which should be
same with the one in
+ // the principal name
+ callRpcService(User.create(ugi2));
}
- @Test
+ @TestTemplate
public void testRpcServerFallbackToSimpleAuth() throws Exception {
String clientUsername = "testuser";
UserGroupInformation clientUgi =
@@ -259,7 +181,7 @@ public class AbstractTestSecureIPC {
callRpcService(User.create(clientUgi));
}
- @Test
+ @TestTemplate
public void testRpcServerDisallowFallbackToSimpleAuth() throws Exception {
String clientUsername = "testuser";
UserGroupInformation clientUgi =
@@ -281,7 +203,7 @@ public class AbstractTestSecureIPC {
}
}
- @Test
+ @TestTemplate
public void testRpcClientFallbackToSimpleAuth() throws Exception {
String serverUsername = "testuser";
UserGroupInformation serverUgi =
@@ -296,7 +218,7 @@ public class AbstractTestSecureIPC {
callRpcService(User.create(serverUgi), User.create(ugi));
}
- @Test
+ @TestTemplate
public void testRpcClientDisallowFallbackToSimpleAuth() throws Exception {
String serverUsername = "testuser";
UserGroupInformation serverUgi =
@@ -320,7 +242,7 @@ public class AbstractTestSecureIPC {
/**
* Test various combinations of Server and Client qops.
*/
- @Test
+ @TestTemplate
public void testSaslWithCommonQop() throws Exception {
setRpcProtection("privacy,authentication", "authentication");
callRpcService();
@@ -338,7 +260,7 @@ public class AbstractTestSecureIPC {
callRpcService();
}
- @Test
+ @TestTemplate
public void testSaslNoCommonQop() throws Exception {
setRpcProtection("integrity", "privacy");
SaslException se = assertThrows(SaslException.class, () ->
callRpcService());
@@ -348,7 +270,7 @@ public class AbstractTestSecureIPC {
/**
* Test sasl encryption with Crypto AES.
*/
- @Test
+ @TestTemplate
public void testSaslWithCryptoAES() throws Exception {
setRpcProtection("privacy", "privacy");
setCryptoAES("true", "true");
@@ -358,7 +280,7 @@ public class AbstractTestSecureIPC {
/**
* Test various combinations of Server and Client configuration for Crypto
AES.
*/
- @Test
+ @TestTemplate
public void testDifferentConfWithCryptoAES() throws Exception {
setRpcProtection("privacy", "privacy");
@@ -446,7 +368,7 @@ public class AbstractTestSecureIPC {
try {
int[] messageSize = new int[] { 100, 1000, 10000 };
for (int i = 0; i < messageSize.length; i++) {
- String input = RandomStringUtils.random(messageSize[i]);
+ String input = RandomStringUtils.insecure().next(messageSize[i]);
String result =
stub.echo(null,
TestProtos.EchoRequestProto.newBuilder().setMessage(input).build())
.getMessage();
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSaslTlsIPC.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSaslTlsIPC.java
index 1120d56fb9f..10863dbcb19 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSaslTlsIPC.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSaslTlsIPC.java
@@ -21,9 +21,10 @@ import java.io.File;
import java.security.Security;
import java.util.ArrayList;
import java.util.List;
+import java.util.stream.Stream;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.io.crypto.tls.KeyStoreFileType;
import org.apache.hadoop.hbase.io.crypto.tls.X509KeyType;
import org.apache.hadoop.hbase.io.crypto.tls.X509TestContext;
@@ -38,63 +39,60 @@ import org.apache.hadoop.hbase.ipc.RpcServerFactory;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.params.provider.Arguments;
-@RunWith(Parameterized.class)
-@Category({ SecurityTests.class, LargeTests.class })
+@Tag(SecurityTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: caKeyType={0},
certKeyType={1}, keyPassword={2},"
+ + " acceptPlainText={3}, clientTlsEnabled={4}")
public class TestSaslTlsIPC extends AbstractTestSecureIPC {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestSaslTlsIPC.class);
-
private static X509TestContextProvider PROVIDER;
- @Parameterized.Parameter(0)
- public X509KeyType caKeyType;
+ private X509KeyType caKeyType;
+
+ private X509KeyType certKeyType;
- @Parameterized.Parameter(1)
- public X509KeyType certKeyType;
+ private char[] keyPassword;
- @Parameterized.Parameter(2)
- public char[] keyPassword;
+ private boolean acceptPlainText;
- @Parameterized.Parameter(3)
- public boolean acceptPlainText;
+ private boolean clientTlsEnabled;
- @Parameterized.Parameter(4)
- public boolean clientTlsEnabled;
+ public TestSaslTlsIPC(X509KeyType caKeyType, X509KeyType certKeyType, char[]
keyPassword,
+ boolean acceptPlainText, boolean clientTlsEnabled) {
+ this.caKeyType = caKeyType;
+ this.certKeyType = certKeyType;
+ this.keyPassword = keyPassword;
+ this.acceptPlainText = acceptPlainText;
+ this.clientTlsEnabled = clientTlsEnabled;
+ }
private X509TestContext x509TestContext;
- @Parameterized.Parameters(
- name = "{index}: caKeyType={0}, certKeyType={1}, keyPassword={2},
acceptPlainText={3},"
- + " clientTlsEnabled={4}")
- public static List<Object[]> data() {
- List<Object[]> params = new ArrayList<>();
+ public static Stream<Arguments> parameters() {
+ List<Arguments> params = new ArrayList<>();
for (X509KeyType caKeyType : X509KeyType.values()) {
for (X509KeyType certKeyType : X509KeyType.values()) {
for (char[] keyPassword : new char[][] { "".toCharArray(),
"pa$$w0rd".toCharArray() }) {
// do not accept plain text
- params.add(new Object[] { caKeyType, certKeyType, keyPassword,
false, true });
+ params.add(Arguments.of(caKeyType, certKeyType, keyPassword, false,
true));
// support plain text and client enables tls
- params.add(new Object[] { caKeyType, certKeyType, keyPassword, true,
true });
+ params.add(Arguments.of(caKeyType, certKeyType, keyPassword, true,
true));
// support plain text and client disables tls
- params.add(new Object[] { caKeyType, certKeyType, keyPassword, true,
false });
+ params.add(Arguments.of(caKeyType, certKeyType, keyPassword, true,
false));
}
}
}
- return params;
+ return params.stream();
}
- @BeforeClass
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
Security.addProvider(new BouncyCastleProvider());
File dir = new
File(TEST_UTIL.getDataTestDir(TestSaslTlsIPC.class.getSimpleName()).toString())
@@ -112,14 +110,14 @@ public class TestSaslTlsIPC extends AbstractTestSecureIPC
{
PROVIDER = new X509TestContextProvider(conf, dir);
}
- @AfterClass
+ @AfterAll
public static void tearDownAfterClass() throws InterruptedException {
stopKDC();
Security.removeProvider(BouncyCastleProvider.PROVIDER_NAME);
TEST_UTIL.cleanupTestDir();
}
- @Before
+ @BeforeEach
public void setUp() throws Exception {
x509TestContext = PROVIDER.get(caKeyType, certKeyType, keyPassword);
x509TestContext.setConfigurations(KeyStoreFileType.JKS,
KeyStoreFileType.JKS);
@@ -129,7 +127,7 @@ public class TestSaslTlsIPC extends AbstractTestSecureIPC {
setUpPrincipalAndConf();
}
- @After
+ @AfterEach
public void tearDown() {
x509TestContext.clearConfigurations();
x509TestContext.getConf().unset(X509Util.TLS_CONFIG_OCSP);
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java
index 991c3a936cb..346a77d66b9 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java
@@ -19,9 +19,9 @@ package org.apache.hadoop.hbase.security;
import java.util.ArrayList;
import java.util.Arrays;
-import java.util.Collection;
import java.util.List;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import java.util.stream.Stream;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.ipc.BlockingRpcClient;
import org.apache.hadoop.hbase.ipc.NettyRpcClient;
import org.apache.hadoop.hbase.ipc.NettyRpcServer;
@@ -30,57 +30,52 @@ import org.apache.hadoop.hbase.ipc.RpcServerFactory;
import org.apache.hadoop.hbase.ipc.SimpleRpcServer;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.params.provider.Arguments;
-@RunWith(Parameterized.class)
-@Category({ SecurityTests.class, LargeTests.class })
+@Tag(SecurityTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: rpcClientImpl={0},
rpcServerImpl={1}")
public class TestSecureIPC extends AbstractTestSecureIPC {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestSecureIPC.class);
-
- @Parameters(name = "{index}: rpcClientImpl={0}, rpcServerImpl={1}")
- public static Collection<Object[]> parameters() {
- List<Object[]> params = new ArrayList<>();
+ public static Stream<Arguments> parameters() {
+ List<Arguments> params = new ArrayList<>();
List<String> rpcClientImpls =
Arrays.asList(BlockingRpcClient.class.getName(),
NettyRpcClient.class.getName());
List<String> rpcServerImpls =
Arrays.asList(SimpleRpcServer.class.getName(),
NettyRpcServer.class.getName());
for (String rpcClientImpl : rpcClientImpls) {
for (String rpcServerImpl : rpcServerImpls) {
- params.add(new Object[] { rpcClientImpl, rpcServerImpl });
+ params.add(Arguments.of(rpcClientImpl, rpcServerImpl));
}
}
- return params;
+ return params.stream();
}
- @Parameter(0)
public String rpcClientImpl;
- @Parameter(1)
public String rpcServerImpl;
- @BeforeClass
+ public TestSecureIPC(String rpcClientImpl, String rpcServerImpl) {
+ this.rpcClientImpl = rpcClientImpl;
+ this.rpcServerImpl = rpcServerImpl;
+ }
+
+ @BeforeAll
public static void setUp() throws Exception {
initKDCAndConf();
}
- @AfterClass
+ @AfterAll
public static void tearDown() throws Exception {
stopKDC();
TEST_UTIL.cleanupTestDir();
}
- @Before
+ @BeforeEach
public void setUpTest() throws Exception {
setUpPrincipalAndConf();
clientConf.set(RpcClientFactory.CUSTOM_RPC_CLIENT_IMPL_CONF_KEY,
rpcClientImpl);
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/provider/TestSaslServerAuthenticationProviders.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/provider/TestSaslServerAuthenticationProviders.java
index 7a55aac74fb..ac557f55790 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/provider/TestSaslServerAuthenticationProviders.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/provider/TestSaslServerAuthenticationProviders.java
@@ -17,16 +17,15 @@
*/
package org.apache.hadoop.hbase.security.provider;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotSame;
-import static org.junit.Assert.assertSame;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertSame;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -34,24 +33,13 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.TokenIdentifier;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
-@Category({ SmallTests.class, SecurityTests.class })
+@Tag(SmallTests.TAG)
+@Tag(SecurityTests.TAG)
public class TestSaslServerAuthenticationProviders {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestSaslServerAuthenticationProviders.class);
-
- @Before
- public void reset() {
- // Clear out any potentially bogus state from the providers class
- SaslServerAuthenticationProviders.reset();
- }
-
@Test
public void testCannotAddTheSameProviderTwice() {
HashMap<Byte, SaslServerAuthenticationProvider> registeredProviders = new
HashMap<>();
@@ -61,30 +49,11 @@ public class TestSaslServerAuthenticationProviders {
SaslServerAuthenticationProviders.addProviderIfNotExists(p1,
registeredProviders);
assertEquals(1, registeredProviders.size());
- try {
- SaslServerAuthenticationProviders.addProviderIfNotExists(p2,
registeredProviders);
- } catch (RuntimeException e) {
- }
-
- assertSame("Expected the original provider to be present", p1,
- registeredProviders.entrySet().iterator().next().getValue());
- }
+ assertThrows(RuntimeException.class,
+ () -> SaslServerAuthenticationProviders.addProviderIfNotExists(p2,
registeredProviders));
- @Test
- public void testInstanceIsCached() {
- Configuration conf = HBaseConfiguration.create();
- SaslServerAuthenticationProviders providers1 =
- SaslServerAuthenticationProviders.getInstance(conf);
- SaslServerAuthenticationProviders providers2 =
- SaslServerAuthenticationProviders.getInstance(conf);
- assertSame(providers1, providers2);
-
- SaslServerAuthenticationProviders.reset();
-
- SaslServerAuthenticationProviders providers3 =
- SaslServerAuthenticationProviders.getInstance(conf);
- assertNotSame(providers1, providers3);
- assertEquals(providers1.getNumRegisteredProviders(),
providers3.getNumRegisteredProviders());
+ assertSame(p1, registeredProviders.entrySet().iterator().next().getValue(),
+ "Expected the original provider to be present");
}
@Test
@@ -93,15 +62,14 @@ public class TestSaslServerAuthenticationProviders {
conf.set(SaslServerAuthenticationProviders.EXTRA_PROVIDERS_KEY,
InitCheckingSaslServerAuthenticationProvider.class.getName());
- SaslServerAuthenticationProviders providers =
- SaslServerAuthenticationProviders.getInstance(conf);
+ SaslServerAuthenticationProviders providers = new
SaslServerAuthenticationProviders(conf);
SaslServerAuthenticationProvider provider =
providers.selectProvider(InitCheckingSaslServerAuthenticationProvider.ID);
assertEquals(InitCheckingSaslServerAuthenticationProvider.class,
provider.getClass());
- assertTrue("Provider was not inititalized",
- ((InitCheckingSaslServerAuthenticationProvider)
provider).isInitialized());
+ assertTrue(((InitCheckingSaslServerAuthenticationProvider)
provider).isInitialized(),
+ "Provider was not inititalized");
}
public static class InitCheckingSaslServerAuthenticationProvider