This is an automated email from the ASF dual-hosted git repository.

elserj pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2.1 by this push:
     new 45ecb21  HBASE-20950 HBASE-23760 Helper method to configure secure DFS cluster for tests
45ecb21 is described below

commit 45ecb21dbfe8fc94260079f44a82cf110e9612e5
Author: Wei-Chiu Chuang <weic...@cloudera.com>
AuthorDate: Fri Jul 27 15:45:33 2018 -0700

    HBASE-20950 HBASE-23760 Helper method to configure secure DFS cluster for tests
    
    Create a helper method HBaseKerberosUtils#setSecuredConfiguration().
    TestSecureExport, TestSaslFanOutOneBlockAsyncDFSOutput,
    SecureTestCluster and TestThriftSpnegoHttpServer use this new helper
    method.
    
    Signed-off-by: tedyu <yuzhih...@gmail.com>
---
 .../hadoop/hbase/coprocessor/TestSecureExport.java | 49 ++-------------
 .../TestSaslFanOutOneBlockAsyncDFSOutput.java      | 38 +-----------
 .../hadoop/hbase/security/HBaseKerberosUtils.java  | 72 +++++++++++++++++++++-
 .../hbase/security/token/SecureTestCluster.java    | 35 +----------
 .../hbase/thrift/TestThriftSpnegoHttpServer.java   | 21 +------
 5 files changed, 80 insertions(+), 135 deletions(-)

diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
index b2ca1d4..31bfd37 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
@@ -27,7 +27,6 @@ import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Properties;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -47,7 +46,6 @@ import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
 import org.apache.hadoop.hbase.mapreduce.ExportUtils;
 import org.apache.hadoop.hbase.mapreduce.Import;
 import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos;
@@ -68,12 +66,9 @@ import org.apache.hadoop.hbase.security.visibility.VisibilityTestUtil;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.minikdc.MiniKdc;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -128,11 +123,7 @@ public class TestSecureExport {
   @Rule
   public final TestName name = new TestName();
   private static void setUpKdcServer() throws Exception {
-    Properties conf = MiniKdc.createConf();
-    conf.put(MiniKdc.DEBUG, true);
-    File kdcFile = new File(UTIL.getDataTestDir("kdc").toUri().getPath());
-    KDC = new MiniKdc(conf, kdcFile);
-    KDC.start();
+    KDC = UTIL.setupMiniKdc(KEYTAB_FILE);
     USERNAME = UserGroupInformation.getLoginUser().getShortUserName();
     SERVER_PRINCIPAL = USERNAME + "/" + LOCALHOST;
     HTTP_PRINCIPAL = "HTTP/" + LOCALHOST;
@@ -157,42 +148,10 @@ public class TestSecureExport {
   }
 
   private static void setUpClusterKdc() throws Exception {
-    HBaseKerberosUtils.setKeytabFileForTesting(KEYTAB_FILE.getAbsolutePath());
-    HBaseKerberosUtils.setPrincipalForTesting(SERVER_PRINCIPAL + "@" + KDC.getRealm());
-    HBaseKerberosUtils.setSecuredConfiguration(UTIL.getConfiguration());
-    // if we drop support for hadoop-2.4.0 and hadoop-2.4.1,
-    // the following key should be changed.
-    // 1) DFS_NAMENODE_USER_NAME_KEY -> DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY
-    // 2) DFS_DATANODE_USER_NAME_KEY -> DFS_DATANODE_KERBEROS_PRINCIPAL_KEY
-    UTIL.getConfiguration().set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY,
-        SERVER_PRINCIPAL + "@" + KDC.getRealm());
-    UTIL.getConfiguration().set(DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY,
-        SERVER_PRINCIPAL + "@" + KDC.getRealm());
-    UTIL.getConfiguration().set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY,
-        KEYTAB_FILE.getAbsolutePath());
-    UTIL.getConfiguration().set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY,
-        KEYTAB_FILE.getAbsolutePath());
-    // set yarn principal
-    UTIL.getConfiguration().set(YarnConfiguration.RM_PRINCIPAL,
-        SERVER_PRINCIPAL + "@" + KDC.getRealm());
-    UTIL.getConfiguration().set(YarnConfiguration.NM_PRINCIPAL,
-        SERVER_PRINCIPAL + "@" + KDC.getRealm());
-    UTIL.getConfiguration().set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
-        HTTP_PRINCIPAL + "@" + KDC.getRealm());
-    UTIL.getConfiguration().setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
-    UTIL.getConfiguration().set(DFSConfigKeys.DFS_HTTP_POLICY_KEY,
-        HttpConfig.Policy.HTTPS_ONLY.name());
-    UTIL.getConfiguration().set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, LOCALHOST + ":0");
-    UTIL.getConfiguration().set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, LOCALHOST + ":0");
+    HBaseKerberosUtils.setSecuredConfiguration(UTIL.getConfiguration(),
+        SERVER_PRINCIPAL + "@" + KDC.getRealm(), HTTP_PRINCIPAL + "@" + KDC.getRealm());
+    HBaseKerberosUtils.setSSLConfiguration(UTIL, TestSecureExport.class);
 
-    File keystoresDir = new File(UTIL.getDataTestDir("keystore").toUri().getPath());
-    keystoresDir.mkdirs();
-    String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSecureExport.class);
-    KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir,
-        UTIL.getConfiguration(), false);
-
-    UTIL.getConfiguration().setBoolean("ignore.secure.ports.for.testing", true);
-    UserGroupInformation.setConfiguration(UTIL.getConfiguration());
     UTIL.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
         UTIL.getConfiguration().get(
             CoprocessorHost.REGION_COPROCESSOR_CONF_KEY) + "," + Export.class.getName());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSaslFanOutOneBlockAsyncDFSOutput.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSaslFanOutOneBlockAsyncDFSOutput.java
index a221a01..cf0ffa2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSaslFanOutOneBlockAsyncDFSOutput.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSaslFanOutOneBlockAsyncDFSOutput.java
@@ -17,19 +17,10 @@
  */
 package org.apache.hadoop.hbase.io.asyncfs;
 
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATA_ENCRYPTION_ALGORITHM_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HTTP_POLICY_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY;
 
 import java.io.File;
 import java.io.IOException;
@@ -47,13 +38,10 @@ import org.apache.hadoop.crypto.key.KeyProviderFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
 import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
-import org.apache.hadoop.hbase.security.token.TestGenerateDelegationToken;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.minikdc.MiniKdc;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.After;
@@ -134,25 +122,6 @@ public class TestSaslFanOutOneBlockAsyncDFSOutput {
     return params;
   }
 
-  private static void setHdfsSecuredConfiguration(Configuration conf) throws Exception {
-    conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, PRINCIPAL + "@" + KDC.getRealm());
-    conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, KEYTAB_FILE.getAbsolutePath());
-    conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, PRINCIPAL + "@" + KDC.getRealm());
-    conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, KEYTAB_FILE.getAbsolutePath());
-    conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, HTTP_PRINCIPAL + "@" + KDC.getRealm());
-    conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
-    conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
-    conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
-    conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
-
-    File keystoresDir = new File(TEST_UTIL.getDataTestDir("keystore").toUri().getPath());
-    keystoresDir.mkdirs();
-    String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestGenerateDelegationToken.class);
-    KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, false);
-
-    conf.setBoolean("ignore.secure.ports.for.testing", true);
-  }
-
   private static void setUpKeyProvider(Configuration conf) throws Exception {
     URI keyProviderUri =
       new URI("jceks://file" + TEST_UTIL.getDataTestDir("test.jks").toUri().toString());
@@ -175,10 +144,9 @@ public class TestSaslFanOutOneBlockAsyncDFSOutput {
     KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL, HTTP_PRINCIPAL);
 
     setUpKeyProvider(TEST_UTIL.getConfiguration());
-    setHdfsSecuredConfiguration(TEST_UTIL.getConfiguration());
-    HBaseKerberosUtils.setPrincipalForTesting(PRINCIPAL + "@" + KDC.getRealm());
-    HBaseKerberosUtils.setSecuredConfiguration(TEST_UTIL.getConfiguration());
-    UserGroupInformation.setConfiguration(TEST_UTIL.getConfiguration());
+    HBaseKerberosUtils.setSecuredConfiguration(TEST_UTIL.getConfiguration(),
+        PRINCIPAL + "@" + KDC.getRealm(), HTTP_PRINCIPAL + "@" + KDC.getRealm());
+    HBaseKerberosUtils.setSSLConfiguration(TEST_UTIL, TestSaslFanOutOneBlockAsyncDFSOutput.class);
   }
 
   @AfterClass
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java
index b946e74..41314ce 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java
@@ -17,18 +17,24 @@
  */
 package org.apache.hadoop.hbase.security;
 
+import java.io.File;
+import java.io.IOException;
+import java.net.InetAddress;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hbase.thirdparty.com.google.common.base.Strings;
 import org.apache.hadoop.security.UserGroupInformation;
 
-import java.io.IOException;
-import java.net.InetAddress;
-
 @InterfaceAudience.Private
 public class HBaseKerberosUtils {
   private static final Logger LOG = LoggerFactory.getLogger(HBaseKerberosUtils.class);
@@ -80,6 +86,19 @@ public class HBaseKerberosUtils {
     return conf;
   }
 
+  /**
+   * Set up configuration for a secure HDFS+HBase cluster.
+   * @param conf configuration object.
+   * @param servicePrincipal service principal used by NN, HM and RS.
+   * @param spnegoPrincipal SPNEGO principal used by NN web UI.
+   */
+  public static void setSecuredConfiguration(Configuration conf,
+      String servicePrincipal, String spnegoPrincipal) {
+    setPrincipalForTesting(servicePrincipal);
+    setSecuredConfiguration(conf);
+    setSecuredHadoopConfiguration(conf, spnegoPrincipal);
+  }
+
   public static void setSecuredConfiguration(Configuration conf) {
     conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
     conf.set(User.HBASE_SECURITY_CONF_KEY, "kerberos");
@@ -89,6 +108,53 @@ public class HBaseKerberosUtils {
     conf.set(MASTER_KRB_PRINCIPAL, System.getProperty(KRB_PRINCIPAL));
   }
 
+  private static void setSecuredHadoopConfiguration(Configuration conf,
+      String spnegoServerPrincipal) {
+    // if we drop support for hadoop-2.4.0 and hadoop-2.4.1,
+    // the following key should be changed.
+    // 1) DFS_NAMENODE_USER_NAME_KEY -> DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY
+    // 2) DFS_DATANODE_USER_NAME_KEY -> DFS_DATANODE_KERBEROS_PRINCIPAL_KEY
+    String serverPrincipal = System.getProperty(KRB_PRINCIPAL);
+    String keytabFilePath = System.getProperty(KRB_KEYTAB_FILE);
+    // HDFS
+    conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, serverPrincipal);
+    conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, keytabFilePath);
+    conf.set(DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY, serverPrincipal);
+    conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, keytabFilePath);
+    conf.setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
+    // YARN
+    conf.set(YarnConfiguration.RM_PRINCIPAL, KRB_PRINCIPAL);
+    conf.set(YarnConfiguration.NM_PRINCIPAL, KRB_PRINCIPAL);
+
+    if (spnegoServerPrincipal != null) {
+      conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
+          spnegoServerPrincipal);
+    }
+
+    conf.setBoolean("ignore.secure.ports.for.testing", true);
+
+    UserGroupInformation.setConfiguration(conf);
+  }
+
+  /**
+   * Set up SSL configuration for HDFS NameNode and DataNode.
+   * @param utility a HBaseTestingUtility object.
+   * @param clazz the caller test class.
+   * @throws Exception if unable to set up SSL configuration
+   */
+  public static void setSSLConfiguration(HBaseTestingUtility utility, Class clazz)
+      throws Exception {
+    Configuration conf = utility.getConfiguration();
+    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
+    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
+    conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
+
+    File keystoresDir = new File(utility.getDataTestDir("keystore").toUri().getPath());
+    keystoresDir.mkdirs();
+    String sslConfDir = KeyStoreTestUtil.getClasspathDir(clazz);
+    KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, false);
+  }
+
   public static UserGroupInformation loginAndReturnUGI(Configuration conf, String username)
       throws IOException {
     String hostname = InetAddress.getLocalHost().getHostName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/SecureTestCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/SecureTestCluster.java
index f5f6859..2263bde 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/SecureTestCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/SecureTestCluster.java
@@ -18,16 +18,12 @@
 
 package org.apache.hadoop.hbase.security.token;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.LocalHBaseCluster;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
 import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
 import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.minikdc.MiniKdc;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.AfterClass;
@@ -56,30 +52,6 @@ public class SecureTestCluster {
   private static String HTTP_PRINCIPAL;
 
   /**
-   * Setup the security configuration for hdfs.
-   */
-  private static void setHdfsSecuredConfiguration(Configuration conf) throws Exception {
-    // change XXX_USER_NAME_KEY to XXX_KERBEROS_PRINCIPAL_KEY after we drop support for hadoop-2.4.1
-    conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, PRINCIPAL + "@" + KDC.getRealm());
-    conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, KEYTAB_FILE.getAbsolutePath());
-    conf.set(DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY, PRINCIPAL + "@" + KDC.getRealm());
-    conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, KEYTAB_FILE.getAbsolutePath());
-    conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, HTTP_PRINCIPAL + "@"
-        + KDC.getRealm());
-    conf.setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
-    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
-    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
-    conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
-
-    File keystoresDir = new File(TEST_UTIL.getDataTestDir("keystore").toUri().getPath());
-    keystoresDir.mkdirs();
-    String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestGenerateDelegationToken.class);
-    KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, false);
-
-    conf.setBoolean("ignore.secure.ports.for.testing", true);
-  }
-
-  /**
    * Setup and start kerberos, hbase
    */
   @BeforeClass
@@ -91,11 +63,10 @@ public class SecureTestCluster {
     KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL, HTTP_PRINCIPAL);
     TEST_UTIL.startMiniZKCluster();
 
-    HBaseKerberosUtils.setPrincipalForTesting(PRINCIPAL + "@" + KDC.getRealm());
-    HBaseKerberosUtils.setSecuredConfiguration(TEST_UTIL.getConfiguration());
+    HBaseKerberosUtils.setSecuredConfiguration(TEST_UTIL.getConfiguration(),
+        PRINCIPAL + "@" + KDC.getRealm(), HTTP_PRINCIPAL + "@" + KDC.getRealm());
+    HBaseKerberosUtils.setSSLConfiguration(TEST_UTIL, SecureTestCluster.class);
 
-    setHdfsSecuredConfiguration(TEST_UTIL.getConfiguration());
-    UserGroupInformation.setConfiguration(TEST_UTIL.getConfiguration());
     TEST_UTIL.getConfiguration().setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
         TokenProvider.class.getName());
     TEST_UTIL.startMiniDFSCluster(1);
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftSpnegoHttpServer.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftSpnegoHttpServer.java
index 2d5c1f2..f9144d6 100644
--- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftSpnegoHttpServer.java
+++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftSpnegoHttpServer.java
@@ -37,7 +37,6 @@ import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.thrift.generated.Hbase;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.http.HttpHeaders;
 import org.apache.http.auth.AuthSchemeProvider;
@@ -112,25 +111,7 @@ public class TestThriftSpnegoHttpServer extends TestThriftHttpServer {
     KerberosName.setRules("DEFAULT");
 
     HBaseKerberosUtils.setKeytabFileForTesting(serverKeytab.getAbsolutePath());
-    HBaseKerberosUtils.setPrincipalForTesting(serverPrincipal);
-    HBaseKerberosUtils.setSecuredConfiguration(conf);
-
-    // if we drop support for hadoop-2.4.0 and hadoop-2.4.1,
-    // the following key should be changed.
-    // 1) DFS_NAMENODE_USER_NAME_KEY -> DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY
-    // 2) DFS_DATANODE_USER_NAME_KEY -> DFS_DATANODE_KERBEROS_PRINCIPAL_KEY
-    conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, serverPrincipal);
-    conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, serverKeytab.getAbsolutePath());
-    conf.set(DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY, serverPrincipal);
-    conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, serverKeytab.getAbsolutePath());
-
-    conf.setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
-
-    conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoServerPrincipal);
-    conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
-        spnegoServerKeytab.getAbsolutePath());
-
-    conf.setBoolean("ignore.secure.ports.for.testing", true);
+    HBaseKerberosUtils.setSecuredConfiguration(conf, serverPrincipal, spnegoServerPrincipal);
 
     conf.setBoolean(ThriftServerRunner.THRIFT_SUPPORT_PROXYUSER_KEY, true);
     conf.setBoolean(ThriftServerRunner.USE_HTTP_CONF_KEY, true);

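For reference, the sketch below shows how a test can drive the new helpers end to end. It is a minimal illustration rather than part of this commit: the class name ExampleSecureMiniClusterTest, the keytab location, and the principal strings are assumptions, while the HBaseTestingUtility and HBaseKerberosUtils calls mirror the ones this patch introduces or exercises (setupMiniKdc, setKeytabFileForTesting, the new three-argument setSecuredConfiguration, and setSSLConfiguration).

import java.io.File;

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.AfterClass;
import org.junit.BeforeClass;

// Illustrative test class; names and principals are placeholders.
public class ExampleSecureMiniClusterTest {
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static MiniKdc KDC;
  private static File KEYTAB_FILE;
  private static String PRINCIPAL;       // service principal, e.g. "<user>/localhost"
  private static String HTTP_PRINCIPAL;  // SPNEGO principal for the web UIs

  @BeforeClass
  public static void setUp() throws Exception {
    // Start a MiniKdc and create keytab entries for both principals.
    KEYTAB_FILE = new File(TEST_UTIL.getDataTestDir("keytab").toUri().getPath());
    KDC = TEST_UTIL.setupMiniKdc(KEYTAB_FILE);
    PRINCIPAL = UserGroupInformation.getLoginUser().getShortUserName() + "/localhost";
    HTTP_PRINCIPAL = "HTTP/localhost";
    KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL, HTTP_PRINCIPAL);
    HBaseKerberosUtils.setKeytabFileForTesting(KEYTAB_FILE.getAbsolutePath());

    // One call wires the Kerberos settings for HBase, HDFS and YARN into the test
    // Configuration; a second call sets the HTTPS policy and keystore config used
    // by the NameNode/DataNode web endpoints.
    HBaseKerberosUtils.setSecuredConfiguration(TEST_UTIL.getConfiguration(),
        PRINCIPAL + "@" + KDC.getRealm(), HTTP_PRINCIPAL + "@" + KDC.getRealm());
    HBaseKerberosUtils.setSSLConfiguration(TEST_UTIL, ExampleSecureMiniClusterTest.class);

    TEST_UTIL.startMiniDFSCluster(1);
  }

  @AfterClass
  public static void tearDown() throws Exception {
    TEST_UTIL.shutdownMiniDFSCluster();
    if (KDC != null) {
      KDC.stop();
    }
    TEST_UTIL.cleanupTestDir();
  }
}

Note that setKeytabFileForTesting is called before setSecuredConfiguration, because the helper reads the keytab path back from the system property that setter populates.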