This is an automated email from the ASF dual-hosted git repository.
taklwu pushed a commit to branch branch-3
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/branch-3 by this push:
new 790139cde21 HBASE-29790: Backport 'HBASE-29761: The HBase UI's Debug Dump is not redacting sensitive information' to branch-3 (#7569)
790139cde21 is described below
commit 790139cde21b428dacac42b4c26b83a325f9b34e
Author: Kevin Geiszler <[email protected]>
AuthorDate: Mon Dec 22 10:22:28 2025 -0800
HBASE-29790: Backport 'HBASE-29761: The HBase UI's Debug Dump is not redacting sensitive information' to branch-3 (#7569)
Signed-off-by: Tak Lon (Stephen) Wu <[email protected]>
---
.../hbase/master/http/MasterDumpServlet.java | 12 +-
.../hadoop/hbase/monitoring/StateDumpServlet.java | 20 ++
.../hbase/regionserver/http/RSDumpServlet.java | 14 +-
.../hadoop/hbase/http/TestDebugDumpRedaction.java | 224 +++++++++++++++++++++
.../hbase/master/http/TestMasterStatusPage.java | 33 +--
.../hbase/regionserver/http/TestRSStatusPage.java | 29 +--
.../hadoop/hbase/util/TestServerHttpUtils.java | 68 +++++++
7 files changed, 334 insertions(+), 66 deletions(-)
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/http/MasterDumpServlet.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/http/MasterDumpServlet.java
index 402c248072f..daabfad937c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/http/MasterDumpServlet.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/http/MasterDumpServlet.java
@@ -18,9 +18,10 @@
package org.apache.hadoop.hbase.master.http;
import java.io.IOException;
-import java.io.OutputStream;
+import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
import java.util.Date;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
@@ -53,7 +54,8 @@ public class MasterDumpServlet extends StateDumpServlet {
assert master != null : "No Master in context!";
response.setContentType("text/plain");
- OutputStream os = response.getOutputStream();
+ OutputStreamWriter os =
+ new OutputStreamWriter(response.getOutputStream(), StandardCharsets.UTF_8);
try (PrintWriter out = new PrintWriter(os)) {
out.println("Master status for " + master.getServerName() + " as of " +
new Date());
@@ -81,15 +83,15 @@ public class MasterDumpServlet extends StateDumpServlet {
out.println("\n\nStacks:");
out.println(LINE);
out.flush();
- PrintStream ps = new PrintStream(response.getOutputStream(), false, "UTF-8");
+ PrintStream ps = new PrintStream(response.getOutputStream(), false, StandardCharsets.UTF_8);
Threads.printThreadInfo(ps, "");
ps.flush();
out.println("\n\nMaster configuration:");
out.println(LINE);
- Configuration conf = master.getConfiguration();
+ Configuration redactedConf = getRedactedConfiguration(master.getConfiguration());
out.flush();
- conf.writeXml(os);
+ redactedConf.writeXml(os);
os.flush();
out.println("\n\nRecent regionserver aborts:");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/StateDumpServlet.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/StateDumpServlet.java
index ff6d2939f17..fa84eb6853d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/StateDumpServlet.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/StateDumpServlet.java
@@ -22,6 +22,7 @@ import java.io.PrintWriter;
import java.util.Map;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
+import org.apache.hadoop.conf.ConfigRedactor;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.executor.ExecutorService;
import org.apache.hadoop.hbase.executor.ExecutorService.ExecutorStatus;
@@ -32,6 +33,8 @@ import org.apache.yetus.audience.InterfaceAudience;
public abstract class StateDumpServlet extends HttpServlet {
static final long DEFAULT_TAIL_KB = 100;
private static final long serialVersionUID = 1L;
+ protected static final String REDACTED = "<redacted>";
+ protected static final String REDACTED_TEXT = "******";
protected void dumpVersionInfo(PrintWriter out) {
VersionInfo.writeTo(out);
@@ -66,4 +69,21 @@ public abstract class StateDumpServlet extends HttpServlet {
status.dumpTo(out, " ");
}
}
+
+ protected Configuration getRedactedConfiguration(Configuration conf) {
+ // YARN-11308 introduced a new method signature to the overloaded Configuration.writeXml()
+ // method. Within this new method, the ConfigRedactor is used on the Configuration object if
+ // that object is not null. This allows the XML output to have sensitive content redacted
+ // automatically. However, this new method is only available in Hadoop 3.4 and later, so we are
+ // performing the redaction here manually in order to ensure backward compatibility.
+ ConfigRedactor redactor = new ConfigRedactor(conf);
+ String redactResult;
+ for (Map.Entry<String, String> entry : conf) {
+ redactResult = redactor.redact(entry.getKey(), entry.getValue());
+ if (REDACTED.equals(redactResult)) {
+ conf.set(entry.getKey(), REDACTED_TEXT);
+ }
+ }
+ return conf;
+ }
}
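
For context on getRedactedConfiguration() above, here is a minimal standalone sketch of how Hadoop's ConfigRedactor behaves. This is illustrative only, not the servlet code; the key names my.service.password and my.service.timeout are hypothetical:

    import java.util.Map;
    import org.apache.hadoop.conf.ConfigRedactor;
    import org.apache.hadoop.conf.Configuration;

    public class RedactionSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        // Hypothetical keys: the first matches the default "password$" pattern in
        // hadoop.security.sensitive-config-keys, the second matches nothing.
        conf.set("my.service.password", "hunter2");
        conf.set("my.service.timeout", "30");

        ConfigRedactor redactor = new ConfigRedactor(conf);
        for (Map.Entry<String, String> entry : conf) {
          // redact() returns "<redacted>" for sensitive keys, the original value otherwise.
          System.out.println(entry.getKey() + " = "
            + redactor.redact(entry.getKey(), entry.getValue()));
        }
        // Expected output (order may vary):
        //   my.service.password = <redacted>
        //   my.service.timeout = 30
      }
    }

The servlet then substitutes the printable "******" (REDACTED_TEXT) for any value the redactor flags, so the dumped XML keeps its structure.
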
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/http/RSDumpServlet.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/http/RSDumpServlet.java
index 4c98c08b072..3bdf7b61cda 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/http/RSDumpServlet.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/http/RSDumpServlet.java
@@ -18,9 +18,10 @@
package org.apache.hadoop.hbase.regionserver.http;
import java.io.IOException;
-import java.io.OutputStream;
+import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
import java.util.Date;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@@ -58,7 +59,8 @@ public class RSDumpServlet extends StateDumpServlet {
return;
}
- OutputStream os = response.getOutputStream();
+ OutputStreamWriter os =
+ new OutputStreamWriter(response.getOutputStream(), StandardCharsets.UTF_8);
try (PrintWriter out = new PrintWriter(os)) {
out.println("RegionServer status for " + hrs.getServerName() + " as of "
+ new Date());
@@ -81,15 +83,15 @@ public class RSDumpServlet extends StateDumpServlet {
out.println("\n\nStacks:");
out.println(LINE);
- PrintStream ps = new PrintStream(response.getOutputStream(), false, "UTF-8");
+ PrintStream ps = new PrintStream(response.getOutputStream(), false, StandardCharsets.UTF_8);
Threads.printThreadInfo(ps, "");
ps.flush();
out.println("\n\nRS Configuration:");
out.println(LINE);
- Configuration conf = hrs.getConfiguration();
+ Configuration redactedConf = getRedactedConfiguration(hrs.getConfiguration());
out.flush();
- conf.writeXml(os);
+ redactedConf.writeXml(os);
os.flush();
out.println("\n\nLogs");
@@ -99,7 +101,7 @@ public class RSDumpServlet extends StateDumpServlet {
out.println("\n\nRS Queue:");
out.println(LINE);
- if (isShowQueueDump(conf)) {
+ if (isShowQueueDump(hrs.getConfiguration())) {
dumpQueue(hrs, out);
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestDebugDumpRedaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestDebugDumpRedaction.java
new file mode 100644
index 00000000000..0b4dd4d00ab
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestDebugDumpRedaction.java
@@ -0,0 +1,224 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.http;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Stream;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtil;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.LocalHBaseCluster;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.master.HMaster;
+import org.apache.hadoop.hbase.master.ServerManager;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.CommonFSUtils;
+import org.apache.hadoop.hbase.util.TestServerHttpUtils;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This class performs tests that ensure sensitive config values found in the HBase UI's Debug Dump
+ * are redacted. A config property name must follow one of the regex patterns specified in
+ * hadoop.security.sensitive-config-keys in order to have its value redacted.
+ */
+@Category({ MiscTests.class, SmallTests.class })
+public class TestDebugDumpRedaction {
+ private static final Logger LOG = LoggerFactory.getLogger(TestDebugDumpRedaction.class);
+ private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
+ private static final String XML_CONFIGURATION_START_TAG = "<configuration>";
+ private static final String XML_CONFIGURATION_END_TAG = "</configuration>";
+ private static final int SUBSTRING_OFFSET = XML_CONFIGURATION_END_TAG.length();
+ private static final String REDACTED_TEXT = "******";
+
+ // These are typical configuration properties whose values we would want to see redacted.
+ private static final List<String> SENSITIVE_CONF_PROPERTIES =
+ Arrays.asList("hbase.zookeeper.property.ssl.trustStore.password",
+ "ssl.client.truststore.password", "hbase.rpc.tls.truststore.password",
+ "ssl.server.keystore.password", "fs.s3a.server-side-encryption.key",
+ "fs.s3a.encryption.algorithm", "fs.s3a.encryption.key",
"fs.s3a.secret.key",
+ "fs.s3a.important.secret.key", "fs.s3a.session.key",
"fs.s3a.secret.session.key",
+ "fs.s3a.session.token", "fs.s3a.secret.session.token",
"fs.azure.account.key.importantKey",
+ "fs.azure.oauth2.token", "fs.adl.oauth2.token",
"fs.gs.encryption.sensitive",
+ "fs.gs.proxy.important", "fs.gs.auth.sensitive.info",
"sensitive.credential",
+ "oauth.important.secret", "oauth.important.password",
"oauth.important.token",
+ "fs.adl.oauth2.access.token.provider.type",
"hadoop.security.sensitive-config-keys");
+
+ // These are not typical configuration properties whose values we would want to see redacted,
+ // but we are testing their redaction anyway because we want to see how the redaction behaves
+ // with booleans and ints.
+ private static final List<String> NON_SENSITIVE_KEYS_WITH_DEFAULT_VALUES =
Arrays.asList(
+ "hbase.zookeeper.quorum", "hbase.cluster.distributed",
"hbase.master.logcleaner.ttl",
+ "hbase.master.hfilecleaner.plugins", "hbase.master.infoserver.redirect",
+ "hbase.thrift.minWorkerThreads", "hbase.table.lock.enable");
+
+ // We also want to verify the behavior for a string with value "null" and an empty string.
+ // (giving a config property an actual null value will throw an error)
+ private static final String NULL_CONFIG_KEY = "null.key";
+ private static final String EMPTY_CONFIG_KEY = "empty.key";
+
+ // Combine all properties we want to redact into one list
+ private static final List<String> REDACTED_PROPS =
+ Stream.of(SENSITIVE_CONF_PROPERTIES, NON_SENSITIVE_KEYS_WITH_DEFAULT_VALUES,
+ List.of(NULL_CONFIG_KEY, EMPTY_CONFIG_KEY)).flatMap(Collection::stream).toList();
+
+ private static LocalHBaseCluster CLUSTER;
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestDebugDumpRedaction.class);
+
+ @BeforeClass
+ public static void beforeClass() throws Exception {
+ Configuration conf = UTIL.getConfiguration();
+
+ // Add various config properties with sensitive information that should be redacted
+ // when the Debug Dump is performed in the UI. These properties are following the
+ // regexes specified by the hadoop.security.sensitive-config-keys property.
+ for (String property : SENSITIVE_CONF_PROPERTIES) {
+ conf.set(property, "testPassword");
+ }
+
+ // Also verify a null string and empty string will get redacted.
+ // Setting the config to use an actual null value throws an error.
+ conf.set(NULL_CONFIG_KEY, "null");
+ conf.set(EMPTY_CONFIG_KEY, "");
+
+ // Config properties following these regex patterns will have their values redacted in the
+ // Debug Dump
+ String sensitiveKeyRegexes = "secret$,password$,ssl.keystore.pass$,"
+ + "fs.s3a.server-side-encryption.key,fs.s3a.*.server-side-encryption.key,"
+ + "fs.s3a.encryption.algorithm,fs.s3a.encryption.key,fs.s3a.secret.key,"
+ + "fs.s3a.*.secret.key,fs.s3a.session.key,fs.s3a.*.session.key,fs.s3a.session.token,"
+ + "fs.s3a.*.session.token,fs.azure.account.key.*,fs.azure.oauth2.*,fs.adl.oauth2.*,"
+ + "fs.gs.encryption.*,fs.gs.proxy.*,fs.gs.auth.*,credential$,oauth.*secret,"
+ + "oauth.*password,oauth.*token,hadoop.security.sensitive-config-keys,"
+ + String.join(",", NON_SENSITIVE_KEYS_WITH_DEFAULT_VALUES) + "," + NULL_CONFIG_KEY + ","
+ + EMPTY_CONFIG_KEY;
+
+ conf.set("hadoop.security.sensitive-config-keys", sensitiveKeyRegexes);
+
+ UTIL.startMiniZKCluster();
+
+ UTIL.startMiniDFSCluster(1);
+ Path rootdir = UTIL.getDataTestDirOnTestFS("TestDebugDumpServlet");
+ CommonFSUtils.setRootDir(conf, rootdir);
+
+ // The info servers do not run in tests by default.
+ // Set them to ephemeral ports so they will start
+ // setup configuration
+ conf.setInt(HConstants.MASTER_INFO_PORT, 0);
+ conf.setInt(HConstants.REGIONSERVER_INFO_PORT, 0);
+
+ CLUSTER = new LocalHBaseCluster(conf, 1);
+ CLUSTER.startup();
+ CLUSTER.getActiveMaster().waitForMetaOnline();
+ }
+
+ @AfterClass
+ public static void afterClass() throws Exception {
+ if (CLUSTER != null) {
+ CLUSTER.shutdown();
+ CLUSTER.join();
+ }
+ UTIL.shutdownMiniCluster();
+ }
+
+ @Test
+ public void testMasterPasswordsAreRedacted() throws IOException {
+ String response = TestServerHttpUtils.getMasterPageContent(CLUSTER);
+
+ // Verify this is the master server's debug dump
+ assertTrue(
+ response.startsWith("Master status for " +
CLUSTER.getActiveMaster().getServerName()));
+
+ verifyDebugDumpResponseConfig(response);
+ }
+
+ @Test
+ public void testRegionServerPasswordsAreRedacted() throws IOException {
+ HMaster master = CLUSTER.getActiveMaster();
+
+ ServerManager serverManager = master.getServerManager();
+ List<ServerName> onlineServersList = serverManager.getOnlineServersList();
+
+ assertEquals(1, onlineServersList.size());
+
+ ServerName regionServerName = onlineServersList.get(0);
+ int regionServerInfoPort = master.getRegionServerInfoPort(regionServerName);
+ String regionServerHostname = regionServerName.getHostname();
+
+ String response =
+ TestServerHttpUtils.getRegionServerPageContent(regionServerHostname, regionServerInfoPort);
+
+ // Verify this is the region server's debug dump
+ assertTrue(response.startsWith("RegionServer status for " +
regionServerName));
+
+ verifyDebugDumpResponseConfig(response);
+ }
+
+ private void verifyDebugDumpResponseConfig(String response) throws IOException {
+ // Grab the server's config from the Debug Dump.
+ String xmlString = response.substring(response.indexOf(XML_CONFIGURATION_START_TAG),
+ response.indexOf(XML_CONFIGURATION_END_TAG) + SUBSTRING_OFFSET);
+
+ // Convert the XML string into an InputStream.
+ Configuration conf = new Configuration(false);
+ try (InputStream is = new ByteArrayInputStream(xmlString.getBytes(StandardCharsets.UTF_8))) {
+ // Add the InputStream as a resource to the Configuration object
+ conf.addResource(is, "DebugDumpXmlConfig");
+ }
+
+ // Verify the expected properties had their values redacted.
+ for (String property : REDACTED_PROPS) {
+ LOG.info("Verifying property has been redacted: {}", property);
+ assertEquals("Expected " + property + " to have its value redacted",
REDACTED_TEXT,
+ conf.get(property));
+ }
+
+ // Verify all other props have not had their values redacted.
+ String propertyName;
+ for (Map.Entry<String, String> property : conf) {
+ propertyName = property.getKey();
+ if (!REDACTED_PROPS.contains(propertyName)) {
+ LOG.info("Verifying {} property has not had its value redacted",
propertyName);
+ assertNotEquals("Expected property " + propertyName + " to not have
its value redacted",
+ REDACTED_TEXT, conf.get(propertyName));
+ }
+ }
+ }
+}
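
A note on the regex list configured in the test above: to my understanding, Hadoop compiles each comma-separated entry of hadoop.security.sensitive-config-keys as a java.util.regex pattern and treats a key as sensitive when the pattern is found anywhere in the key (find(), not a full match), which is why an anchored entry like "password$" catches every key ending in password. A hedged illustration of that matching semantic, using the regex API directly rather than ConfigRedactor:

    import java.util.regex.Pattern;

    public class SensitiveKeyMatchSketch {
      public static void main(String[] args) {
        Pattern p = Pattern.compile("password$");
        // find() succeeds on a partial match, so any key ending in "password" is flagged.
        System.out.println(p.matcher("ssl.server.keystore.password").find()); // true
        System.out.println(p.matcher("hbase.master.port").find());            // false
      }
    }
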
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/http/TestMasterStatusPage.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/http/TestMasterStatusPage.java
index 481de91ea0e..a7496c78aa9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/http/TestMasterStatusPage.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/http/TestMasterStatusPage.java
@@ -21,10 +21,7 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
-import java.io.BufferedReader;
import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
@@ -45,6 +42,7 @@ import org.apache.hadoop.hbase.master.ServerManager;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.CommonFSUtils;
+import org.apache.hadoop.hbase.util.TestServerHttpUtils;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -108,7 +106,9 @@ public class TestMasterStatusPage {
createTestTables(master);
- String page = getMasterStatusPageContent();
+ URL url =
+ new URL(TestServerHttpUtils.getMasterInfoServerHostAndPort(CLUSTER) + "/master-status");
+ String page = TestServerHttpUtils.getPageContent(url, "text/html;charset=utf-8");
String hostname = master.getServerName().getHostname();
assertTrue(page.contains("<h1>Master <small>" + hostname +
"</small></h1>"));
@@ -127,17 +127,6 @@ public class TestMasterStatusPage {
assertTrue(page.contains(VersionInfo.getVersion()));
}
- private String getMasterStatusPageContent() throws IOException {
- URL url = new URL(getInfoServerHostAndPort() + "/master-status");
- HttpURLConnection conn = (HttpURLConnection) url.openConnection();
- conn.connect();
-
- assertEquals(200, conn.getResponseCode());
- assertEquals("text/html;charset=utf-8", conn.getContentType());
-
- return getResponseBody(conn);
- }
-
private static void createTestTables(HMaster master) throws IOException {
ColumnFamilyDescriptor cf = ColumnFamilyDescriptorBuilder.of("CF");
TableDescriptor tableDescriptor1 = TableDescriptorBuilder
@@ -149,20 +138,6 @@ public class TestMasterStatusPage {
master.flushMasterStore();
}
- private String getInfoServerHostAndPort() {
- return "http://localhost:" +
CLUSTER.getActiveMaster().getInfoServer().getPort();
- }
-
- private static String getResponseBody(HttpURLConnection conn) throws IOException {
- StringBuilder sb = new StringBuilder();
- BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream()));
- String output;
- while ((output = br.readLine()) != null) {
- sb.append(output);
- }
- return sb.toString();
- }
-
private static void assertRegionServerLinks(HMaster master, String responseBody) {
ServerManager serverManager = master.getServerManager();
List<ServerName> servers = serverManager.getOnlineServersList();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/http/TestRSStatusPage.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/http/TestRSStatusPage.java
index b4b7214b037..12b7e0d7857 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/http/TestRSStatusPage.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/http/TestRSStatusPage.java
@@ -20,10 +20,7 @@ package org.apache.hadoop.hbase.regionserver.http;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
-import java.io.BufferedReader;
import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.HttpURLConnection;
import java.net.URL;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
@@ -43,6 +40,7 @@ import org.apache.hadoop.hbase.master.ServerManager;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.CommonFSUtils;
+import org.apache.hadoop.hbase.util.TestServerHttpUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
@@ -123,7 +121,8 @@ public class TestRSStatusPage {
String hostname = firstServerName.getHostname();
int port = firstServerName.getPort();
- String page = getRegionServerStatusPageContent(hostname, infoPort);
+ URL url = new URL("http://" + hostname + ":" + infoPort +
"/regionserver.jsp");
+ String page = TestServerHttpUtils.getPageContent(url,
"text/html;charset=utf-8");
assertTrue(page.contains("<title>HBase Region Server: " + masterHostname +
"</title>"));
@@ -153,26 +152,4 @@ public class TestRSStatusPage {
master.createTable(tableDescriptor2, null, 0, 0);
master.flushMasterStore();
}
-
- private static String getRegionServerStatusPageContent(String hostname, int infoPort)
- throws IOException {
- URL url = new URL("http://" + hostname + ":" + infoPort + "/regionserver.jsp");
- HttpURLConnection conn = (HttpURLConnection) url.openConnection();
- conn.connect();
-
- assertEquals(200, conn.getResponseCode());
- assertEquals("text/html;charset=utf-8", conn.getContentType());
-
- return getResponseBody(conn);
- }
-
- private static String getResponseBody(HttpURLConnection conn) throws IOException {
- StringBuilder sb = new StringBuilder();
- BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream()));
- String output;
- while ((output = br.readLine()) != null) {
- sb.append(output);
- }
- return sb.toString();
- }
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestServerHttpUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestServerHttpUtils.java
new file mode 100644
index 00000000000..a69971e1f36
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestServerHttpUtils.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import org.apache.hadoop.hbase.LocalHBaseCluster;
+
+public class TestServerHttpUtils {
+ private static final String PLAIN_TEXT_UTF8 = "text/plain;charset=utf-8";
+
+ private TestServerHttpUtils() {
+ }
+
+ public static String getMasterInfoServerHostAndPort(LocalHBaseCluster cluster) {
+ return "http://localhost:" + cluster.getActiveMaster().getInfoServer().getPort();
+ }
+
+ public static String getPageContent(URL url, String mimeType) throws IOException {
+ HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+ conn.connect();
+
+ assertEquals(200, conn.getResponseCode());
+ assertEquals(mimeType, conn.getContentType());
+
+ return TestServerHttpUtils.getResponseBody(conn);
+ }
+
+ public static String getMasterPageContent(LocalHBaseCluster cluster) throws IOException {
+ URL debugDumpUrl = new URL(getMasterInfoServerHostAndPort(cluster) + "/dump");
+ return getPageContent(debugDumpUrl, PLAIN_TEXT_UTF8);
+ }
+
+ public static String getRegionServerPageContent(String hostName, int port) throws IOException {
+ URL debugDumpUrl = new URL("http://" + hostName + ":" + port + "/dump");
+ return getPageContent(debugDumpUrl, PLAIN_TEXT_UTF8);
+ }
+
+ private static String getResponseBody(HttpURLConnection conn) throws IOException {
+ StringBuilder sb = new StringBuilder();
+ BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream()));
+ String output;
+ while ((output = br.readLine()) != null) {
+ sb.append(output);
+ }
+ return sb.toString();
+ }
+}
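
As a usage note, operators can widen what the dump redacts without code changes, since the behavior is driven entirely by hadoop.security.sensitive-config-keys. A sketch under stated assumptions: my.internal.apikey is a hypothetical key, and the stock pattern list is assumed to come from core-default.xml so appending preserves it:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class ExtendRedactionSketch {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Append a custom pattern; keep the existing list (normally supplied by
        // core-default.xml) intact, since an explicit value replaces the defaults.
        String existing = conf.get("hadoop.security.sensitive-config-keys", "");
        conf.set("hadoop.security.sensitive-config-keys",
          existing.isEmpty() ? "apikey$" : existing + ",apikey$");
        conf.set("my.internal.apikey", "s3cr3t"); // hypothetical sensitive key
        // The /dump endpoint on a master or region server built from this change
        // would then show ****** for this key in the dumped configuration.
      }
    }
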