This is an automated email from the ASF dual-hosted git repository.
difin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new faff3979a47 HIVE-28775: HiveServer2: Implement HA Health Check
endpoint on a separate port from WebUI (#5652) (Dmitriy Fingerman, reviewed by
Denys Kuzmenko)
faff3979a47 is described below
commit faff3979a47f846828c3560ceaee06d7f250cf3b
Author: Dmitriy Fingerman <[email protected]>
AuthorDate: Fri Mar 28 17:42:34 2025 -0400
HIVE-28775: HiveServer2: Implement HA Health Check endpoint on a separate
port from WebUI (#5652) (Dmitriy Fingerman, reviewed by Denys Kuzmenko)
* HIVE-28775: HiveServer2: Implement HA Health Check endpoint on a separate
port from WebUI
---------
Co-authored-by: Dmitriy Fingerman <[email protected]>
---
.../java/org/apache/hadoop/hive/conf/HiveConf.java | 2 +
.../src/java/org/apache/hive/http/HttpServer.java | 299 ++++++++++++++++-----
data/conf/hive-site.xml | 2 +-
data/conf/mr/hive-site.xml | 2 +-
.../org/apache/hive/jdbc/TestActivePassiveHA.java | 145 +++++++++-
.../hive/jdbc/miniHS2/AbstractHiveService.java | 8 +-
.../java/org/apache/hive/jdbc/miniHS2/MiniHS2.java | 3 +
.../apache/hive/service/server/HiveServer2.java | 191 +++++++------
...ershipStatus.java => HS2LeadershipManager.java} | 40 +--
.../hive/service/servlet/HS2LeadershipStatus.java | 80 +-----
.../org/apache/hive/service/servlet/HS2Peers.java | 2 +-
.../hive-webapps/ha-healthcheck/WEB-INF/web.xml | 10 +
12 files changed, 500 insertions(+), 284 deletions(-)
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 107a848e78c..6638e00829e 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -4056,6 +4056,8 @@ public static enum ConfVars {
"hs2ActivePassiveHA",
"When HiveServer2 Active/Passive High Availability is enabled, uses this
namespace for registering HS2\n" +
"instances with zookeeper"),
+
HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT("hive.server2.active.passive.ha.healthcheck.port",
11002,
+ "The port the HiveServer2 ha-healthcheck web app will listen on"),
HIVE_SERVER2_TEZ_INTERACTIVE_QUEUE("hive.server2.tez.interactive.queue",
"",
"A single YARN queue to use for Hive Interactive sessions. When this
is specified,\n" +
"workload management is enabled and used for these sessions."),
diff --git a/common/src/java/org/apache/hive/http/HttpServer.java
b/common/src/java/org/apache/hive/http/HttpServer.java
index f884ff8df24..0499f16e305 100644
--- a/common/src/java/org/apache/hive/http/HttpServer.java
+++ b/common/src/java/org/apache/hive/http/HttpServer.java
@@ -82,15 +82,16 @@
import org.eclipse.jetty.security.ConstraintMapping;
import org.eclipse.jetty.security.ConstraintSecurityHandler;
import org.eclipse.jetty.security.LoginService;
-import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.LowResourceMonitor;
+import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.ContextHandler.Context;
import org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.handler.HandlerCollection;
import org.eclipse.jetty.servlet.DefaultServlet;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.FilterMapping;
@@ -135,9 +136,10 @@ public class HttpServer {
private final String name;
- private String appDir;
- private WebAppContext webAppContext;
+ private WebAppContext rootWebAppContext;
private Server webServer;
+ private QueuedThreadPool threadPool;
+ private PortHandlerWrapper portHandlerWrapper;
/**
* Create a status server on the given port.
@@ -179,6 +181,7 @@ public static class Builder {
new LinkedList<Pair<String, Class<? extends HttpServlet>>>();
private boolean disableDirListing = false;
private final Map<String, Pair<String, Filter>> globalFilters = new
LinkedHashMap<>();
+ private String contextPath = "/";
public Builder(String name) {
Preconditions.checkArgument(name != null && !name.isEmpty(), "Name must
be specified");
@@ -189,6 +192,10 @@ public HttpServer build() throws IOException {
return new HttpServer(this);
}
+ public void setContextPath(String contextPath) {
+ this.contextPath = contextPath;
+ }
+
public Builder setConf(HiveConf origConf) {
this.conf = new HiveConf(origConf);
origConf.stripHiddenConfigurations(conf);
@@ -489,13 +496,13 @@ static boolean userHasAdministratorAccess(ServletContext
servletContext,
/**
* Create the web context for the application of specified name
*/
- WebAppContext createWebAppContext(Builder b) {
+ WebAppContext createWebAppContext(Builder b) throws FileNotFoundException {
WebAppContext ctx = new WebAppContext();
setContextAttributes(ctx.getServletContext(), b.contextAttrs);
ctx.getServletContext().getSessionCookieConfig().setHttpOnly(true);
ctx.setDisplayName(b.name);
- ctx.setContextPath("/");
- ctx.setWar(appDir + "/" + b.name);
+ ctx.setContextPath(b.contextPath);
+ ctx.setWar(getWebAppsPath(b.name) + "/" + b.name);
return ctx;
}
@@ -519,8 +526,9 @@ void setupSpnegoFilter(Builder b, ServletContextHandler
ctx) throws IOException
/**
* Setup cross-origin requests (CORS) filter.
* @param b - builder
+ * @param webAppContext - webAppContext
*/
- private void setupCORSFilter(Builder b) {
+ private void setupCORSFilter(Builder b, WebAppContext webAppContext) {
FilterHolder holder = new FilterHolder();
holder.setClassName(CrossOriginFilter.class.getName());
Map<String, String> params = new HashMap<>();
@@ -534,9 +542,112 @@ private void setupCORSFilter(Builder b) {
}
/**
- * Create a channel connector for "http/https" requests
+ * Creates a port connector and initializes a web application that processes
requests through the newly
+ * created port connector.
+ *
+ * @param builder - The builder object used to configure and create the port
connector and web application.
+ * @return ContextHandlerCollection - A collection of request handlers
associated with the new port connector,
+ * which includes the newly initialized web application.
+ */
+ public ContextHandlerCollection createAndAddWebApp(Builder builder) throws
IOException {
+ WebAppContext webAppContext = createWebAppContext(builder);
+ initWebAppContext(builder, webAppContext);
+ RewriteHandler rwHandler = createRewriteHandler(builder, webAppContext);
+
+ ContextHandlerCollection portHandler = new ContextHandlerCollection();
+ portHandler.addHandler(rwHandler);
+
+ for (Pair<String, Class<? extends HttpServlet>> p : builder.servlets) {
+ addServlet(p.getKey(), "/" + p.getKey(), p.getValue(), webAppContext);
+ }
+
+ builder.globalFilters.forEach((k, v) ->
+ addFilter(k, v.getKey(), v.getValue(),
webAppContext.getServletHandler()));
+
+ // Associate the port handler with a new connector and add it to the server
+ ServerConnector connector =
createAndAddChannelConnector(threadPool.getQueueSize(), builder);
+ portHandlerWrapper.addHandler(connector, portHandler);
+
+ if (builder.contextPath.equals("/")) {
+ rootWebAppContext = webAppContext;
+ }
+
+ return portHandler;
+ }
+
+ /**
+ * Initializes the {@link WebAppContext} based on the provided configuration
in the {@link Builder}.
+ * The method sets up various filters and configurations for the web
application context, including
+ * security and cross-origin resource sharing (CORS) settings, as well as
header management.
+ *
+ * <p>The method performs the following actions based on the {@code builder}
configuration:</p>
+ * <ul>
+ * <li>If {@code builder.useSPNEGO} is {@code true}, sets up the SPNEGO
filter for Kerberos authentication.</li>
+ * <li>If {@code builder.enableCORS} is {@code true}, sets up the CORS
filter.</li>
+ * <li>If {@code builder.xFrameEnabled} is {@code true}, configures the
X-Frame-Options header filter.</li>
+ * <li>If {@code builder.disableDirListing} is {@code true}, disables
directory listing on the servlet.</li>
+ * </ul>
+ *
+ * @param builder The {@link Builder} object containing configuration
options to customize the web application context.
+ * @param webAppContext The {@link WebAppContext} to which the request will
be forwarded
+ * after the URI has been rewritten.
+ * @throws IOException If an I/O error occurs while initializing the web
application context.
+ */
+ private void initWebAppContext(Builder builder, WebAppContext webAppContext)
throws IOException {
+ if (builder.useSPNEGO) {
+ // Secure the web server with kerberos
+ setupSpnegoFilter(builder, webAppContext);
+ }
+
+ if (builder.enableCORS) {
+ setupCORSFilter(builder, webAppContext);
+ }
+
+ Map<String, String> xFrameParams = setHeaders();
+ if (builder.xFrameEnabled) {
+ setupXframeFilter(xFrameParams, webAppContext);
+ }
+
+ if (builder.disableDirListing) {
+ disableDirectoryListingOnServlet(webAppContext);
+ }
+ }
+
+ /**
+ * Creates and configures a {@link RewriteHandler} that rewrites incoming
request URIs
+ * based on predefined rules, and sets the specified {@link WebAppContext}
as the
+ * handler for the rewritten requests.
+ *
+ * <p>This method creates a {@link RewriteHandler} that rewrites requests to
the root path
+ * ("/") to a new target URI specified by the {@code
builder.contextRootRewriteTarget}.
+ * The URI rewrite is applied before forwarding the request to the given
{@link WebAppContext}.</p>
+ *
+ * @param builder The builder object containing configuration values, such
as the
+ * target for URI rewrite.
+ * @param webAppContext The {@link WebAppContext} to which the request will
be forwarded
+ * after the URI has been rewritten.
+ * @return A {@link RewriteHandler} configured with the rewrite rule and the
web application context.
+ */
+ private RewriteHandler createRewriteHandler(Builder builder, WebAppContext
webAppContext) {
+ RewriteHandler rwHandler = new RewriteHandler();
+ rwHandler.setRewriteRequestURI(true);
+ rwHandler.setRewritePathInfo(false);
+
+ RewriteRegexRule rootRule = new RewriteRegexRule();
+ rootRule.setRegex("^/$");
+ rootRule.setReplacement(builder.contextRootRewriteTarget);
+ rootRule.setTerminating(true);
+
+ rwHandler.addRule(rootRule);
+ rwHandler.setHandler(webAppContext);
+
+ return rwHandler;
+ }
+
+ /**
+ * Create a channel connector for "http/https" requests and add it to the
server
*/
- Connector createChannelConnector(int queueSize, Builder b) {
+ ServerConnector createAndAddChannelConnector(int queueSize, Builder b) {
ServerConnector connector;
final HttpConfiguration conf = new HttpConfiguration();
@@ -574,6 +685,8 @@ Connector createChannelConnector(int queueSize, Builder b) {
connector.setReuseAddress(true);
connector.setHost(b.host);
connector.setPort(b.port);
+
+ webServer.addConnector(connector);
return connector;
}
@@ -604,7 +717,7 @@ void setContextAttributes(Context ctx, Map<String, Object>
contextAttrs) {
private void createWebServer(final Builder b) throws IOException {
// Create the thread pool for the web server to handle HTTP requests
- QueuedThreadPool threadPool = new QueuedThreadPool();
+ threadPool = new QueuedThreadPool();
if (b.maxThreads > 0) {
threadPool.setMaxThreads(b.maxThreads);
}
@@ -612,66 +725,33 @@ private void createWebServer(final Builder b) throws
IOException {
threadPool.setName(b.name + "-web");
this.webServer = new Server(threadPool);
- this.appDir = getWebAppsPath(b.name);
- this.webAppContext = createWebAppContext(b);
-
- if (b.useSPNEGO) {
- // Secure the web server with kerberos
- setupSpnegoFilter(b, webAppContext);
- }
-
- if (b.enableCORS) {
- setupCORSFilter(b);
- }
-
- Map<String, String> xFrameParams = setHeaders();
- if (b.xFrameEnabled) {
- setupXframeFilter(b,xFrameParams);
- }
-
- if (b.disableDirListing) {
- disableDirectoryListingOnServlet(webAppContext);
- }
- initializeWebServer(b, threadPool.getMaxThreads());
+ initializeWebServer(b);
}
- private void initializeWebServer(final Builder b, int queueSize) throws
IOException {
+ private void initializeWebServer(final Builder b) throws IOException {
// Set handling for low resource conditions.
final LowResourceMonitor low = new LowResourceMonitor(webServer);
low.setLowResourcesIdleTimeout(10000);
webServer.addBean(low);
- Connector connector = createChannelConnector(queueSize, b);
- webServer.addConnector(connector);
-
- RewriteHandler rwHandler = new RewriteHandler();
- rwHandler.setRewriteRequestURI(true);
- rwHandler.setRewritePathInfo(false);
-
- RewriteRegexRule rootRule = new RewriteRegexRule();
- rootRule.setRegex("^/$");
- rootRule.setReplacement(b.contextRootRewriteTarget);
- rootRule.setTerminating(true);
-
- rwHandler.addRule(rootRule);
- rwHandler.setHandler(webAppContext);
-
- // Configure web application contexts for the web server
- ContextHandlerCollection contexts = new ContextHandlerCollection();
- contexts.addHandler(rwHandler);
- webServer.setHandler(contexts);
-
+ // Configure the global context handler
+ portHandlerWrapper = new PortHandlerWrapper();
+ webServer.setHandler(portHandlerWrapper);
if (b.usePAM) {
- setupPam(b, contexts);
+ setupPam(b, portHandlerWrapper);
}
+ // Configures the web server connector and the port handler that listens on it.
+ // Also creates the web application context (to which the servlets will be added) and adds it to the server.
+ ContextHandlerCollection portHandler = createAndAddWebApp(b);
addServlet("jmx", "/jmx", JMXJsonServlet.class);
addServlet("conf", "/conf", ConfServlet.class);
addServlet("stacks", "/stacks", StackServlet.class);
addServlet("conflog", "/conflog", Log4j2ConfiguratorServlet.class);
+
final String asyncProfilerHome = ProfileServlet.getAsyncProfilerHome();
if (asyncProfilerHome != null && !asyncProfilerHome.trim().isEmpty()) {
addServlet("prof", "/prof", ProfileServlet.class);
@@ -679,8 +759,7 @@ private void initializeWebServer(final Builder b, int
queueSize) throws IOExcept
if (Files.notExists(tmpDir)) {
Files.createDirectories(tmpDir);
}
- ServletContextHandler genCtx =
- new ServletContextHandler(contexts, "/prof-output");
+ ServletContextHandler genCtx = new ServletContextHandler(portHandler,
"/prof-output");
setContextAttributes(genCtx.getServletContext(), b.contextAttrs);
genCtx.addServlet(ProfileOutputServlet.class, "/*");
genCtx.setResourceBase(tmpDir.toAbsolutePath().toString());
@@ -688,24 +767,15 @@ private void initializeWebServer(final Builder b, int
queueSize) throws IOExcept
} else {
LOG.info("ASYNC_PROFILER_HOME env or -Dasync.profiler.home not
specified. Disabling /prof endpoint..");
}
-
- for (Pair<String, Class<? extends HttpServlet>> p : b.servlets) {
- addServlet(p.getFirst(), "/" + p.getFirst(), p.getSecond());
- }
-
- b.globalFilters.forEach((k, v) -> addFilter(k, v.getFirst(),
v.getSecond(), webAppContext.getServletHandler()));
-
- ServletContextHandler staticCtx =
- new ServletContextHandler(contexts, "/static");
- staticCtx.setResourceBase(appDir + "/static");
+ ServletContextHandler staticCtx = new ServletContextHandler(portHandler,
"/static");
+ staticCtx.setResourceBase(getWebAppsPath(b.name) + "/static");
staticCtx.addServlet(DefaultServlet.class, "/*");
staticCtx.setDisplayName("static");
disableDirectoryListingOnServlet(staticCtx);
String logDir = getLogDir(b.conf);
if (logDir != null) {
- ServletContextHandler logCtx =
- new ServletContextHandler(contexts, "/logs");
+ ServletContextHandler logCtx = new ServletContextHandler(portHandler,
"/logs");
setContextAttributes(logCtx.getServletContext(), b.contextAttrs);
if(b.useSPNEGO) {
setupSpnegoFilter(b,logCtx);
@@ -716,11 +786,11 @@ private void initializeWebServer(final Builder b, int
queueSize) throws IOExcept
}
// Define the global filters for each servlet context except the
staticCtx(css style).
- Optional<Handler[]> handlers = Optional.ofNullable(contexts.getHandlers());
+ Optional<Handler[]> handlers =
Optional.ofNullable(portHandler.getHandlers());
handlers.ifPresent(hs -> Arrays.stream(hs)
.filter(h -> h instanceof ServletContextHandler &&
!"static".equals(((ServletContextHandler) h).getDisplayName()))
.forEach(h -> b.globalFilters.forEach((k, v) ->
- addFilter(k, v.getFirst(), v.getSecond(), ((ServletContextHandler)
h).getServletHandler()))));
+ addFilter(k, v.getKey(), v.getValue(), ((ServletContextHandler)
h).getServletHandler()))));
}
private Map<String, String> setHeaders() {
@@ -748,7 +818,7 @@ private Map<String, String> getDefaultHeaders() {
return headers;
}
- private void setupXframeFilter(Builder b, Map<String, String> params) {
+ private void setupXframeFilter(Map<String, String> params, WebAppContext
webAppContext) {
FilterHolder holder = new FilterHolder();
holder.setClassName(QuotingInputFilter.class.getName());
holder.setInitParameters(params);
@@ -797,13 +867,24 @@ String getWebAppsPath(String appName) throws
FileNotFoundException {
}
/**
- * Add a servlet in the server.
+ * Add a servlet to the rootWebAppContext that is added to the webserver
during its initialization.
+ * @param name The name of the servlet (can be passed as null)
+ * @param pathSpec The path spec for the servlet
+ * @param clazz The servlet class
+ */
+ public void addServlet(String name, String pathSpec, Class<? extends
HttpServlet> clazz) {
+ addServlet(name, pathSpec, clazz, rootWebAppContext);
+ }
+
+ /**
+ * Add a servlet to the provided webAppContext
* @param name The name of the servlet (can be passed as null)
* @param pathSpec The path spec for the servlet
* @param clazz The servlet class
+ * @param webAppContext The webAppContext to which the servlet will be added
*/
- public void addServlet(String name, String pathSpec,
- Class<? extends HttpServlet> clazz) {
+ private void addServlet(String name, String pathSpec, Class<? extends
HttpServlet> clazz,
+ WebAppContext webAppContext) {
ServletHolder holder = new ServletHolder(clazz);
if (name != null) {
holder.setName(name);
@@ -811,7 +892,24 @@ public void addServlet(String name, String pathSpec,
webAppContext.addServlet(holder, pathSpec);
}
+ /**
+ * Add a servlet holder to the rootWebAppContext that is added to the
webserver during its initialization.
+ * @param name The name of the servlet (can be passed as null)
+ * @param pathSpec The path spec for the servlet
+ * @param holder The servlet holder to be added to the webAppContext
+ */
public void addServlet(String name, String pathSpec, ServletHolder holder) {
+ addServlet(name, pathSpec, holder, rootWebAppContext);
+ }
+
+ /**
+ * Add a servlet holder to the provided webAppContext
+ * @param name The name of the servlet (can be passed as null)
+ * @param pathSpec The path spec for the servlet
+ * @param holder The servlet holder to be added to the webAppContext
+ * @param webAppContext The webAppContext to which the servlet will be added
+ */
+ private void addServlet(String name, String pathSpec, ServletHolder holder,
WebAppContext webAppContext) {
if (name != null) {
holder.setName(name);
}
@@ -1026,4 +1124,59 @@ private void initHttpHeaderMap() {
}
}
+ /**
+ * A custom {@link ContextHandlerCollection} that maps server connectors
(ports) to specific handler collections.
+ * This class allows for the association of different handlers with
different ports, and ensures that requests
+ * are routed to the appropriate handler based on the port they came through.
+ *
+ * <p>The {@link PortHandlerWrapper} class overrides the {@link
ContextHandlerCollection#handle} method to
+ * select the appropriate handler based on the request's port and delegate
the request to that handler.
+ * </p>
+ *
+ * <p>This class uses a map to associate each {@link ServerConnector} (which
represents a port) to a
+ * {@link HandlerCollection}. The {@link #addHandler(ServerConnector,
HandlerCollection)} method allows handlers
+ * to be added for specific ports.</p>
+ */
+ static class PortHandlerWrapper extends ContextHandlerCollection {
+
+ /** Map of server connectors (ports) to their corresponding handler
collections. */
+ private final Map<ServerConnector, HandlerCollection>
connectorToHandlerMap = new HashMap<>();
+
+ /**
+ * Adds a handler collection to the {@link PortHandlerWrapper} for a
specific server connector (port).
+ *
+ * @param connector the {@link ServerConnector} representing the port to
which the handler should be associated
+ * @param handler the {@link HandlerCollection} that will handle requests
on the specified port
+ */
+ public void addHandler(ServerConnector connector, HandlerCollection
handler) {
+ connectorToHandlerMap.put(connector, handler);
+ addHandler(handler);
+ }
+
+ /**
+ * Handles the HTTP request by determining which port the request came
through and routing it to the appropriate handler.
+ *
+ * @param target the target of the request
+ * @param baseRequest the base request object
+ * @param request the {@link HttpServletRequest} object containing the
request details
+ * @param response the {@link HttpServletResponse} object to send the
response
+ * @throws IOException if an input or output exception occurs during the
handling of the request
+ * @throws ServletException if a servlet-specific exception occurs during
the handling of the request
+ */
+ @Override
+ public void handle(String target, Request baseRequest, HttpServletRequest
request, HttpServletResponse response) throws IOException, ServletException {
+ // Determine the connector (port) the request came through
+ int port = request.getServerPort();
+
+ // Find the handler for the corresponding port
+ Handler handler = connectorToHandlerMap.entrySet().stream()
+ .filter(entry -> entry.getKey().getPort() == port)
+ .map(Map.Entry::getValue)
+ .findFirst()
+ .orElseThrow(() -> new IllegalStateException("No handler found for
port " + port));
+
+ // Delegate the request to the handler
+ handler.handle(target, baseRequest, request, response);
+ }
+ }
}
diff --git a/data/conf/hive-site.xml b/data/conf/hive-site.xml
index 94f9d036542..e9b0480ab0c 100644
--- a/data/conf/hive-site.xml
+++ b/data/conf/hive-site.xml
@@ -358,7 +358,7 @@
<property>
<name>hive.server2.webui.max.threads</name>
- <value>4</value>
+ <value>6</value>
</property>
<property>
diff --git a/data/conf/mr/hive-site.xml b/data/conf/mr/hive-site.xml
index 8cd1d6a0805..06eb0117a08 100644
--- a/data/conf/mr/hive-site.xml
+++ b/data/conf/mr/hive-site.xml
@@ -354,7 +354,7 @@
<property>
<name>hive.server2.webui.max.threads</name>
- <value>4</value>
+ <value>6</value>
</property>
<property>
diff --git
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java
index 0b4daf8d16e..6879c76be3b 100644
---
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java
+++
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java
@@ -139,6 +139,80 @@ private static void setHAConfigs(Configuration conf) {
conf.setInt(ConfVars.HIVE_ZOOKEEPER_CONNECTION_MAX_RETRIES.varname, 1);
}
+ @Test(timeout = 60000)
+ public void testHealthCheck() throws Exception {
+ String instanceId1 = UUID.randomUUID().toString();
+ miniHS2_1.start(getConfOverlay(instanceId1));
+
+ String leaderURL = "http://localhost:" +
+ hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
+ String healthCheckURL = "http://localhost:" +
+
hiveConf1.get(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT.varname)
+ "/ha-healthcheck/health-ha";
+
+ String leaderURLHealthCheckPort = "http://localhost:" +
+
hiveConf1.get(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT.varname)
+ "/leader";
+ String healthCheckURLWebUIPort = "http://localhost:" +
+ hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) +
"/ha-healthcheck/health-ha";
+
+ assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get());
+ assertEquals(true, miniHS2_1.isLeader());
+
+ assertEquals("true", sendGet(leaderURL));
+ assertEquals("true", sendGet(healthCheckURL));
+
+ assertEquals("Not Found", sendGet(leaderURLHealthCheckPort));
+ assertEquals("Not Found", sendGet(healthCheckURLWebUIPort));
+ }
+
+ @Test(timeout = 60000)
+ public void testHealthCheckAuth() throws Exception {
+ hiveConf1.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_ENABLE_CORS, true);
+ setPamConfs(hiveConf1);
+ PamAuthenticator pamAuthenticator = new
TestHS2HttpServerPam.TestPamAuthenticator(hiveConf1);
+ String instanceId1 = UUID.randomUUID().toString();
+ miniHS2_1.setPamAuthenticator(pamAuthenticator);
+ miniHS2_1.start(getSecureConfOverlay(instanceId1));
+
+ String leaderURL = "http://localhost:" +
+ hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
+ String healthCheckURL = "http://localhost:" +
+
hiveConf1.get(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT.varname)
+ "/ha-healthcheck/health-ha";
+
+ String leaderURLHealthCheckPort = "http://localhost:" +
+
hiveConf1.get(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT.varname)
+ "/leader";
+ String healthCheckURLWebUIPort = "http://localhost:" +
+ hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) +
"/ha-healthcheck/health-ha";
+
+ assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get());
+ assertEquals(true, miniHS2_1.isLeader());
+
+ assertEquals("true", sendGet(leaderURL, true, true));
+ assertEquals("true", sendGet(healthCheckURL, true, true));
+
+ try {
+ sendGet(leaderURLHealthCheckPort, true, true);
+ } catch (AssertionError e) {
+ assertTrue(e.getMessage().contains("Not Found"));
+ } catch (Exception e) {
+ fail("Expected AssertionError");
+ }
+
+ assertEquals("Not Found", sendGet(healthCheckURLWebUIPort, true, true));
+ assertEquals("Method Not Allowed", sendDelete(healthCheckURL, true, true));
+ assertEquals("Method Not Allowed", sendDelete(healthCheckURLWebUIPort,
true, true));
+
+ try {
+ sendDelete(leaderURLHealthCheckPort, true, true);
+ } catch (AssertionError e) {
+ assertTrue(e.getMessage().contains("Not Found"));
+ } catch (Exception e) {
+ fail("Expected AssertionError");
+ }
+
+ String resp = sendDelete(leaderURL, true, true);
+ assertTrue(resp.contains("Failover successful!"));
+ }
+
@Test(timeout = 60000)
public void testActivePassiveHA() throws Exception {
String instanceId1 = UUID.randomUUID().toString();
@@ -148,12 +222,20 @@ public void testActivePassiveHA() throws Exception {
assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get());
assertEquals(true, miniHS2_1.isLeader());
- String url = "http://localhost:" +
hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
+ String url = "http://localhost:" +
+ hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
assertEquals("true", sendGet(url));
+ String healthCheckURL = "http://localhost:" +
+
hiveConf1.get(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT.varname)
+ "/ha-healthcheck/health-ha";
+ assertEquals("true", sendGet(healthCheckURL));
assertEquals(false, miniHS2_2.isLeader());
- url = "http://localhost:" +
hiveConf2.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
+ url = "http://localhost:" +
+ hiveConf2.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
assertEquals("false", sendGet(url));
+ healthCheckURL = "http://localhost:" +
+
hiveConf2.get(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT.varname)
+ "/ha-healthcheck/health-ha";
+ assertEquals("false", sendGet(healthCheckURL));
url = "http://localhost:" +
hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/peers";
String resp = sendGet(url);
@@ -190,8 +272,12 @@ public void testActivePassiveHA() throws Exception {
assertEquals(true, miniHS2_2.getIsLeaderTestFuture().get());
assertEquals(true, miniHS2_2.isLeader());
- url = "http://localhost:" +
hiveConf2.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
+ url = "http://localhost:" +
+ hiveConf2.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
assertEquals("true", sendGet(url));
+ healthCheckURL = "http://localhost:" +
+
hiveConf2.get(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT.varname)
+ "/ha-healthcheck/health-ha";
+ assertEquals("true", sendGet(healthCheckURL));
while (client.getAll().size() != 1) {
Thread.sleep(100);
@@ -231,8 +317,12 @@ public void testActivePassiveHA() throws Exception {
miniHS2_1.start(getConfOverlay(instanceId1));
assertEquals(false, miniHS2_1.isLeader());
- url = "http://localhost:" +
hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
+ url = "http://localhost:" +
+ hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
assertEquals("false", sendGet(url));
+ healthCheckURL = "http://localhost:" +
+
hiveConf1.get(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT.varname)
+ "/ha-healthcheck/health-ha";
+ assertEquals("false", sendGet(healthCheckURL));
while (client.getAll().size() != 2) {
Thread.sleep(100);
@@ -280,12 +370,20 @@ public void
testConnectionActivePassiveHAServiceDiscovery() throws Exception {
assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get());
assertEquals(true, miniHS2_1.isLeader());
- String url = "http://localhost:" +
hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
+ String url = "http://localhost:" +
+ hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
assertEquals("true", sendGet(url));
+ String healthCheckURL = "http://localhost:" +
+
hiveConf1.get(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT.varname)
+ "/ha-healthcheck/health-ha";
+ assertEquals("true", sendGet(healthCheckURL));
assertEquals(false, miniHS2_2.isLeader());
- url = "http://localhost:" +
hiveConf2.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
+ url = "http://localhost:" +
+ hiveConf2.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
assertEquals("false", sendGet(url));
+ healthCheckURL = "http://localhost:" +
+
hiveConf2.get(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT.varname)
+ "/ha-healthcheck/health-ha";
+ assertEquals("false", sendGet(healthCheckURL));
// miniHS2_1 will be leader
String zkConnectString = zkServer.getConnectString();
@@ -347,11 +445,16 @@ public void testManualFailover() throws Exception {
miniHS2_2.start(confOverlay);
String url1 = "http://localhost:" +
hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
String url2 = "http://localhost:" +
hiveConf2.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
+ String healthCheckURL1 = "http://localhost:" +
+
hiveConf1.get(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT.varname)
+ "/ha-healthcheck/health-ha";
+ String healthCheckURL2 = "http://localhost:" +
+
hiveConf2.get(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT.varname)
+ "/ha-healthcheck/health-ha";
// when we start miniHS2_1 will be leader (sequential start)
assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get());
assertEquals(true, miniHS2_1.isLeader());
assertEquals("true", sendGet(url1, true, true));
+ assertEquals("true", sendGet(healthCheckURL1, true, true));
// trigger failover on miniHS2_1
String resp = sendDelete(url1, true, true);
@@ -361,11 +464,13 @@ public void testManualFailover() throws Exception {
assertEquals(true, miniHS2_1.getNotLeaderTestFuture().get());
assertEquals(false, miniHS2_1.isLeader());
assertEquals("false", sendGet(url1, true, true));
+ assertEquals("false", sendGet(healthCheckURL1, true, true));
// make sure miniHS2_2 is the new leader
assertEquals(true, miniHS2_2.getIsLeaderTestFuture().get());
assertEquals(true, miniHS2_2.isLeader());
assertEquals("true", sendGet(url2, true, true));
+ assertEquals("true", sendGet(healthCheckURL2, true, true));
// send failover request again to miniHS2_1 and get a failure
resp = sendDelete(url1, true, true);
@@ -379,8 +484,10 @@ public void testManualFailover() throws Exception {
assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get());
assertEquals(true, miniHS2_1.isLeader());
assertEquals("true", sendGet(url1, true, true));
+ assertEquals("true", sendGet(healthCheckURL1, true, true));
assertEquals(true, miniHS2_2.getNotLeaderTestFuture().get());
assertEquals("false", sendGet(url2, true, true));
+ assertEquals("false", sendGet(healthCheckURL2, true, true));
assertEquals(false, miniHS2_2.isLeader());
} finally {
resetFailoverConfs();
@@ -403,11 +510,15 @@ public void testManualFailoverUnauthorized() throws
Exception {
confOverlay.put(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname,
"clidriverTest");
miniHS2_2.start(confOverlay);
- String url1 = "http://localhost:" +
hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
+ String url1 = "http://localhost:" +
+ hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
+ String healthCheckURL1 = "http://localhost:" +
+
hiveConf1.get(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT.varname)
+ "/ha-healthcheck/health-ha";
// when we start miniHS2_1 will be leader (sequential start)
assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get());
assertEquals(true, miniHS2_1.isLeader());
assertEquals("true", sendGet(url1, true));
+ assertEquals("true", sendGet(healthCheckURL1, true));
// trigger failover on miniHS2_1 without authorization header
assertTrue(sendDelete(url1, false).contains("Unauthorized"));
@@ -438,7 +549,10 @@ public void testNoConnectionOnPassive() throws Exception {
Map<String, String> confOverlay = getSecureConfOverlay(instanceId2);
miniHS2_2.setPamAuthenticator(pamAuthenticator2);
miniHS2_2.start(confOverlay);
- String url1 = "http://localhost:" +
hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
+ String url1 = "http://localhost:" +
+ hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
+ String healthCheckURL1 = "http://localhost:" +
+
hiveConf1.get(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT.varname)
+ "/ha-healthcheck/health-ha";
assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get());
assertEquals(true, miniHS2_1.isLeader());
@@ -454,6 +568,9 @@ public void testNoConnectionOnPassive() throws Exception {
Thread.sleep(100);
}
+ resp = sendDelete(healthCheckURL1, true);
+ assertTrue(resp, resp.contains("Method Not Allowed"));
+
assertEquals(true, miniHS2_2.getIsLeaderTestFuture().get());
assertEquals(true, miniHS2_2.isLeader());
@@ -496,6 +613,10 @@ public void testClientConnectionsOnFailover() throws
Exception {
miniHS2_2.start(confOverlay);
String url1 = "http://localhost:" +
hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
String url2 = "http://localhost:" +
hiveConf2.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
+ String healthCheckUrl1 = "http://localhost:" +
+
hiveConf1.get(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT.varname)
+ "/ha-healthcheck/health-ha";
+ String healthCheckUrl2 = "http://localhost:" +
+
hiveConf2.get(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT.varname)
+ "/ha-healthcheck/health-ha";
String zkJdbcUrl = miniHS2_1.getJdbcURL();
String zkConnectString = zkServer.getConnectString();
assertTrue(zkJdbcUrl.contains(zkConnectString));
@@ -504,6 +625,7 @@ public void testClientConnectionsOnFailover() throws
Exception {
assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get());
assertEquals(true, miniHS2_1.isLeader());
assertEquals("true", sendGet(url1, true));
+ assertEquals("true", sendGet(healthCheckUrl1, true));
// before failover, check if we are getting connection from miniHS2_1
String hs2_1_directUrl = "jdbc:hive2://" + miniHS2_1.getHost() + ":" +
miniHS2_1.getBinaryPort() +
@@ -523,15 +645,20 @@ public void testClientConnectionsOnFailover() throws
Exception {
Thread.sleep(100);
}
+ resp = sendDelete(healthCheckUrl1, true);
+ assertTrue(resp.contains("Method Not Allowed"));
+
// make sure miniHS2_1 is not leader
assertEquals(true, miniHS2_1.getNotLeaderTestFuture().get());
assertEquals(false, miniHS2_1.isLeader());
assertEquals("false", sendGet(url1, true));
+ assertEquals("false", sendGet(healthCheckUrl1, true));
// make sure miniHS2_2 is the new leader
assertEquals(true, miniHS2_2.getIsLeaderTestFuture().get());
assertEquals(true, miniHS2_2.isLeader());
assertEquals("true", sendGet(url2, true));
+ assertEquals("true", sendGet(healthCheckUrl2, true));
// when we make a new connection we should get it from miniHS2_2 this
time
String hs2_2_directUrl = "jdbc:hive2://" + miniHS2_2.getHost() + ":" +
miniHS2_2.getHttpPort() +
@@ -555,8 +682,10 @@ public void testClientConnectionsOnFailover() throws
Exception {
assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get());
assertEquals(true, miniHS2_1.isLeader());
assertEquals("true", sendGet(url1, true));
+ assertEquals("true", sendGet(healthCheckUrl1, true));
assertEquals(true, miniHS2_2.getNotLeaderTestFuture().get());
assertEquals("false", sendGet(url2, true));
+ assertEquals("false", sendGet(healthCheckUrl2, true));
assertEquals(false, miniHS2_2.isLeader());
// make sure miniHS2_2 closes all its connections
while (miniHS2_2.getOpenSessionsCount() != 0) {
diff --git
a/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java
b/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java
index 069d58c6a0b..f83cc453bcf 100644
---
a/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java
+++
b/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java
@@ -36,15 +36,17 @@ public abstract class AbstractHiveService {
private int binaryPort;
private int httpPort;
private int webPort;
+ private int healthCheckHAPort;
private boolean startedHiveService = false;
private List<String> addedProperties = new ArrayList<String>();
- public AbstractHiveService(HiveConf hiveConf, String hostname, int
binaryPort, int httpPort, int webPort) {
+ AbstractHiveService(HiveConf hiveConf, String hostname, int binaryPort, int
httpPort, int webPort, int healthCheckHAPort) {
this.hiveConf = hiveConf;
this.hostname = hostname;
this.binaryPort = binaryPort;
this.httpPort = httpPort;
this.webPort = webPort;
+ this.healthCheckHAPort = healthCheckHAPort;
}
/**
@@ -142,6 +144,10 @@ public int getWebPort() {
return webPort;
}
+ public int getHealthCheckHAPort() {
+ return healthCheckHAPort;
+ }
+
public boolean isStarted() {
return startedHiveService;
}
diff --git
a/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
b/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
index 27fdad30a7f..166ab90f642 100644
--- a/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
+++ b/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
@@ -292,6 +292,8 @@ private MiniHS2(HiveConf hiveConf, MiniClusterType
miniClusterType, boolean useM
(usePortsFromConf ?
hiveConf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT) :
MetaStoreTestUtils
.findFreePort()),
(usePortsFromConf ?
hiveConf.getIntVar(ConfVars.HIVE_SERVER2_WEBUI_PORT) : MetaStoreTestUtils
+ .findFreePort()),
+ (usePortsFromConf ?
hiveConf.getIntVar(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT) :
MetaStoreTestUtils
.findFreePort()));
hiveConf.setLongVar(ConfVars.HIVE_SERVER2_MAX_START_ATTEMPTS, 3l);
hiveConf.setTimeVar(ConfVars.HIVE_SERVER2_SLEEP_INTERVAL_BETWEEN_START_ATTEMPTS,
10,
@@ -379,6 +381,7 @@ private MiniHS2(HiveConf hiveConf, MiniClusterType
miniClusterType, boolean useM
hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT, getBinaryPort());
hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT, getHttpPort());
hiveConf.setIntVar(ConfVars.HIVE_SERVER2_WEBUI_PORT, getWebPort());
+
hiveConf.setIntVar(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT,
getHealthCheckHAPort());
Path scratchDir = new Path(baseFsDir, "scratch");
// Create root scratchdir with write all, so that user impersonation has
no issues.
diff --git a/service/src/java/org/apache/hive/service/server/HiveServer2.java
b/service/src/java/org/apache/hive/service/server/HiveServer2.java
index 8c567c31616..8477e324b1e 100644
--- a/service/src/java/org/apache/hive/service/server/HiveServer2.java
+++ b/service/src/java/org/apache/hive/service/server/HiveServer2.java
@@ -121,6 +121,7 @@
import org.apache.hive.service.cli.thrift.ThriftCLIService;
import org.apache.hive.service.cli.thrift.ThriftHttpCLIService;
import org.apache.hive.service.servlet.HS2LeadershipStatus;
+import org.apache.hive.service.servlet.HS2LeadershipManager;
import org.apache.hive.service.servlet.HS2Peers;
import org.apache.hive.service.servlet.LDAPAuthenticationFilter;
import org.apache.hive.service.servlet.LoginServlet;
@@ -404,91 +405,11 @@ public synchronized void init(HiveConf hiveConf) {
// Set the default JspFactory to avoid NPE while opening the home
page
JspFactory.setDefaultFactory(new
org.apache.jasper.runtime.JspFactoryImpl());
}
- HttpServer.Builder builder = new HttpServer.Builder("hiveserver2");
- builder.setPort(webUIPort).setConf(hiveConf);
- builder.setHost(webHost);
- builder.setMaxThreads(
- hiveConf.getIntVar(ConfVars.HIVE_SERVER2_WEBUI_MAX_THREADS));
- builder.setAdmins(hiveConf.getVar(ConfVars.USERS_IN_ADMIN_ROLE));
- // SessionManager is initialized
- builder.setContextAttribute("hive.sm",
- cliService.getSessionManager());
- hiveConf.set("startcode",
- String.valueOf(System.currentTimeMillis()));
- if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL)) {
- String keyStorePath = hiveConf.getVar(
- ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PATH);
- if (StringUtils.isBlank(keyStorePath)) {
- throw new IllegalArgumentException(
- ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PATH.varname
- + " Not configured for SSL connection");
- }
-
builder.setKeyStorePassword(ShimLoader.getHadoopShims().getPassword(
- hiveConf,
ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PASSWORD.varname));
- builder.setKeyStorePath(keyStorePath);
-
builder.setKeyStoreType(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_TYPE));
- builder.setKeyManagerFactoryAlgorithm(
-
hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYMANAGERFACTORY_ALGORITHM));
- builder.setExcludeCiphersuites(
-
hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_EXCLUDE_CIPHERSUITES));
- builder.setUseSSL(true);
- }
- if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SPNEGO)) {
- String spnegoPrincipal = hiveConf.getVar(
- ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_PRINCIPAL);
- String spnegoKeytab = hiveConf.getVar(
- ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_KEYTAB);
- if (StringUtils.isBlank(spnegoPrincipal) ||
StringUtils.isBlank(spnegoKeytab)) {
- throw new IllegalArgumentException(
- ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_PRINCIPAL.varname
- + "/" + ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_KEYTAB.varname
- + " Not configured for SPNEGO authentication");
- }
- builder.setSPNEGOPrincipal(spnegoPrincipal);
- builder.setSPNEGOKeytab(spnegoKeytab);
- builder.setUseSPNEGO(true);
- }
- if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_ENABLE_CORS)) {
- builder.setEnableCORS(true);
- String allowedOrigins =
hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_ORIGINS);
- String allowedMethods =
hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_METHODS);
- String allowedHeaders =
hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_HEADERS);
- if (StringUtils.isBlank(allowedOrigins) ||
StringUtils.isBlank(allowedMethods) || StringUtils.isBlank(allowedHeaders)) {
- throw new IllegalArgumentException("CORS enabled. But " +
- ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_ORIGINS.varname + "/"
+
- ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_METHODS.varname + "/"
+
- ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_HEADERS.varname + "/"
+
- " is not configured");
- }
- builder.setAllowedOrigins(allowedOrigins);
- builder.setAllowedMethods(allowedMethods);
- builder.setAllowedHeaders(allowedHeaders);
- LOG.info("CORS enabled - allowed-origins: {} allowed-methods: {}
allowed-headers: {}", allowedOrigins,
- allowedMethods, allowedHeaders);
- }
- if(hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_XFRAME_ENABLED)){
-
builder.configureXFrame(true).setXFrameOption(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_XFRAME_VALUE));
- }
- if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_PAM)) {
- if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL)) {
- String hiveServer2PamServices =
hiveConf.getVar(ConfVars.HIVE_SERVER2_PAM_SERVICES);
- if (hiveServer2PamServices == null ||
hiveServer2PamServices.isEmpty()) {
- throw new
IllegalArgumentException(ConfVars.HIVE_SERVER2_PAM_SERVICES.varname + " are not
configured.");
- }
- builder.setPAMAuthenticator(pamAuthenticator == null ? new
PamAuthenticator(hiveConf) : pamAuthenticator);
- builder.setUsePAM(true);
- } else if (hiveConf.getBoolVar(ConfVars.HIVE_IN_TEST)) {
- builder.setPAMAuthenticator(pamAuthenticator == null ? new
PamAuthenticator(hiveConf) : pamAuthenticator);
- builder.setUsePAM(true);
- } else {
- throw new
IllegalArgumentException(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL.varname + " has
false value. It is recommended to set to true when PAM is used.");
- }
- }
+ HttpServer.Builder builder = createHttpServerBuilder(webHost,
webUIPort, "hiveserver2", "/",
+ hiveConf, cliService, pamAuthenticator);
if (serviceDiscovery && activePassiveHA) {
- builder.setContextAttribute("hs2.isLeader", isLeader);
- builder.setContextAttribute("hs2.failover.callback", new
FailoverHandlerCallback(hs2HARegistry));
- builder.setContextAttribute("hiveconf", hiveConf);
- builder.addServlet("leader", HS2LeadershipStatus.class);
+ addHAContextAttributes(builder, hiveConf);
+ builder.addServlet("leader", HS2LeadershipManager.class);
builder.addServlet("peers", HS2Peers.class);
}
builder.addServlet("llap", LlapServlet.class);
@@ -507,10 +428,11 @@ public synchronized void init(HiveConf hiveConf) {
if (ldapAuthService != null) {
webServer.addServlet("login", "/login", new ServletHolder(new
LoginServlet(ldapAuthService)));
}
+ initHAHealthChecker(webServer, hiveConf);
}
}
- } catch (IOException ie) {
- throw new ServiceException(ie);
+ } catch (IOException e) {
+ throw new ServiceException(e);
}
long otelExporterFrequency =
@@ -536,6 +458,103 @@ public synchronized void init(HiveConf hiveConf) {
// Extra time for releasing the resources if timeout sets to 0
ShutdownHookManager.addGracefulShutDownHook(() -> graceful_stop(),
timeout == 0 ? 30 : timeout);
}
+
+ private void addHAContextAttributes(HttpServer.Builder builder, HiveConf
hiveConf) {
+ builder.setContextAttribute("hs2.isLeader", isLeader);
+ builder.setContextAttribute("hs2.failover.callback", new
FailoverHandlerCallback(hs2HARegistry));
+ }
+
+ private static HttpServer.Builder createHttpServerBuilder(String webHost,
int port, String name, String contextPath,
+ HiveConf hiveConf, CLIService cliService, PamAuthenticator
pamAuthenticator) throws IOException {
+ HttpServer.Builder builder = new HttpServer.Builder(name);
+ builder.setConf(hiveConf);
+ builder.setHost(webHost);
+ builder.setPort(port);
+ builder.setContextPath(contextPath);
+
builder.setMaxThreads(hiveConf.getIntVar(ConfVars.HIVE_SERVER2_WEBUI_MAX_THREADS));
+ builder.setAdmins(hiveConf.getVar(ConfVars.USERS_IN_ADMIN_ROLE));
+ // SessionManager is initialized
+ builder.setContextAttribute("hive.sm", cliService.getSessionManager());
+ hiveConf.set("startcode", String.valueOf(System.currentTimeMillis()));
+ if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL)) {
+ String keyStorePath =
hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PATH);
+ if (StringUtils.isBlank(keyStorePath)) {
+ throw new
IllegalArgumentException(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PATH.varname
+ + " Not configured for SSL connection");
+ }
+ builder.setKeyStorePassword(ShimLoader.getHadoopShims().getPassword(
+ hiveConf,
ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PASSWORD.varname));
+ builder.setKeyStorePath(keyStorePath);
+
builder.setKeyStoreType(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_TYPE));
+ builder.setKeyManagerFactoryAlgorithm(
+
hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYMANAGERFACTORY_ALGORITHM));
+
builder.setExcludeCiphersuites(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_EXCLUDE_CIPHERSUITES));
+ builder.setUseSSL(true);
+ }
+ if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SPNEGO)) {
+ String spnegoPrincipal =
hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_PRINCIPAL);
+ String spnegoKeytab =
hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_KEYTAB);
+ if (StringUtils.isBlank(spnegoPrincipal) ||
StringUtils.isBlank(spnegoKeytab)) {
+ throw new IllegalArgumentException(
+ ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_PRINCIPAL.varname
+ + "/" + ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_KEYTAB.varname
+ + " Not configured for SPNEGO authentication");
+ }
+ builder.setSPNEGOPrincipal(spnegoPrincipal);
+ builder.setSPNEGOKeytab(spnegoKeytab);
+ builder.setUseSPNEGO(true);
+ }
+ if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_ENABLE_CORS)) {
+ builder.setEnableCORS(true);
+ String allowedOrigins =
hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_ORIGINS);
+ String allowedMethods =
hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_METHODS);
+ String allowedHeaders =
hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_HEADERS);
+ if (StringUtils.isBlank(allowedOrigins) ||
StringUtils.isBlank(allowedMethods) || StringUtils.isBlank(allowedHeaders)) {
+ throw new IllegalArgumentException("CORS enabled. But " +
+ ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_ORIGINS.varname + "/" +
+ ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_METHODS.varname + "/" +
+ ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_HEADERS.varname + "/" +
+ " is not configured");
+ }
+ builder.setAllowedOrigins(allowedOrigins);
+ builder.setAllowedMethods(allowedMethods);
+ builder.setAllowedHeaders(allowedHeaders);
+ LOG.info("CORS enabled - allowed-origins: {} allowed-methods: {}
allowed-headers: {}", allowedOrigins,
+ allowedMethods, allowedHeaders);
+ }
+ if(hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_XFRAME_ENABLED)) {
+
builder.configureXFrame(true).setXFrameOption(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_XFRAME_VALUE));
+ }
+ if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_PAM)) {
+ if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL)) {
+ String hiveServer2PamServices =
hiveConf.getVar(ConfVars.HIVE_SERVER2_PAM_SERVICES);
+ if (hiveServer2PamServices == null ||
hiveServer2PamServices.isEmpty()) {
+ throw new
IllegalArgumentException(ConfVars.HIVE_SERVER2_PAM_SERVICES.varname + " are not
configured.");
+ }
+ builder.setPAMAuthenticator(pamAuthenticator == null ? new
PamAuthenticator(hiveConf) : pamAuthenticator);
+ builder.setUsePAM(true);
+ } else if (hiveConf.getBoolVar(ConfVars.HIVE_IN_TEST)) {
+ builder.setPAMAuthenticator(pamAuthenticator == null ? new
PamAuthenticator(hiveConf) : pamAuthenticator);
+ builder.setUsePAM(true);
+ } else {
+ throw new
IllegalArgumentException(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL.varname + " has
false value. It is recommended to set to true when PAM is used.");
+ }
+ }
+
+ return builder;
+ }
+
+ private void initHAHealthChecker(HttpServer webServer, HiveConf hiveConf)
throws IOException {
+ if (serviceDiscovery && activePassiveHA) {
+ String webHost = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_BIND_HOST);
+ int healthCheckPort =
hiveConf.getIntVar(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_HEALTHCHECK_PORT);
+ HttpServer.Builder builder = createHttpServerBuilder(webHost,
healthCheckPort, "ha-healthcheck",
+ "/ha-healthcheck", hiveConf, cliService, pamAuthenticator);
+ addHAContextAttributes(builder, hiveConf);
+ builder.addServlet("health-ha", HS2LeadershipStatus.class);
+ webServer.createAndAddWebApp(builder);
+ }
+ }
private void logCompactionParameters(HiveConf hiveConf) {
LOG.info("Compaction HS2 parameters:");
diff --git
a/service/src/java/org/apache/hive/service/servlet/HS2LeadershipStatus.java
b/service/src/java/org/apache/hive/service/servlet/HS2LeadershipManager.java
similarity index 70%
copy from
service/src/java/org/apache/hive/service/servlet/HS2LeadershipStatus.java
copy to
service/src/java/org/apache/hive/service/servlet/HS2LeadershipManager.java
index f5dde7efcf7..0c10fcc500d 100644
--- a/service/src/java/org/apache/hive/service/servlet/HS2LeadershipStatus.java
+++ b/service/src/java/org/apache/hive/service/servlet/HS2LeadershipManager.java
@@ -21,11 +21,9 @@
import java.util.concurrent.atomic.AtomicBoolean;
import javax.servlet.ServletContext;
-import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-import org.apache.hive.http.HttpConstants;
import org.apache.hive.http.HttpServer;
import org.apache.hive.service.server.HiveServer2;
import org.slf4j.Logger;
@@ -34,43 +32,13 @@
import com.fasterxml.jackson.databind.ObjectMapper;
/**
- * Returns "true" if this HS2 instance is leader else "false".
+ * Extends read-only HS2LeadershipStatus servlet to provide a DELETE method
for triggering failover.
* Invoking a "DELETE" method on this endpoint will trigger a failover if this
instance is a leader.
* hadoop.security.instrumentation.requires.admin should be set to true and
current user has to be in admin ACLS
- * for accessing any of these endpoints.
+ * for accessing this endpoint.
*/
-public class HS2LeadershipStatus extends HttpServlet {
- private static final Logger LOG =
LoggerFactory.getLogger(HS2LeadershipStatus.class);
-
- @Override
- public void doGet(HttpServletRequest request, HttpServletResponse response)
throws IOException {
- // admin check -
- // allows when hadoop.security.instrumentation.requires.admin is set to
false
- // when hadoop.security.instrumentation.requires.admin is set to true,
checks if hadoop.security.authorization
- // is true and if the logged in user (via PAM or SPNEGO + kerberos) is in
hive.users.in.admin.role list
- final ServletContext context = getServletContext();
- if (!HttpServer.isInstrumentationAccessAllowed(context, request,
response)) {
- LOG.warn("Unauthorized to perform GET action. remoteUser: {}",
request.getRemoteUser());
- return;
- }
-
- setResponseHeaders(response);
-
- ServletContext ctx = getServletContext();
- AtomicBoolean isLeader = (AtomicBoolean) ctx.getAttribute("hs2.isLeader");
- LOG.info("Returning isLeader: {}", isLeader);
- ObjectMapper mapper = new ObjectMapper();
- mapper.writerWithDefaultPrettyPrinter().writeValue(response.getWriter(),
isLeader);
- response.setStatus(HttpServletResponse.SC_OK);
- response.flushBuffer();
- }
-
- private void setResponseHeaders(final HttpServletResponse response) {
- response.setContentType(HttpConstants.CONTENT_TYPE_JSON);
- response.setHeader(HttpConstants.ACCESS_CONTROL_ALLOW_METHODS,
- HttpConstants.METHOD_GET + "," + HttpConstants.METHOD_DELETE);
- response.setHeader(HttpConstants.ACCESS_CONTROL_ALLOW_ORIGIN,
HttpConstants.WILDCARD);
- }
+public class HS2LeadershipManager extends HS2LeadershipStatus {
+ private static final Logger LOG =
LoggerFactory.getLogger(HS2LeadershipManager.class);
private class FailoverResponse {
private boolean success;
diff --git
a/service/src/java/org/apache/hive/service/servlet/HS2LeadershipStatus.java
b/service/src/java/org/apache/hive/service/servlet/HS2LeadershipStatus.java
index f5dde7efcf7..daa0b520304 100644
--- a/service/src/java/org/apache/hive/service/servlet/HS2LeadershipStatus.java
+++ b/service/src/java/org/apache/hive/service/servlet/HS2LeadershipStatus.java
@@ -27,7 +27,6 @@
import org.apache.hive.http.HttpConstants;
import org.apache.hive.http.HttpServer;
-import org.apache.hive.service.server.HiveServer2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -35,9 +34,8 @@
/**
* Returns "true" if this HS2 instance is leader else "false".
- * Invoking a "DELETE" method on this endpoint will trigger a failover if this
instance is a leader.
* hadoop.security.instrumentation.requires.admin should be set to true and
current user has to be in admin ACLS
- * for accessing any of these endpoints.
+ * for accessing this endpoint.
*/
public class HS2LeadershipStatus extends HttpServlet {
private static final Logger LOG =
LoggerFactory.getLogger(HS2LeadershipStatus.class);
@@ -65,82 +63,10 @@ public void doGet(HttpServletRequest request,
HttpServletResponse response) thro
response.flushBuffer();
}
- private void setResponseHeaders(final HttpServletResponse response) {
+ protected void setResponseHeaders(final HttpServletResponse response) {
response.setContentType(HttpConstants.CONTENT_TYPE_JSON);
response.setHeader(HttpConstants.ACCESS_CONTROL_ALLOW_METHODS,
- HttpConstants.METHOD_GET + "," + HttpConstants.METHOD_DELETE);
+ HttpConstants.METHOD_GET + "," + HttpConstants.METHOD_DELETE);
response.setHeader(HttpConstants.ACCESS_CONTROL_ALLOW_ORIGIN,
HttpConstants.WILDCARD);
}
-
- private class FailoverResponse {
- private boolean success;
- private String message;
-
- FailoverResponse() {
- }
-
- public boolean isSuccess() {
- return success;
- }
-
- public void setSuccess(final boolean success) {
- this.success = success;
- }
-
- public String getMessage() {
- return message;
- }
-
- public void setMessage(final String message) {
- this.message = message;
- }
- }
-
- @Override
- public void doDelete(final HttpServletRequest request, final
HttpServletResponse response) throws IOException {
- // strict admin check -
- // allows ONLY if hadoop.security.instrumentation.requires.admin is set to
true
- // when hadoop.security.instrumentation.requires.admin is set to true,
checks if hadoop.security.authorization
- // is true and if the logged in user (via PAM or SPNEGO + kerberos) is in
hive.users.in.admin.role list
- final ServletContext context = getServletContext();
- if (!HttpServer.isInstrumentationAccessAllowedStrict(context, request,
response)) {
- LOG.warn("Unauthorized to perform DELETE action. remoteUser: {}",
request.getRemoteUser());
- return;
- }
-
- setResponseHeaders(response);
-
- LOG.info("DELETE handler invoked for failover..");
- ObjectMapper mapper = new ObjectMapper();
- FailoverResponse failoverResponse = new FailoverResponse();
- AtomicBoolean isLeader = (AtomicBoolean)
context.getAttribute("hs2.isLeader");
- if (!isLeader.get()) {
- String msg = "Cannot failover an instance that is not a leader";
- LOG.info(msg);
- failoverResponse.setSuccess(false);
- failoverResponse.setMessage(msg);
- mapper.writerWithDefaultPrettyPrinter().writeValue(response.getWriter(),
failoverResponse);
- response.setStatus(HttpServletResponse.SC_FORBIDDEN);
- return;
- }
-
- HiveServer2.FailoverHandlerCallback failoverHandler =
(HiveServer2.FailoverHandlerCallback) context
- .getAttribute("hs2.failover.callback");
- try {
- String msg = "Failover successful!";
- LOG.info(msg);
- failoverHandler.failover();
- failoverResponse.setSuccess(true);
- failoverResponse.setMessage(msg);
- mapper.writerWithDefaultPrettyPrinter().writeValue(response.getWriter(),
failoverResponse);
- response.setStatus(HttpServletResponse.SC_OK);
- } catch (Exception e) {
- String errMsg = "Cannot perform failover of HS2 instance. err: " +
e.getMessage();
- LOG.error(errMsg, e);
- failoverResponse.setSuccess(false);
- failoverResponse.setMessage(errMsg);
- mapper.writerWithDefaultPrettyPrinter().writeValue(response.getWriter(),
failoverResponse);
- response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- }
- }
}
diff --git a/service/src/java/org/apache/hive/service/servlet/HS2Peers.java
b/service/src/java/org/apache/hive/service/servlet/HS2Peers.java
index 1650753d317..c40a7caac1f 100644
--- a/service/src/java/org/apache/hive/service/servlet/HS2Peers.java
+++ b/service/src/java/org/apache/hive/service/servlet/HS2Peers.java
@@ -88,7 +88,7 @@ public void doGet(HttpServletRequest request,
HttpServletResponse response) thro
response.setHeader(HttpConstants.ACCESS_CONTROL_ALLOW_ORIGIN,
HttpConstants.WILDCARD);
ServletContext ctx = getServletContext();
- HiveConf hiveConf = (HiveConf) ctx.getAttribute("hiveconf");
+ HiveConf hiveConf = (HiveConf)
ctx.getAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE);
HS2ActivePassiveHARegistry hs2Registry =
HS2ActivePassiveHARegistryClient.getClient(hiveConf);
HS2Instances instances = new HS2Instances(hs2Registry.getAll());
response.getWriter().write(instances.toJson());
diff --git a/service/src/resources/hive-webapps/ha-healthcheck/WEB-INF/web.xml
b/service/src/resources/hive-webapps/ha-healthcheck/WEB-INF/web.xml
new file mode 100644
index 00000000000..53204f478ac
--- /dev/null
+++ b/service/src/resources/hive-webapps/ha-healthcheck/WEB-INF/web.xml
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<web-app xmlns="http://xmlns.jcp.org/xml/ns/javaee"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/javaee
+ http://xmlns.jcp.org/xml/ns/javaee/web-app_3_1.xsd"
+ version="3.1"
+ metadata-complete="false">
+
+</web-app>
+