From 5398a734c2cb8e91972477d1d4789b519ed67b12 Mon Sep 17 00:00:00 2001 From: Dmitriy Fingerman Date: Fri, 21 Feb 2025 15:41:15 -0500 Subject: [PATCH] HIVE-28775: HiveServer2: enabling HA Leader endpoint on a different port than WebUI --- .../org/apache/hadoop/hive/conf/HiveConf.java | 2 + .../java/org/apache/hive/http/HttpServer.java | 272 +++++++++++++----- data/conf/hive-site.xml | 2 +- data/conf/mr/hive-site.xml | 2 +- .../apache/hive/jdbc/TestActivePassiveHA.java | 101 +++++++ .../jdbc/miniHS2/AbstractHiveService.java | 8 +- .../org/apache/hive/jdbc/miniHS2/MiniHS2.java | 3 + .../hive/service/server/HiveServer2.java | 186 ++++++------ .../service/servlet/HS2HealthHAStatus.java | 31 ++ .../hive-webapps/health-ha/WEB-INF/web.xml | 10 + 10 files changed, 459 insertions(+), 158 deletions(-) create mode 100644 service/src/java/org/apache/hive/service/servlet/HS2HealthHAStatus.java create mode 100644 service/src/resources/hive-webapps/health-ha/WEB-INF/web.xml diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 107a848e78cd..fa8f3bf57ba8 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -3979,6 +3979,8 @@ public static enum ConfVars { HIVE_SERVER2_WEBUI_BIND_HOST("hive.server2.webui.host", "0.0.0.0", "The host address the HiveServer2 WebUI will listen on"), HIVE_SERVER2_WEBUI_PORT("hive.server2.webui.port", 10002, "The port the HiveServer2 WebUI will listen on. This can be" + "set to 0 or a negative integer to disable the web UI"), + HIVE_SERVER2_HEALTH_HA_PORT("hive.server2.health.ha.port", 11002, "The port the HiveServer2 health-ha web app will listen on. 
This can be" + + "set to 0 or a negative integer to disable HS2 health-ha checking"), HIVE_SERVER2_WEBUI_MAX_THREADS("hive.server2.webui.max.threads", 50, "The max HiveServer2 WebUI threads"), HIVE_SERVER2_WEBUI_USE_SSL("hive.server2.webui.use.ssl", false, "Set this to true for using SSL encryption for HiveServer2 WebUI."), diff --git a/common/src/java/org/apache/hive/http/HttpServer.java b/common/src/java/org/apache/hive/http/HttpServer.java index f884ff8df247..f4c1709d3d29 100644 --- a/common/src/java/org/apache/hive/http/HttpServer.java +++ b/common/src/java/org/apache/hive/http/HttpServer.java @@ -82,15 +82,16 @@ import org.eclipse.jetty.security.ConstraintMapping; import org.eclipse.jetty.security.ConstraintSecurityHandler; import org.eclipse.jetty.security.LoginService; -import org.eclipse.jetty.server.Connector; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.HttpConfiguration; import org.eclipse.jetty.server.HttpConnectionFactory; import org.eclipse.jetty.server.LowResourceMonitor; +import org.eclipse.jetty.server.Request; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.ContextHandler.Context; import org.eclipse.jetty.server.handler.ContextHandlerCollection; import org.eclipse.jetty.server.ServerConnector; +import org.eclipse.jetty.server.handler.HandlerCollection; import org.eclipse.jetty.servlet.DefaultServlet; import org.eclipse.jetty.servlet.FilterHolder; import org.eclipse.jetty.servlet.FilterMapping; @@ -135,9 +136,10 @@ public class HttpServer { private final String name; - private String appDir; - private WebAppContext webAppContext; + private List webAppContexts; private Server webServer; + private QueuedThreadPool threadPool; + private PortHandlerWrapper portHandlerWrapper; /** * Create a status server on the given port. 
@@ -179,6 +181,7 @@ public static class Builder { new LinkedList>>(); private boolean disableDirListing = false; private final Map> globalFilters = new LinkedHashMap<>(); + private String contextPath; public Builder(String name) { Preconditions.checkArgument(name != null && !name.isEmpty(), "Name must be specified"); @@ -189,6 +192,10 @@ public HttpServer build() throws IOException { return new HttpServer(this); } + public void setContextPath(String contextPath) { + this.contextPath = contextPath; + } + public Builder setConf(HiveConf origConf) { this.conf = new HiveConf(origConf); origConf.stripHiddenConfigurations(conf); @@ -489,13 +496,13 @@ static boolean userHasAdministratorAccess(ServletContext servletContext, /** * Create the web context for the application of specified name */ - WebAppContext createWebAppContext(Builder b) { + WebAppContext createWebAppContext(Builder b) throws FileNotFoundException { WebAppContext ctx = new WebAppContext(); setContextAttributes(ctx.getServletContext(), b.contextAttrs); ctx.getServletContext().getSessionCookieConfig().setHttpOnly(true); ctx.setDisplayName(b.name); - ctx.setContextPath("/"); - ctx.setWar(appDir + "/" + b.name); + ctx.setContextPath(b.contextPath == null ? "/" : b.contextPath); + ctx.setWar(getWebAppsPath(b.name) + "/" + b.name); return ctx; } @@ -519,8 +526,9 @@ void setupSpnegoFilter(Builder b, ServletContextHandler ctx) throws IOException /** * Setup cross-origin requests (CORS) filter. 
* @param b - builder + * @param webAppContext - webAppContext */ - private void setupCORSFilter(Builder b) { + private void setupCORSFilter(Builder b, WebAppContext webAppContext) { FilterHolder holder = new FilterHolder(); holder.setClassName(CrossOriginFilter.class.getName()); Map params = new HashMap<>(); @@ -534,9 +542,110 @@ private void setupCORSFilter(Builder b) { } /** - * Create a channel connector for "http/https" requests + * Creates a port connector and initializes a web application that processes requests through the newly + * created port connector. + * + * @param builder - The builder object used to configure and create the port connector and web application. + * @return ContextHandlerCollection - A collection of request handlers associated with the new port connector, + * which includes the newly initialized web application. + */ + public ContextHandlerCollection addWebApp(Builder builder) throws IOException { + WebAppContext webAppContext = createWebAppContext(builder); + initWebAppContext(builder, webAppContext); + RewriteHandler rwHandler = createRewriteHandler(builder, webAppContext); + + ContextHandlerCollection portHandler = new ContextHandlerCollection(); + ServerConnector connector = addChannelConnector(threadPool.getQueueSize(), builder); + portHandler.addHandler(rwHandler); + + for (Pair> p : builder.servlets) { + addServlet(p.getFirst(), "/" + p.getFirst(), p.getSecond(), webAppContext); + } + + builder.globalFilters.forEach((k, v) -> + addFilter(k, v.getFirst(), v.getSecond(), webAppContext.getServletHandler())); + + // Add port handler to the global context handler + portHandlerWrapper.addHandler(connector, portHandler); + // Add the web application context to the global list of web application contexts + webAppContexts.add(webAppContext); + + return portHandler; + } + + /** + * Initializes the {@link WebAppContext} based on the provided configuration in the {@link Builder}. 
+ * The method sets up various filters and configurations for the web application context, including + * security and cross-origin resource sharing (CORS) settings, as well as header management. + * + *

<p>The method performs the following actions based on the {@code builder} configuration:</p>
+ *
+ * <ul>
+ *   <li>If {@code builder.useSPNEGO} is {@code true}, sets up the SPNEGO filter for Kerberos authentication.</li>
+ *   <li>If {@code builder.enableCORS} is {@code true}, sets up the CORS filter.</li>
+ *   <li>If {@code builder.xFrameEnabled} is {@code true}, configures the X-Frame-Options header filter.</li>
+ *   <li>If {@code builder.disableDirListing} is {@code true}, disables directory listing on the servlet.</li>
+ * </ul>
+ *
+ * + * @param builder The {@link Builder} object containing configuration options to customize the web application context. + * @param webAppContext The {@link WebAppContext} to which the request will be forwarded + * after the URI has been rewritten. + * @throws IOException If an I/O error occurs while initializing the web application context. + */ + private void initWebAppContext(Builder builder, WebAppContext webAppContext) throws IOException { + if (builder.useSPNEGO) { + // Secure the web server with kerberos + setupSpnegoFilter(builder, webAppContext); + } + + if (builder.enableCORS) { + setupCORSFilter(builder, webAppContext); + } + + Map xFrameParams = setHeaders(); + if (builder.xFrameEnabled) { + setupXframeFilter(xFrameParams, webAppContext); + } + + if (builder.disableDirListing) { + disableDirectoryListingOnServlet(webAppContext); + } + } + + /** + * Creates and configures a {@link RewriteHandler} that rewrites incoming request URIs + * based on predefined rules, and sets the specified {@link WebAppContext} as the + * handler for the rewritten requests. + * + *

<p>This method creates a {@link RewriteHandler} that rewrites requests to the root path + * ("/") to a new target URI specified by the {@code builder.contextRootRewriteTarget}. + * The URI rewrite is applied before forwarding the request to the given {@link WebAppContext}.</p>

+ * + * @param builder The builder object containing configuration values, such as the + * target for URI rewrite. + * @param webAppContext The {@link WebAppContext} to which the request will be forwarded + * after the URI has been rewritten. + * @return A {@link RewriteHandler} configured with the rewrite rule and the web application context. + */ + private RewriteHandler createRewriteHandler(Builder builder, WebAppContext webAppContext) { + RewriteHandler rwHandler = new RewriteHandler(); + rwHandler.setRewriteRequestURI(true); + rwHandler.setRewritePathInfo(false); + + RewriteRegexRule rootRule = new RewriteRegexRule(); + rootRule.setRegex("^/$"); + rootRule.setReplacement(builder.contextRootRewriteTarget); + rootRule.setTerminating(true); + + rwHandler.addRule(rootRule); + rwHandler.setHandler(webAppContext); + + return rwHandler; + } + + /** + * Create a channel connector for "http/https" requests and add it to the server */ - Connector createChannelConnector(int queueSize, Builder b) { + ServerConnector addChannelConnector(int queueSize, Builder b) { ServerConnector connector; final HttpConfiguration conf = new HttpConfiguration(); @@ -574,6 +683,8 @@ Connector createChannelConnector(int queueSize, Builder b) { connector.setReuseAddress(true); connector.setHost(b.host); connector.setPort(b.port); + + webServer.addConnector(connector); return connector; } @@ -604,7 +715,7 @@ void setContextAttributes(Context ctx, Map contextAttrs) { private void createWebServer(final Builder b) throws IOException { // Create the thread pool for the web server to handle HTTP requests - QueuedThreadPool threadPool = new QueuedThreadPool(); + threadPool = new QueuedThreadPool(); if (b.maxThreads > 0) { threadPool.setMaxThreads(b.maxThreads); } @@ -612,66 +723,33 @@ private void createWebServer(final Builder b) throws IOException { threadPool.setName(b.name + "-web"); this.webServer = new Server(threadPool); - this.appDir = getWebAppsPath(b.name); - this.webAppContext = 
createWebAppContext(b); - - if (b.useSPNEGO) { - // Secure the web server with kerberos - setupSpnegoFilter(b, webAppContext); - } - - if (b.enableCORS) { - setupCORSFilter(b); - } - - Map xFrameParams = setHeaders(); - if (b.xFrameEnabled) { - setupXframeFilter(b,xFrameParams); - } - - if (b.disableDirListing) { - disableDirectoryListingOnServlet(webAppContext); - } + this.webAppContexts = new LinkedList<>(); - initializeWebServer(b, threadPool.getMaxThreads()); + initializeWebServer(b); } - private void initializeWebServer(final Builder b, int queueSize) throws IOException { + private void initializeWebServer(final Builder b) throws IOException { // Set handling for low resource conditions. final LowResourceMonitor low = new LowResourceMonitor(webServer); low.setLowResourcesIdleTimeout(10000); webServer.addBean(low); - Connector connector = createChannelConnector(queueSize, b); - webServer.addConnector(connector); - - RewriteHandler rwHandler = new RewriteHandler(); - rwHandler.setRewriteRequestURI(true); - rwHandler.setRewritePathInfo(false); - - RewriteRegexRule rootRule = new RewriteRegexRule(); - rootRule.setRegex("^/$"); - rootRule.setReplacement(b.contextRootRewriteTarget); - rootRule.setTerminating(true); - - rwHandler.addRule(rootRule); - rwHandler.setHandler(webAppContext); - - // Configure web application contexts for the web server - ContextHandlerCollection contexts = new ContextHandlerCollection(); - contexts.addHandler(rwHandler); - webServer.setHandler(contexts); + // Configure the global context handler + portHandlerWrapper = new PortHandlerWrapper(); + webServer.setHandler(portHandlerWrapper); + // Configure the web server connector and port handler to listen on + ContextHandlerCollection portHandler = addWebApp(b); if (b.usePAM) { - setupPam(b, contexts); + setupPam(b, portHandlerWrapper); } - addServlet("jmx", "/jmx", JMXJsonServlet.class); addServlet("conf", "/conf", ConfServlet.class); addServlet("stacks", "/stacks", StackServlet.class); 
addServlet("conflog", "/conflog", Log4j2ConfiguratorServlet.class); + final String asyncProfilerHome = ProfileServlet.getAsyncProfilerHome(); if (asyncProfilerHome != null && !asyncProfilerHome.trim().isEmpty()) { addServlet("prof", "/prof", ProfileServlet.class); @@ -679,8 +757,7 @@ private void initializeWebServer(final Builder b, int queueSize) throws IOExcept if (Files.notExists(tmpDir)) { Files.createDirectories(tmpDir); } - ServletContextHandler genCtx = - new ServletContextHandler(contexts, "/prof-output"); + ServletContextHandler genCtx = new ServletContextHandler(portHandler, "/prof-output"); setContextAttributes(genCtx.getServletContext(), b.contextAttrs); genCtx.addServlet(ProfileOutputServlet.class, "/*"); genCtx.setResourceBase(tmpDir.toAbsolutePath().toString()); @@ -688,24 +765,15 @@ private void initializeWebServer(final Builder b, int queueSize) throws IOExcept } else { LOG.info("ASYNC_PROFILER_HOME env or -Dasync.profiler.home not specified. Disabling /prof endpoint.."); } - - for (Pair> p : b.servlets) { - addServlet(p.getFirst(), "/" + p.getFirst(), p.getSecond()); - } - - b.globalFilters.forEach((k, v) -> addFilter(k, v.getFirst(), v.getSecond(), webAppContext.getServletHandler())); - - ServletContextHandler staticCtx = - new ServletContextHandler(contexts, "/static"); - staticCtx.setResourceBase(appDir + "/static"); + ServletContextHandler staticCtx = new ServletContextHandler(portHandler, "/static"); + staticCtx.setResourceBase(getWebAppsPath(b.name) + "/static"); staticCtx.addServlet(DefaultServlet.class, "/*"); staticCtx.setDisplayName("static"); disableDirectoryListingOnServlet(staticCtx); String logDir = getLogDir(b.conf); if (logDir != null) { - ServletContextHandler logCtx = - new ServletContextHandler(contexts, "/logs"); + ServletContextHandler logCtx = new ServletContextHandler(portHandler, "/logs"); setContextAttributes(logCtx.getServletContext(), b.contextAttrs); if(b.useSPNEGO) { setupSpnegoFilter(b,logCtx); @@ -716,7 +784,7 @@ 
private void initializeWebServer(final Builder b, int queueSize) throws IOExcept } // Define the global filers for each servlet context except the staticCtx(css style). - Optional handlers = Optional.ofNullable(contexts.getHandlers()); + Optional handlers = Optional.ofNullable(portHandler.getHandlers()); handlers.ifPresent(hs -> Arrays.stream(hs) .filter(h -> h instanceof ServletContextHandler && !"static".equals(((ServletContextHandler) h).getDisplayName())) .forEach(h -> b.globalFilters.forEach((k, v) -> @@ -748,7 +816,7 @@ private Map getDefaultHeaders() { return headers; } - private void setupXframeFilter(Builder b, Map params) { + private void setupXframeFilter(Map params, WebAppContext webAppContext) { FilterHolder holder = new FilterHolder(); holder.setClassName(QuotingInputFilter.class.getName()); holder.setInitParameters(params); @@ -808,6 +876,15 @@ public void addServlet(String name, String pathSpec, if (name != null) { holder.setName(name); } + webAppContexts.get(0).addServlet(holder, pathSpec); + } + + private void addServlet(String name, String pathSpec, Class clazz, + WebAppContext webAppContext) { + ServletHolder holder = new ServletHolder(clazz); + if (name != null) { + holder.setName(name); + } webAppContext.addServlet(holder, pathSpec); } @@ -815,7 +892,7 @@ public void addServlet(String name, String pathSpec, ServletHolder holder) { if (name != null) { holder.setName(name); } - webAppContext.addServlet(holder, pathSpec); + webAppContexts.get(0).addServlet(holder, pathSpec); } public void addFilter(String name, String pathSpec, Filter filter, ServletHandler handler) { @@ -1026,4 +1103,59 @@ private void initHttpHeaderMap() { } } + /** + * A custom {@link ContextHandlerCollection} that maps server connectors (ports) to specific handler collections. + * This class allows for the association of different handlers with different ports, and ensures that requests + * are routed to the appropriate handler based on the port they came through. + * + *

The {@link PortHandlerWrapper} class overrides the {@link ContextHandlerCollection#handle} method to + * select the appropriate handler based on the request's port and delegate the request to that handler. + *

+ * + *

<p>This class uses a map to associate each {@link ServerConnector} (which represents a port) to a + * {@link HandlerCollection}. The {@link #addHandler(ServerConnector, HandlerCollection)} method allows handlers + * to be added for specific ports.</p>

+ */ + static class PortHandlerWrapper extends ContextHandlerCollection { + + /** Map of server connectors (ports) to their corresponding handler collections. */ + private final Map connectorToHandlerMap = new HashMap<>(); + + /** + * Adds a handler collection to the {@link PortHandlerWrapper} for a specific server connector (port). + * + * @param connector the {@link ServerConnector} representing the port to which the handler should be associated + * @param handler the {@link HandlerCollection} that will handle requests on the specified port + */ + public void addHandler(ServerConnector connector, HandlerCollection handler) { + connectorToHandlerMap.put(connector, handler); + addHandler(handler); + } + + /** + * Handles the HTTP request by determining which port the request came through and routing it to the appropriate handler. + * + * @param target the target of the request + * @param baseRequest the base request object + * @param request the {@link HttpServletRequest} object containing the request details + * @param response the {@link HttpServletResponse} object to send the response + * @throws IOException if an input or output exception occurs during the handling of the request + * @throws ServletException if a servlet-specific exception occurs during the handling of the request + */ + @Override + public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException { + // Determine the connector (port) the request came through + int port = request.getServerPort(); + + // Find the handler for the corresponding port + Handler handler = connectorToHandlerMap.entrySet().stream() + .filter(entry -> entry.getKey().getPort() == port) + .map(Map.Entry::getValue) + .findFirst() + .orElseThrow(() -> new IllegalStateException("No handler found for port " + port)); + + // Delegate the request to the handler + handler.handle(target, baseRequest, request, response); + } + } } diff --git 
a/data/conf/hive-site.xml b/data/conf/hive-site.xml index 94f9d0365426..e9b0480ab0c4 100644 --- a/data/conf/hive-site.xml +++ b/data/conf/hive-site.xml @@ -358,7 +358,7 @@ hive.server2.webui.max.threads - 4 + 6 diff --git a/data/conf/mr/hive-site.xml b/data/conf/mr/hive-site.xml index 8cd1d6a0805e..06eb0117a087 100644 --- a/data/conf/mr/hive-site.xml +++ b/data/conf/mr/hive-site.xml @@ -354,7 +354,7 @@ hive.server2.webui.max.threads - 4 + 6 diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java index 0b4daf8d16e4..df4d09ad0990 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestActivePassiveHA.java @@ -139,6 +139,72 @@ private static void setHAConfigs(Configuration conf) { conf.setInt(ConfVars.HIVE_ZOOKEEPER_CONNECTION_MAX_RETRIES.varname, 1); } + @Test(timeout = 60000) + public void testHealthCheckHA() throws Exception { + String instanceId1 = UUID.randomUUID().toString(); + miniHS2_1.start(getConfOverlay(instanceId1)); + + String leaderURL = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader"; + String healthCheckURL = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT.varname) + "/health-ha/leader"; + + String leaderURLHealthCheckPort = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT.varname) + "/leader"; + String healthCheckURLWebUIPort = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/health-ha/leader"; + + assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get()); + assertEquals(true, miniHS2_1.isLeader()); + + assertEquals("true", sendGet(leaderURL)); + assertEquals("true", sendGet(healthCheckURL)); + + assertEquals("Not Found", sendGet(leaderURLHealthCheckPort)); + assertEquals("Not Found", sendGet(healthCheckURLWebUIPort)); + } 
+ + @Test(timeout = 60000) + public void testHealthCheckHAAuth() throws Exception { + hiveConf1.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_ENABLE_CORS, true); + setPamConfs(hiveConf1); + PamAuthenticator pamAuthenticator = new TestHS2HttpServerPam.TestPamAuthenticator(hiveConf1); + String instanceId1 = UUID.randomUUID().toString(); + miniHS2_1.setPamAuthenticator(pamAuthenticator); + miniHS2_1.start(getSecureConfOverlay(instanceId1)); + + String leaderURL = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader"; + String healthCheckURL = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT.varname) + "/health-ha/leader"; + + String leaderURLHealthCheckPort = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT.varname) + "/leader"; + String healthCheckURLWebUIPort = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/health-ha/leader"; + + assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get()); + assertEquals(true, miniHS2_1.isLeader()); + + assertEquals("true", sendGet(leaderURL, true, true)); + assertEquals("true", sendGet(healthCheckURL, true, true)); + + try { + sendGet(leaderURLHealthCheckPort, true, true); + } catch (AssertionError e) { + assertTrue(e.getMessage().contains("Not Found")); + } catch (Exception e) { + fail("Expected AssertionError"); + } + + assertEquals("Not Found", sendGet(healthCheckURLWebUIPort, true, true)); + assertEquals("Method Not Allowed", sendDelete(healthCheckURL, true, true)); + assertEquals("Method Not Allowed", sendDelete(healthCheckURLWebUIPort, true, true)); + + try { + sendDelete(leaderURLHealthCheckPort, true, true); + } catch (AssertionError e) { + assertTrue(e.getMessage().contains("Not Found")); + } catch (Exception e) { + fail("Expected AssertionError"); + } + + String resp = sendDelete(leaderURL, true, true); + assertTrue(resp.contains("Failover successful!")); + } + @Test(timeout = 60000) public void 
testActivePassiveHA() throws Exception { String instanceId1 = UUID.randomUUID().toString(); @@ -150,10 +216,14 @@ public void testActivePassiveHA() throws Exception { assertEquals(true, miniHS2_1.isLeader()); String url = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader"; assertEquals("true", sendGet(url)); + String healthCheckURL = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT.varname) + "/health-ha/leader"; + assertEquals("true", sendGet(healthCheckURL)); assertEquals(false, miniHS2_2.isLeader()); url = "http://localhost:" + hiveConf2.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader"; assertEquals("false", sendGet(url)); + healthCheckURL = "http://localhost:" + hiveConf2.get(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT.varname) + "/health-ha/leader"; + assertEquals("false", sendGet(healthCheckURL)); url = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/peers"; String resp = sendGet(url); @@ -192,6 +262,8 @@ public void testActivePassiveHA() throws Exception { assertEquals(true, miniHS2_2.isLeader()); url = "http://localhost:" + hiveConf2.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader"; assertEquals("true", sendGet(url)); + healthCheckURL = "http://localhost:" + hiveConf2.get(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT.varname) + "/health-ha/leader"; + assertEquals("true", sendGet(healthCheckURL)); while (client.getAll().size() != 1) { Thread.sleep(100); @@ -233,6 +305,8 @@ public void testActivePassiveHA() throws Exception { assertEquals(false, miniHS2_1.isLeader()); url = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader"; assertEquals("false", sendGet(url)); + healthCheckURL = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT.varname) + "/health-ha/leader"; + assertEquals("false", sendGet(healthCheckURL)); while (client.getAll().size() != 2) { Thread.sleep(100); @@ -282,10 +356,14 @@ public void 
testConnectionActivePassiveHAServiceDiscovery() throws Exception { assertEquals(true, miniHS2_1.isLeader()); String url = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader"; assertEquals("true", sendGet(url)); + String healthCheckURL = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT.varname) + "/health-ha/leader"; + assertEquals("true", sendGet(healthCheckURL)); assertEquals(false, miniHS2_2.isLeader()); url = "http://localhost:" + hiveConf2.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader"; assertEquals("false", sendGet(url)); + healthCheckURL = "http://localhost:" + hiveConf2.get(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT.varname) + "/health-ha/leader"; + assertEquals("false", sendGet(healthCheckURL)); // miniHS2_1 will be leader String zkConnectString = zkServer.getConnectString(); @@ -347,11 +425,14 @@ public void testManualFailover() throws Exception { miniHS2_2.start(confOverlay); String url1 = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader"; String url2 = "http://localhost:" + hiveConf2.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader"; + String healthCheckURL1 = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT.varname) + "/health-ha/leader"; + String healthCheckURL2 = "http://localhost:" + hiveConf2.get(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT.varname) + "/health-ha/leader"; // when we start miniHS2_1 will be leader (sequential start) assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get()); assertEquals(true, miniHS2_1.isLeader()); assertEquals("true", sendGet(url1, true, true)); + assertEquals("true", sendGet(healthCheckURL1, true, true)); // trigger failover on miniHS2_1 String resp = sendDelete(url1, true, true); @@ -361,11 +442,13 @@ public void testManualFailover() throws Exception { assertEquals(true, miniHS2_1.getNotLeaderTestFuture().get()); assertEquals(false, miniHS2_1.isLeader()); assertEquals("false", 
sendGet(url1, true, true)); + assertEquals("false", sendGet(healthCheckURL1, true, true)); // make sure miniHS2_2 is the new leader assertEquals(true, miniHS2_2.getIsLeaderTestFuture().get()); assertEquals(true, miniHS2_2.isLeader()); assertEquals("true", sendGet(url2, true, true)); + assertEquals("true", sendGet(healthCheckURL2, true, true)); // send failover request again to miniHS2_1 and get a failure resp = sendDelete(url1, true, true); @@ -379,8 +462,10 @@ public void testManualFailover() throws Exception { assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get()); assertEquals(true, miniHS2_1.isLeader()); assertEquals("true", sendGet(url1, true, true)); + assertEquals("true", sendGet(healthCheckURL1, true, true)); assertEquals(true, miniHS2_2.getNotLeaderTestFuture().get()); assertEquals("false", sendGet(url2, true, true)); + assertEquals("false", sendGet(healthCheckURL2, true, true)); assertEquals(false, miniHS2_2.isLeader()); } finally { resetFailoverConfs(); @@ -404,10 +489,12 @@ public void testManualFailoverUnauthorized() throws Exception { miniHS2_2.start(confOverlay); String url1 = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader"; + String healthCheckURL1 = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT.varname) + "/health-ha/leader"; // when we start miniHS2_1 will be leader (sequential start) assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get()); assertEquals(true, miniHS2_1.isLeader()); assertEquals("true", sendGet(url1, true)); + assertEquals("true", sendGet(healthCheckURL1, true)); // trigger failover on miniHS2_1 without authorization header assertTrue(sendDelete(url1, false).contains("Unauthorized")); @@ -439,6 +526,7 @@ public void testNoConnectionOnPassive() throws Exception { miniHS2_2.setPamAuthenticator(pamAuthenticator2); miniHS2_2.start(confOverlay); String url1 = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader"; + String 
healthCheckURL1 = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT.varname) + "/health-ha/leader"; assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get()); assertEquals(true, miniHS2_1.isLeader()); @@ -454,6 +542,9 @@ public void testNoConnectionOnPassive() throws Exception { Thread.sleep(100); } + resp = sendDelete(healthCheckURL1, true); + assertTrue(resp, resp.contains("Method Not Allowed")); + assertEquals(true, miniHS2_2.getIsLeaderTestFuture().get()); assertEquals(true, miniHS2_2.isLeader()); @@ -496,6 +587,8 @@ public void testClientConnectionsOnFailover() throws Exception { miniHS2_2.start(confOverlay); String url1 = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader"; String url2 = "http://localhost:" + hiveConf2.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader"; + String healthCheckUrl1 = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT.varname) + "/health-ha/leader"; + String healthCheckUrl2 = "http://localhost:" + hiveConf2.get(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT.varname) + "/health-ha/leader"; String zkJdbcUrl = miniHS2_1.getJdbcURL(); String zkConnectString = zkServer.getConnectString(); assertTrue(zkJdbcUrl.contains(zkConnectString)); @@ -504,6 +597,7 @@ public void testClientConnectionsOnFailover() throws Exception { assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get()); assertEquals(true, miniHS2_1.isLeader()); assertEquals("true", sendGet(url1, true)); + assertEquals("true", sendGet(healthCheckUrl1, true)); // before failover, check if we are getting connection from miniHS2_1 String hs2_1_directUrl = "jdbc:hive2://" + miniHS2_1.getHost() + ":" + miniHS2_1.getBinaryPort() + @@ -523,15 +617,20 @@ public void testClientConnectionsOnFailover() throws Exception { Thread.sleep(100); } + resp = sendDelete(healthCheckUrl1, true); + assertTrue(resp.contains("Method Not Allowed")); + // make sure miniHS2_1 is not leader assertEquals(true, 
miniHS2_1.getNotLeaderTestFuture().get()); assertEquals(false, miniHS2_1.isLeader()); assertEquals("false", sendGet(url1, true)); + assertEquals("false", sendGet(healthCheckUrl1, true)); // make sure miniHS2_2 is the new leader assertEquals(true, miniHS2_2.getIsLeaderTestFuture().get()); assertEquals(true, miniHS2_2.isLeader()); assertEquals("true", sendGet(url2, true)); + assertEquals("true", sendGet(healthCheckUrl2, true)); // when we make a new connection we should get it from miniHS2_2 this time String hs2_2_directUrl = "jdbc:hive2://" + miniHS2_2.getHost() + ":" + miniHS2_2.getHttpPort() + @@ -555,8 +654,10 @@ public void testClientConnectionsOnFailover() throws Exception { assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get()); assertEquals(true, miniHS2_1.isLeader()); assertEquals("true", sendGet(url1, true)); + assertEquals("true", sendGet(healthCheckUrl1, true)); assertEquals(true, miniHS2_2.getNotLeaderTestFuture().get()); assertEquals("false", sendGet(url2, true)); + assertEquals("false", sendGet(healthCheckUrl2, true)); assertEquals(false, miniHS2_2.isLeader()); // make sure miniHS2_2 closes all its connections while (miniHS2_2.getOpenSessionsCount() != 0) { diff --git a/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java b/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java index 069d58c6a0b0..9b506d48572a 100644 --- a/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java +++ b/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java @@ -36,15 +36,17 @@ public abstract class AbstractHiveService { private int binaryPort; private int httpPort; private int webPort; + private int healthHAPort; private boolean startedHiveService = false; private List addedProperties = new ArrayList(); - public AbstractHiveService(HiveConf hiveConf, String hostname, int binaryPort, int httpPort, int webPort) { + AbstractHiveService(HiveConf hiveConf, String hostname, 
int binaryPort, int httpPort, int webPort, int healthHAPort) { this.hiveConf = hiveConf; this.hostname = hostname; this.binaryPort = binaryPort; this.httpPort = httpPort; this.webPort = webPort; + this.healthHAPort = healthHAPort; } /** @@ -142,6 +144,10 @@ public int getWebPort() { return webPort; } + public int getHealthHAPort() { + return healthHAPort; + } + public boolean isStarted() { return startedHiveService; } diff --git a/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java b/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java index 27fdad30a7f9..7b1d3c1472f0 100644 --- a/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java +++ b/itests/util/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java @@ -292,6 +292,8 @@ private MiniHS2(HiveConf hiveConf, MiniClusterType miniClusterType, boolean useM (usePortsFromConf ? hiveConf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT) : MetaStoreTestUtils .findFreePort()), (usePortsFromConf ? hiveConf.getIntVar(ConfVars.HIVE_SERVER2_WEBUI_PORT) : MetaStoreTestUtils + .findFreePort()), + (usePortsFromConf ? hiveConf.getIntVar(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT) : MetaStoreTestUtils .findFreePort())); hiveConf.setLongVar(ConfVars.HIVE_SERVER2_MAX_START_ATTEMPTS, 3l); hiveConf.setTimeVar(ConfVars.HIVE_SERVER2_SLEEP_INTERVAL_BETWEEN_START_ATTEMPTS, 10, @@ -379,6 +381,7 @@ private MiniHS2(HiveConf hiveConf, MiniClusterType miniClusterType, boolean useM hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT, getBinaryPort()); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT, getHttpPort()); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_WEBUI_PORT, getWebPort()); + hiveConf.setIntVar(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT, getHealthHAPort()); Path scratchDir = new Path(baseFsDir, "scratch"); // Create root scratchdir with write all, so that user impersonation has no issues. 
diff --git a/service/src/java/org/apache/hive/service/server/HiveServer2.java b/service/src/java/org/apache/hive/service/server/HiveServer2.java index 34d43ae2c826..513e835f03ba 100644 --- a/service/src/java/org/apache/hive/service/server/HiveServer2.java +++ b/service/src/java/org/apache/hive/service/server/HiveServer2.java @@ -119,6 +119,7 @@ import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService; import org.apache.hive.service.cli.thrift.ThriftCLIService; import org.apache.hive.service.cli.thrift.ThriftHttpCLIService; +import org.apache.hive.service.servlet.HS2HealthHAStatus; import org.apache.hive.service.servlet.HS2LeadershipStatus; import org.apache.hive.service.servlet.HS2Peers; import org.apache.hive.service.servlet.LDAPAuthenticationFilter; @@ -402,90 +403,9 @@ public synchronized void init(HiveConf hiveConf) { // Set the default JspFactory to avoid NPE while opening the home page JspFactory.setDefaultFactory(new org.apache.jasper.runtime.JspFactoryImpl()); } - HttpServer.Builder builder = new HttpServer.Builder("hiveserver2"); - builder.setPort(webUIPort).setConf(hiveConf); - builder.setHost(webHost); - builder.setMaxThreads( - hiveConf.getIntVar(ConfVars.HIVE_SERVER2_WEBUI_MAX_THREADS)); - builder.setAdmins(hiveConf.getVar(ConfVars.USERS_IN_ADMIN_ROLE)); - // SessionManager is initialized - builder.setContextAttribute("hive.sm", - cliService.getSessionManager()); - hiveConf.set("startcode", - String.valueOf(System.currentTimeMillis())); - if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL)) { - String keyStorePath = hiveConf.getVar( - ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PATH); - if (StringUtils.isBlank(keyStorePath)) { - throw new IllegalArgumentException( - ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PATH.varname - + " Not configured for SSL connection"); - } - builder.setKeyStorePassword(ShimLoader.getHadoopShims().getPassword( - hiveConf, ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PASSWORD.varname)); - 
builder.setKeyStorePath(keyStorePath); - builder.setKeyStoreType(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_TYPE)); - builder.setKeyManagerFactoryAlgorithm( - hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYMANAGERFACTORY_ALGORITHM)); - builder.setExcludeCiphersuites( - hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_EXCLUDE_CIPHERSUITES)); - builder.setUseSSL(true); - } - if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SPNEGO)) { - String spnegoPrincipal = hiveConf.getVar( - ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_PRINCIPAL); - String spnegoKeytab = hiveConf.getVar( - ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_KEYTAB); - if (StringUtils.isBlank(spnegoPrincipal) || StringUtils.isBlank(spnegoKeytab)) { - throw new IllegalArgumentException( - ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_PRINCIPAL.varname - + "/" + ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_KEYTAB.varname - + " Not configured for SPNEGO authentication"); - } - builder.setSPNEGOPrincipal(spnegoPrincipal); - builder.setSPNEGOKeytab(spnegoKeytab); - builder.setUseSPNEGO(true); - } - if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_ENABLE_CORS)) { - builder.setEnableCORS(true); - String allowedOrigins = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_ORIGINS); - String allowedMethods = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_METHODS); - String allowedHeaders = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_HEADERS); - if (StringUtils.isBlank(allowedOrigins) || StringUtils.isBlank(allowedMethods) || StringUtils.isBlank(allowedHeaders)) { - throw new IllegalArgumentException("CORS enabled. 
But " + - ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_ORIGINS.varname + "/" + - ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_METHODS.varname + "/" + - ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_HEADERS.varname + "/" + - " is not configured"); - } - builder.setAllowedOrigins(allowedOrigins); - builder.setAllowedMethods(allowedMethods); - builder.setAllowedHeaders(allowedHeaders); - LOG.info("CORS enabled - allowed-origins: {} allowed-methods: {} allowed-headers: {}", allowedOrigins, - allowedMethods, allowedHeaders); - } - if(hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_XFRAME_ENABLED)){ - builder.configureXFrame(true).setXFrameOption(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_XFRAME_VALUE)); - } - if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_PAM)) { - if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL)) { - String hiveServer2PamServices = hiveConf.getVar(ConfVars.HIVE_SERVER2_PAM_SERVICES); - if (hiveServer2PamServices == null || hiveServer2PamServices.isEmpty()) { - throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_PAM_SERVICES.varname + " are not configured."); - } - builder.setPAMAuthenticator(pamAuthenticator == null ? new PamAuthenticator(hiveConf) : pamAuthenticator); - builder.setUsePAM(true); - } else if (hiveConf.getBoolVar(ConfVars.HIVE_IN_TEST)) { - builder.setPAMAuthenticator(pamAuthenticator == null ? new PamAuthenticator(hiveConf) : pamAuthenticator); - builder.setUsePAM(true); - } else { - throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL.varname + " has false value. 
It is recommended to set to true when PAM is used."); - } - } + HttpServer.Builder builder = initBuilder(webHost, webUIPort, "hiveserver2", "/", hiveConf); if (serviceDiscovery && activePassiveHA) { - builder.setContextAttribute("hs2.isLeader", isLeader); - builder.setContextAttribute("hs2.failover.callback", new FailoverHandlerCallback(hs2HARegistry)); - builder.setContextAttribute("hiveconf", hiveConf); + addHAContextAttributes(builder, hiveConf); builder.addServlet("leader", HS2LeadershipStatus.class); builder.addServlet("peers", HS2Peers.class); } @@ -505,10 +425,11 @@ public synchronized void init(HiveConf hiveConf) { if (ldapAuthService != null) { webServer.addServlet("login", "/login", new ServletHolder(new LoginServlet(ldapAuthService))); } + initHealthHA(webServer, hiveConf); } } - } catch (IOException ie) { - throw new ServiceException(ie); + } catch (IOException e) { + throw new ServiceException(e); } long otelExporterFrequency = @@ -534,6 +455,101 @@ public synchronized void init(HiveConf hiveConf) { // Extra time for releasing the resources if timeout sets to 0 ShutdownHookManager.addGracefulShutDownHook(() -> graceful_stop(), timeout == 0 ? 
30 : timeout); } + + private void addHAContextAttributes(HttpServer.Builder builder, HiveConf hiveConf) { + builder.setContextAttribute("hs2.isLeader", isLeader); + builder.setContextAttribute("hs2.failover.callback", new FailoverHandlerCallback(hs2HARegistry)); + builder.setContextAttribute("hiveconf", hiveConf); + } + + private HttpServer.Builder initBuilder(String webHost, int port, String name, String contextPath, HiveConf hiveConf) throws IOException { + HttpServer.Builder builder = new HttpServer.Builder(name); + builder.setPort(port).setConf(hiveConf); + builder.setHost(webHost); + builder.setContextPath(contextPath); + builder.setMaxThreads(hiveConf.getIntVar(ConfVars.HIVE_SERVER2_WEBUI_MAX_THREADS)); + builder.setAdmins(hiveConf.getVar(ConfVars.USERS_IN_ADMIN_ROLE)); + // SessionManager is initialized + builder.setContextAttribute("hive.sm", cliService.getSessionManager()); + hiveConf.set("startcode", String.valueOf(System.currentTimeMillis())); + if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL)) { + String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PATH); + if (StringUtils.isBlank(keyStorePath)) { + throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PATH.varname + + " Not configured for SSL connection"); + } + builder.setKeyStorePassword(ShimLoader.getHadoopShims().getPassword( + hiveConf, ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PASSWORD.varname)); + builder.setKeyStorePath(keyStorePath); + builder.setKeyStoreType(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_TYPE)); + builder.setKeyManagerFactoryAlgorithm( + hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYMANAGERFACTORY_ALGORITHM)); + builder.setExcludeCiphersuites(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_EXCLUDE_CIPHERSUITES)); + builder.setUseSSL(true); + } + if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SPNEGO)) { + String spnegoPrincipal = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_PRINCIPAL); + 
String spnegoKeytab = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_KEYTAB); + if (StringUtils.isBlank(spnegoPrincipal) || StringUtils.isBlank(spnegoKeytab)) { + throw new IllegalArgumentException( + ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_PRINCIPAL.varname + + "/" + ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_KEYTAB.varname + + " Not configured for SPNEGO authentication"); + } + builder.setSPNEGOPrincipal(spnegoPrincipal); + builder.setSPNEGOKeytab(spnegoKeytab); + builder.setUseSPNEGO(true); + } + if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_ENABLE_CORS)) { + builder.setEnableCORS(true); + String allowedOrigins = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_ORIGINS); + String allowedMethods = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_METHODS); + String allowedHeaders = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_HEADERS); + if (StringUtils.isBlank(allowedOrigins) || StringUtils.isBlank(allowedMethods) || StringUtils.isBlank(allowedHeaders)) { + throw new IllegalArgumentException("CORS enabled. 
But " + + ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_ORIGINS.varname + "/" + + ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_METHODS.varname + "/" + + ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_HEADERS.varname + "/" + + " is not configured"); + } + builder.setAllowedOrigins(allowedOrigins); + builder.setAllowedMethods(allowedMethods); + builder.setAllowedHeaders(allowedHeaders); + LOG.info("CORS enabled - allowed-origins: {} allowed-methods: {} allowed-headers: {}", allowedOrigins, + allowedMethods, allowedHeaders); + } + if(hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_XFRAME_ENABLED)){ + builder.configureXFrame(true).setXFrameOption(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_XFRAME_VALUE)); + } + if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_PAM)) { + if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL)) { + String hiveServer2PamServices = hiveConf.getVar(ConfVars.HIVE_SERVER2_PAM_SERVICES); + if (hiveServer2PamServices == null || hiveServer2PamServices.isEmpty()) { + throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_PAM_SERVICES.varname + " are not configured."); + } + builder.setPAMAuthenticator(pamAuthenticator == null ? new PamAuthenticator(hiveConf) : pamAuthenticator); + builder.setUsePAM(true); + } else if (hiveConf.getBoolVar(ConfVars.HIVE_IN_TEST)) { + builder.setPAMAuthenticator(pamAuthenticator == null ? new PamAuthenticator(hiveConf) : pamAuthenticator); + builder.setUsePAM(true); + } else { + throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL.varname + " has false value. 
It is recommended to set to true when PAM is used."); + } + } + + return builder; + } + + private void initHealthHA(HttpServer webServer, HiveConf hiveConf) throws IOException { + if (serviceDiscovery && activePassiveHA) { + String webHost = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_BIND_HOST); + int healthCheckPort = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_HEALTH_HA_PORT); + HttpServer.Builder builder = initBuilder(webHost, healthCheckPort, "health-ha", "/health-ha", hiveConf); + addHAContextAttributes(builder, hiveConf); + builder.addServlet("leader", HS2HealthHAStatus.class); + webServer.addWebApp(builder); + } + } private void logCompactionParameters(HiveConf hiveConf) { LOG.info("Compaction HS2 parameters:"); diff --git a/service/src/java/org/apache/hive/service/servlet/HS2HealthHAStatus.java b/service/src/java/org/apache/hive/service/servlet/HS2HealthHAStatus.java new file mode 100644 index 000000000000..a00c8bd7b701 --- /dev/null +++ b/service/src/java/org/apache/hive/service/servlet/HS2HealthHAStatus.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hive.service.servlet; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.IOException; + +public class HS2HealthHAStatus extends HS2LeadershipStatus { + + @Override + public void doDelete(final HttpServletRequest request, final HttpServletResponse response) throws IOException { + response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, "DELETE method is not allowed"); + } +} diff --git a/service/src/resources/hive-webapps/health-ha/WEB-INF/web.xml b/service/src/resources/hive-webapps/health-ha/WEB-INF/web.xml new file mode 100644 index 000000000000..53204f478ac9 --- /dev/null +++ b/service/src/resources/hive-webapps/health-ha/WEB-INF/web.xml @@ -0,0 +1,10 @@ + + + + +