http://git-wip-us.apache.org/repos/asf/hbase/blob/d6982414/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
deleted file mode 100644
index e9a56ff..0000000
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ /dev/null
@@ -1,621 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.net.HttpURLConnection;
-import java.net.URI;
-import java.net.URL;
-import java.util.Arrays;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.SortedSet;
-import java.util.TreeSet;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.Executor;
-import java.util.concurrent.Executors;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
-import 
org.apache.hadoop.hbase.http.HttpServer.QuotingInputFilter.RequestQuoter;
-import org.apache.hadoop.hbase.http.resource.JerseyResource;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.Groups;
-import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authorize.AccessControlList;
-import org.eclipse.jetty.server.ServerConnector;
-import org.eclipse.jetty.util.ajax.JSON;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.mockito.Mockito;
-import org.mockito.internal.util.reflection.Whitebox;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestHttpServer extends HttpServerFunctionalTest {
-  private static final Log LOG = LogFactory.getLog(TestHttpServer.class);
-  private static HttpServer server;
-  private static URL baseUrl;
-  // jetty 9.4.x needs this many threads to start, even in the small.
-  static final int MAX_THREADS = 16;
-  
-  @SuppressWarnings("serial")
-  public static class EchoMapServlet extends HttpServlet {
-    @Override
-    public void doGet(HttpServletRequest request, 
-                      HttpServletResponse response
-                      ) throws ServletException, IOException {
-      PrintWriter out = response.getWriter();
-      Map<String, String[]> params = request.getParameterMap();
-      SortedSet<String> keys = new TreeSet<>(params.keySet());
-      for(String key: keys) {
-        out.print(key);
-        out.print(':');
-        String[] values = params.get(key);
-        if (values.length > 0) {
-          out.print(values[0]);
-          for(int i=1; i < values.length; ++i) {
-            out.print(',');
-            out.print(values[i]);
-          }
-        }
-        out.print('\n');
-      }
-      out.close();
-    }    
-  }
-
-  @SuppressWarnings("serial")
-  public static class EchoServlet extends HttpServlet {
-    @Override
-    public void doGet(HttpServletRequest request, 
-                      HttpServletResponse response
-                      ) throws ServletException, IOException {
-      PrintWriter out = response.getWriter();
-      SortedSet<String> sortedKeys = new TreeSet<>();
-      Enumeration<String> keys = request.getParameterNames();
-      while(keys.hasMoreElements()) {
-        sortedKeys.add(keys.nextElement());
-      }
-      for(String key: sortedKeys) {
-        out.print(key);
-        out.print(':');
-        out.print(request.getParameter(key));
-        out.print('\n');
-      }
-      out.close();
-    }    
-  }
-
-  @SuppressWarnings("serial")
-  public static class LongHeaderServlet extends HttpServlet {
-    @Override
-    public void doGet(HttpServletRequest request,
-                      HttpServletResponse response
-    ) throws ServletException, IOException {
-      Assert.assertEquals(63 * 1024, request.getHeader("longheader").length());
-      response.setStatus(HttpServletResponse.SC_OK);
-    }
-  }
-
-  @SuppressWarnings("serial")
-  public static class HtmlContentServlet extends HttpServlet {
-    @Override
-    public void doGet(HttpServletRequest request, 
-                      HttpServletResponse response
-                      ) throws ServletException, IOException {
-      response.setContentType("text/html");
-      PrintWriter out = response.getWriter();
-      out.print("hello world");
-      out.close();
-    }
-  }
-
-  @BeforeClass public static void setup() throws Exception {
-    Configuration conf = new Configuration();
-    conf.setInt(HttpServer.HTTP_MAX_THREADS, MAX_THREADS);
-    server = createTestServer(conf);
-    server.addServlet("echo", "/echo", EchoServlet.class);
-    server.addServlet("echomap", "/echomap", EchoMapServlet.class);
-    server.addServlet("htmlcontent", "/htmlcontent", HtmlContentServlet.class);
-    server.addServlet("longheader", "/longheader", LongHeaderServlet.class);
-    server.addJerseyResourcePackage(
-        JerseyResource.class.getPackage().getName(), "/jersey/*");
-    server.start();
-    baseUrl = getServerURL(server);
-    LOG.info("HTTP server started: "+ baseUrl);
-  }
-  
-  @AfterClass public static void cleanup() throws Exception {
-    server.stop();
-  }
-  
-  /** Test the maximum number of threads cannot be exceeded. */
-  @Test public void testMaxThreads() throws Exception {
-    int clientThreads = MAX_THREADS * 10;
-    Executor executor = Executors.newFixedThreadPool(clientThreads);
-    // Run many clients to make server reach its maximum number of threads
-    final CountDownLatch ready = new CountDownLatch(clientThreads);
-    final CountDownLatch start = new CountDownLatch(1);
-    for (int i = 0; i < clientThreads; i++) {
-      executor.execute(new Runnable() {
-        @Override
-        public void run() {
-          ready.countDown();
-          try {
-            start.await();
-            assertEquals("a:b\nc:d\n",
-                         readOutput(new URL(baseUrl, "/echo?a=b&c=d")));
-            int serverThreads = server.webServer.getThreadPool().getThreads();
-            assertTrue("More threads are started than expected, Server Threads 
count: "
-                    + serverThreads, serverThreads <= MAX_THREADS);
-            System.out.println("Number of threads = " + serverThreads +
-                " which is less or equal than the max = " + MAX_THREADS);
-          } catch (Exception e) {
-            // do nothing
-          }
-        }
-      });
-    }
-    // Start the client threads when they are all ready
-    ready.await();
-    start.countDown();
-  }
-  
-  @Test public void testEcho() throws Exception {
-    assertEquals("a:b\nc:d\n", 
-                 readOutput(new URL(baseUrl, "/echo?a=b&c=d")));
-    assertEquals("a:b\nc&lt;:d\ne:&gt;\n", 
-                 readOutput(new URL(baseUrl, "/echo?a=b&c<=d&e=>")));    
-  }
-  
-  /** Test the echo map servlet that uses getParameterMap. */
-  @Test public void testEchoMap() throws Exception {
-    assertEquals("a:b\nc:d\n", 
-                 readOutput(new URL(baseUrl, "/echomap?a=b&c=d")));
-    assertEquals("a:b,&gt;\nc&lt;:d\n", 
-                 readOutput(new URL(baseUrl, "/echomap?a=b&c<=d&a=>")));
-  }
-
-  /** 
-   *  Test that verifies headers can be up to 64K long. 
-   *  The test adds a 63K header leaving 1K for other headers.
-   *  This is because the header buffer setting is for ALL headers,
-   *  names and values included. */
-  @Test public void testLongHeader() throws Exception {
-    URL url = new URL(baseUrl, "/longheader");
-    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    StringBuilder sb = new StringBuilder();
-    for (int i = 0 ; i < 63 * 1024; i++) {
-      sb.append("a");
-    }
-    conn.setRequestProperty("longheader", sb.toString());
-    assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-  }
-
-  @Test
-  public void testContentTypes() throws Exception {
-    // Static CSS files should have text/css
-    URL cssUrl = new URL(baseUrl, "/static/test.css");
-    HttpURLConnection conn = (HttpURLConnection)cssUrl.openConnection();
-    conn.connect();
-    assertEquals(200, conn.getResponseCode());
-    assertEquals("text/css", conn.getContentType());
-
-    // Servlets should have text/plain with proper encoding by default
-    URL servletUrl = new URL(baseUrl, "/echo?a=b");
-    conn = (HttpURLConnection)servletUrl.openConnection();
-    conn.connect();
-    assertEquals(200, conn.getResponseCode());
-    assertEquals("text/plain;charset=utf-8", conn.getContentType());
-
-    // We should ignore parameters for mime types - ie a parameter
-    // ending in .css should not change mime type
-    servletUrl = new URL(baseUrl, "/echo?a=b.css");
-    conn = (HttpURLConnection)servletUrl.openConnection();
-    conn.connect();
-    assertEquals(200, conn.getResponseCode());
-    assertEquals("text/plain;charset=utf-8", conn.getContentType());
-
-    // Servlets that specify text/html should get that content type
-    servletUrl = new URL(baseUrl, "/htmlcontent");
-    conn = (HttpURLConnection)servletUrl.openConnection();
-    conn.connect();
-    assertEquals(200, conn.getResponseCode());
-    assertEquals("text/html;charset=utf-8", conn.getContentType());
-
-    // JSPs should default to text/html with utf8
-    // JSPs do not work from unit tests
-    // servletUrl = new URL(baseUrl, "/testjsp.jsp");
-    // conn = (HttpURLConnection)servletUrl.openConnection();
-    // conn.connect();
-    // assertEquals(200, conn.getResponseCode());
-    // assertEquals("text/html; charset=utf-8", conn.getContentType());
-  }
-
-  /**
-   * Dummy filter that mimics as an authentication filter. Obtains user 
identity
-   * from the request parameter user.name. Wraps around the request so that
-   * request.getRemoteUser() returns the user identity.
-   * 
-   */
-  public static class DummyServletFilter implements Filter {
-    @Override
-    public void destroy() { }
-
-    @Override
-    public void doFilter(ServletRequest request, ServletResponse response,
-        FilterChain filterChain) throws IOException, ServletException {
-      final String userName = request.getParameter("user.name");
-      ServletRequest requestModified =
-        new HttpServletRequestWrapper((HttpServletRequest) request) {
-        @Override
-        public String getRemoteUser() {
-          return userName;
-        }
-      };
-      filterChain.doFilter(requestModified, response);
-    }
-
-    @Override
-    public void init(FilterConfig arg0) throws ServletException { }
-  }
-
-  /**
-   * FilterInitializer that initialized the DummyFilter.
-   *
-   */
-  public static class DummyFilterInitializer extends FilterInitializer {
-    public DummyFilterInitializer() {
-    }
-
-    @Override
-    public void initFilter(FilterContainer container, Configuration conf) {
-      container.addFilter("DummyFilter", DummyServletFilter.class.getName(), 
null);
-    }
-  }
-
-  /**
-   * Access a URL and get the corresponding return Http status code. The URL
-   * will be accessed as the passed user, by sending user.name request
-   * parameter.
-   * 
-   * @param urlstring
-   * @param userName
-   * @return
-   * @throws IOException
-   */
-  static int getHttpStatusCode(String urlstring, String userName)
-      throws IOException {
-    URL url = new URL(urlstring + "?user.name=" + userName);
-    System.out.println("Accessing " + url + " as user " + userName);
-    HttpURLConnection connection = (HttpURLConnection)url.openConnection();
-    connection.connect();
-    return connection.getResponseCode();
-  }
-
-  /**
-   * Custom user->group mapping service.
-   */
-  public static class MyGroupsProvider extends ShellBasedUnixGroupsMapping {
-    static Map<String, List<String>> mapping = new HashMap<>();
-
-    static void clearMapping() {
-      mapping.clear();
-    }
-
-    @Override
-    public List<String> getGroups(String user) throws IOException {
-      return mapping.get(user);
-    }
-  }
-
-  /**
-   * Verify the access for /logs, /stacks, /conf, /logLevel and /metrics
-   * servlets, when authentication filters are set, but authorization is not
-   * enabled.
-   * @throws Exception 
-   */
-  @Test
-  @Ignore
-  public void testDisabledAuthorizationOfDefaultServlets() throws Exception {
-
-    Configuration conf = new Configuration();
-
-    // Authorization is disabled by default
-    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
-        DummyFilterInitializer.class.getName());
-    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
-        MyGroupsProvider.class.getName());
-    Groups.getUserToGroupsMappingService(conf);
-    MyGroupsProvider.clearMapping();
-    MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA"));
-    MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));
-
-    HttpServer myServer = new HttpServer.Builder().setName("test")
-        .addEndpoint(new URI("http://localhost:0")).setFindPort(true).build();
-    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
-    myServer.start();
-    String serverURL = "http://" + 
NetUtils.getHostPortString(myServer.getConnectorAddress(0)) + "/";
-    for (String servlet : new String[] { "conf", "logs", "stacks",
-        "logLevel", "metrics" }) {
-      for (String user : new String[] { "userA", "userB" }) {
-        assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
-            + servlet, user));
-      }
-    }
-    myServer.stop();
-  }
-
-  /**
-   * Verify the administrator access for /logs, /stacks, /conf, /logLevel and
-   * /metrics servlets.
-   * 
-   * @throws Exception
-   */
-  @Test
-  @Ignore
-  public void testAuthorizationOfDefaultServlets() throws Exception {
-    Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
-        true);
-    
conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
-        true);
-    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
-        DummyFilterInitializer.class.getName());
-
-    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
-        MyGroupsProvider.class.getName());
-    Groups.getUserToGroupsMappingService(conf);
-    MyGroupsProvider.clearMapping();
-    MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA"));
-    MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));
-    MyGroupsProvider.mapping.put("userC", Arrays.asList("groupC"));
-    MyGroupsProvider.mapping.put("userD", Arrays.asList("groupD"));
-    MyGroupsProvider.mapping.put("userE", Arrays.asList("groupE"));
-
-    HttpServer myServer = new HttpServer.Builder().setName("test")
-        .addEndpoint(new 
URI("http://localhost:0")).setFindPort(true).setConf(conf)
-        .setACL(new AccessControlList("userA,userB groupC,groupD")).build();
-    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
-    myServer.start();
-
-    String serverURL = "http://"
-        + NetUtils.getHostPortString(myServer.getConnectorAddress(0)) + "/";
-    for (String servlet : new String[] { "conf", "logs", "stacks",
-        "logLevel", "metrics" }) {
-      for (String user : new String[] { "userA", "userB", "userC", "userD" }) {
-        assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
-            + servlet, user));
-      }
-      assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, getHttpStatusCode(
-          serverURL + servlet, "userE"));
-    }
-    myServer.stop();
-  }
-  
-  @Test
-  public void testRequestQuoterWithNull() throws Exception {
-    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
-    Mockito.doReturn(null).when(request).getParameterValues("dummy");
-    RequestQuoter requestQuoter = new RequestQuoter(request);
-    String[] parameterValues = requestQuoter.getParameterValues("dummy");
-    Assert.assertEquals("It should return null "
-        + "when there are no values for the parameter", null, parameterValues);
-  }
-
-  @Test
-  public void testRequestQuoterWithNotNull() throws Exception {
-    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
-    String[] values = new String[] { "abc", "def" };
-    Mockito.doReturn(values).when(request).getParameterValues("dummy");
-    RequestQuoter requestQuoter = new RequestQuoter(request);
-    String[] parameterValues = requestQuoter.getParameterValues("dummy");
-    Assert.assertTrue("It should return Parameter Values", Arrays.equals(
-        values, parameterValues));
-  }
-
-  @SuppressWarnings("unchecked")
-  private static Map<String, Object> parse(String jsonString) {
-    return (Map<String, Object>)JSON.parse(jsonString);
-  }
-
-  @Test public void testJersey() throws Exception {
-    LOG.info("BEGIN testJersey()");
-    final String js = readOutput(new URL(baseUrl, "/jersey/foo?op=bar"));
-    final Map<String, Object> m = parse(js);
-    LOG.info("m=" + m);
-    assertEquals("foo", m.get(JerseyResource.PATH));
-    assertEquals("bar", m.get(JerseyResource.OP));
-    LOG.info("END testJersey()");
-  }
-
-  @Test
-  public void testHasAdministratorAccess() throws Exception {
-    Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, 
false);
-    ServletContext context = Mockito.mock(ServletContext.class);
-    
Mockito.when(context.getAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE)).thenReturn(conf);
-    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(null);
-    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
-    Mockito.when(request.getRemoteUser()).thenReturn(null);
-    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
-
-    //authorization OFF
-    Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, 
response));
-
-    //authorization ON & user NULL
-    response = Mockito.mock(HttpServletResponse.class);
-    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, 
true);
-    Assert.assertFalse(HttpServer.hasAdministratorAccess(context, request, 
response));
-    
Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED),
 Mockito.anyString());
-
-    //authorization ON & user NOT NULL & ACLs NULL
-    response = Mockito.mock(HttpServletResponse.class);
-    Mockito.when(request.getRemoteUser()).thenReturn("foo");
-    Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, 
response));
-
-    //authorization ON & user NOT NULL & ACLs NOT NULL & user not in ACLs
-    response = Mockito.mock(HttpServletResponse.class);
-    AccessControlList acls = Mockito.mock(AccessControlList.class);
-    
Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(false);
-    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
-    Assert.assertFalse(HttpServer.hasAdministratorAccess(context, request, 
response));
-    
Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED),
 Mockito.anyString());
-
-    //authorization ON & user NOT NULL & ACLs NOT NULL & user in in ACLs
-    response = Mockito.mock(HttpServletResponse.class);
-    
Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(true);
-    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
-    Assert.assertTrue(HttpServer.hasAdministratorAccess(context, request, 
response));
-
-  }
-
-  @Test
-  public void testRequiresAuthorizationAccess() throws Exception {
-    Configuration conf = new Configuration();
-    ServletContext context = Mockito.mock(ServletContext.class);
-    
Mockito.when(context.getAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE)).thenReturn(conf);
-    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
-    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
-
-    //requires admin access to instrumentation, FALSE by default
-    Assert.assertTrue(HttpServer.isInstrumentationAccessAllowed(context, 
request, response));
-
-    //requires admin access to instrumentation, TRUE
-    
conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
 true);
-    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, 
true);
-    AccessControlList acls = Mockito.mock(AccessControlList.class);
-    
Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(false);
-    Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
-    Assert.assertFalse(HttpServer.isInstrumentationAccessAllowed(context, 
request, response));
-  }
-
-  @Test public void testBindAddress() throws Exception {
-    checkBindAddress("localhost", 0, false).stop();
-    // hang onto this one for a bit more testing
-    HttpServer myServer = checkBindAddress("localhost", 0, false);
-    HttpServer myServer2 = null;
-    try { 
-      int port = myServer.getConnectorAddress(0).getPort();
-      // it's already in use, true = expect a higher port
-      myServer2 = checkBindAddress("localhost", port, true);
-      // try to reuse the port
-      port = myServer2.getConnectorAddress(0).getPort();
-      myServer2.stop();
-      assertNull(myServer2.getConnectorAddress(0)); // not bound
-      myServer2.openListeners();
-      assertEquals(port, myServer2.getConnectorAddress(0).getPort()); // 
expect same port
-    } finally {
-      myServer.stop();
-      if (myServer2 != null) {
-        myServer2.stop();
-      }
-    }
-  }
-  
-  private HttpServer checkBindAddress(String host, int port, boolean findPort)
-      throws Exception {
-    HttpServer server = createServer(host, port);
-    try {
-      // not bound, ephemeral should return requested port (0 for ephemeral)
-      List<?> listeners = (List<?>) Whitebox.getInternalState(server,
-          "listeners");
-      ServerConnector listener = (ServerConnector) Whitebox.getInternalState(
-          listeners.get(0), "listener");
-
-      assertEquals(port, listener.getPort());
-      // verify hostname is what was given
-      server.openListeners();
-      assertEquals(host, server.getConnectorAddress(0).getHostName());
-
-      int boundPort = server.getConnectorAddress(0).getPort();
-      if (port == 0) {
-        assertTrue(boundPort != 0); // ephemeral should now return bound port
-      } else if (findPort) {
-        assertTrue(boundPort > port);
-        // allow a little wiggle room to prevent random test failures if
-        // some consecutive ports are already in use
-        assertTrue(boundPort - port < 8);
-      }
-    } catch (Exception e) {
-      server.stop();
-      throw e;
-    }
-    return server;
-  }
-
-  @Test
-  public void testXFrameHeaderSameOrigin() throws Exception {
-    Configuration conf = new Configuration();
-    conf.set("hbase.http.filter.xframeoptions.mode", "SAMEORIGIN");
-
-    HttpServer myServer = new HttpServer.Builder().setName("test")
-            .addEndpoint(new URI("http://localhost:0"))
-            .setFindPort(true).setConf(conf).build();
-    myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
-    myServer.addServlet("echo", "/echo", EchoServlet.class);
-    myServer.start();
-
-    String serverURL = "http://"
-            + NetUtils.getHostPortString(myServer.getConnectorAddress(0));
-    URL url = new URL(new URL(serverURL), "/echo?a=b&c=d");
-    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-    assertEquals("SAMEORIGIN", conn.getHeaderField("X-Frame-Options"));
-    myServer.stop();
-  }
-
-
-
-  @Test
-  public void testNoCacheHeader() throws Exception {
-    URL url = new URL(baseUrl, "/echo?a=b&c=d");
-    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-    assertEquals("no-cache", conn.getHeaderField("Cache-Control"));
-    assertEquals("no-cache", conn.getHeaderField("Pragma"));
-    assertNotNull(conn.getHeaderField("Expires"));
-    assertNotNull(conn.getHeaderField("Date"));
-    assertEquals(conn.getHeaderField("Expires"), conn.getHeaderField("Date"));
-    assertEquals("DENY", conn.getHeaderField("X-Frame-Options"));
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/d6982414/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
deleted file mode 100644
index d0f2825..0000000
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestHttpServerLifecycle extends HttpServerFunctionalTest {
-
-  /**
-   * Check that a server is alive by probing the {@link HttpServer#isAlive()} 
method
-   * and the text of its toString() description
-   * @param server server
-   */
-  private void assertAlive(HttpServer server) {
-    assertTrue("Server is not alive", server.isAlive());
-    assertToStringContains(server, HttpServer.STATE_DESCRIPTION_ALIVE);
-  }
-
-  private void assertNotLive(HttpServer server) {
-    assertTrue("Server should not be live", !server.isAlive());
-    assertToStringContains(server, HttpServer.STATE_DESCRIPTION_NOT_LIVE);
-  }
-
-  /**
-   * Test that the server is alive once started
-   *
-   * @throws Throwable on failure
-   */
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testCreatedServerIsNotAlive() throws Throwable {
-    HttpServer server = createTestServer();
-    assertNotLive(server);
-  }
-
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testStopUnstartedServer() throws Throwable {
-    HttpServer server = createTestServer();
-    stop(server);
-  }
-
-  /**
-   * Test that the server is alive once started
-   *
-   * @throws Throwable on failure
-   */
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testStartedServerIsAlive() throws Throwable {
-    HttpServer server = null;
-    server = createTestServer();
-    assertNotLive(server);
-    server.start();
-    assertAlive(server);
-    stop(server);
-  }
-
-  /**
-   * Assert that the result of {@link HttpServer#toString()} contains the 
specific text
-   * @param server server to examine
-   * @param text text to search for
-   */
-  private void assertToStringContains(HttpServer server, String text) {
-    String description = server.toString();
-    assertTrue("Did not find \"" + text + "\" in \"" + description + "\"",
-               description.contains(text));
-  }
-
-  /**
-   * Test that the server is not alive once stopped
-   *
-   * @throws Throwable on failure
-   */
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testStoppedServerIsNotAlive() throws Throwable {
-    HttpServer server = createAndStartTestServer();
-    assertAlive(server);
-    stop(server);
-    assertNotLive(server);
-  }
-
-  /**
-   * Test that the server is not alive once stopped
-   *
-   * @throws Throwable on failure
-   */
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testStoppingTwiceServerIsAllowed() throws Throwable {
-    HttpServer server = createAndStartTestServer();
-    assertAlive(server);
-    stop(server);
-    assertNotLive(server);
-    stop(server);
-    assertNotLive(server);
-  }
-
-  /**
-   * Test that the server is alive once started
-   *
-   * @throws Throwable
-   *           on failure
-   */
-  @Ignore ("Hangs on occasion; see HBASE-14430") @Test(timeout=60000)
-  public void testWepAppContextAfterServerStop() throws Throwable {
-    HttpServer server = null;
-    String key = "test.attribute.key";
-    String value = "test.attribute.value";
-    server = createTestServer();
-    assertNotLive(server);
-    server.start();
-    server.setAttribute(key, value);
-    assertAlive(server);
-    assertEquals(value, server.getAttribute(key));
-    stop(server);
-    assertNull("Server context should have cleared", server.getAttribute(key));
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/d6982414/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
deleted file mode 100644
index db394a8..0000000
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.Log;
-
-import java.io.FileNotFoundException;
-
-/**
- * Test webapp loading
- */
-@Category({MiscTests.class, SmallTests.class})
-public class TestHttpServerWebapps extends HttpServerFunctionalTest {
-  private static final Log log = 
LogFactory.getLog(TestHttpServerWebapps.class);
-
-  /**
-   * Test that the test server is loadable on the classpath
-   * @throws Throwable if something went wrong
-   */
-  @Test
-  public void testValidServerResource() throws Throwable {
-    HttpServer server = null;
-    try {
-      server = createServer("test");
-    } finally {
-      stop(server);
-    }
-  }
-
-  /**
-   * Test that an invalid webapp triggers an exception
-   * @throws Throwable if something went wrong
-   */
-  @Test
-  public void testMissingServerResource() throws Throwable {
-    try {
-      HttpServer server = createServer("NoSuchWebapp");
-      //should not have got here.
-      //close the server
-      String serverDescription = server.toString();
-      stop(server);
-      fail("Expected an exception, got " + serverDescription);
-    } catch (FileNotFoundException expected) {
-      log.debug("Expected exception " + expected, expected);
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/d6982414/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
deleted file mode 100644
index 3c2de53..0000000
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.URL;
-import java.net.URLConnection;
-import java.util.Set;
-import java.util.TreeSet;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.net.NetUtils;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestPathFilter extends HttpServerFunctionalTest {
-  private static final Log LOG = LogFactory.getLog(HttpServer.class);
-  static final Set<String> RECORDS = new TreeSet<>();
-
-  /** A very simple filter that records accessed uri's */
-  static public class RecordingFilter implements Filter {
-    private FilterConfig filterConfig = null;
-
-    @Override
-    public void init(FilterConfig filterConfig) {
-      this.filterConfig = filterConfig;
-    }
-
-    @Override
-    public void destroy() {
-      this.filterConfig = null;
-    }
-
-    @Override
-    public void doFilter(ServletRequest request, ServletResponse response,
-        FilterChain chain) throws IOException, ServletException {
-      if (filterConfig == null)
-         return;
-
-      String uri = ((HttpServletRequest)request).getRequestURI();
-      LOG.info("filtering " + uri);
-      RECORDS.add(uri);
-      chain.doFilter(request, response);
-    }
-
-    /** Configuration for RecordingFilter */
-    static public class Initializer extends FilterInitializer {
-      public Initializer() {}
-
-      @Override
-      public void initFilter(FilterContainer container, Configuration conf) {
-        container.addFilter("recording", RecordingFilter.class.getName(), 
null);
-      }
-    }
-  }
-  
-  
-  /** access a url, ignoring some IOException such as the page does not exist 
*/
-  static void access(String urlstring) throws IOException {
-    LOG.warn("access " + urlstring);
-    URL url = new URL(urlstring);
-    
-    URLConnection connection = url.openConnection();
-    connection.connect();
-    
-    try {
-      BufferedReader in = new BufferedReader(new InputStreamReader(
-          connection.getInputStream()));
-      try {
-        for(; in.readLine() != null; );
-      } finally {
-        in.close();
-      }
-    } catch(IOException ioe) {
-      LOG.warn("urlstring=" + urlstring, ioe);
-    }
-  }
-
-  @Test
-  public void testPathSpecFilters() throws Exception {
-    Configuration conf = new Configuration();
-    
-    //start a http server with CountingFilter
-    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
-        RecordingFilter.Initializer.class.getName());
-    String[] pathSpecs = { "/path", "/path/*" };
-    HttpServer http = createTestServer(conf, pathSpecs);
-    http.start();
-
-    final String baseURL = "/path";
-    final String baseSlashURL = "/path/";
-    final String addedURL = "/path/nodes";
-    final String addedSlashURL = "/path/nodes/";
-    final String longURL = "/path/nodes/foo/job";
-    final String rootURL = "/";
-    final String allURL = "/*";
-
-    final String[] filteredUrls = {baseURL, baseSlashURL, addedURL, 
-        addedSlashURL, longURL};
-    final String[] notFilteredUrls = {rootURL, allURL};
-
-    // access the urls and verify our paths specs got added to the 
-    // filters
-    final String prefix = "http://";
-        + NetUtils.getHostPortString(http.getConnectorAddress(0));
-    try {
-      for(int i = 0; i < filteredUrls.length; i++) {
-        access(prefix + filteredUrls[i]);
-      }
-      for(int i = 0; i < notFilteredUrls.length; i++) {
-        access(prefix + notFilteredUrls[i]);
-      }
-    } finally {
-      http.stop();
-    }
-
-    LOG.info("RECORDS = " + RECORDS);
-    
-    //verify records
-    for(int i = 0; i < filteredUrls.length; i++) {
-      assertTrue(RECORDS.remove(filteredUrls[i]));
-    }
-    assertTrue(RECORDS.isEmpty());
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/d6982414/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
deleted file mode 100644
index b599350..0000000
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.InputStream;
-import java.net.URI;
-import java.net.URL;
-
-import javax.net.ssl.HttpsURLConnection;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.ssl.SSLFactory;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-/**
- * This testcase issues SSL certificates configures the HttpServer to serve
- * HTTPS using the created certficates and calls an echo servlet using the
- * corresponding HTTPS URL.
- */
-@Category({MiscTests.class, SmallTests.class})
-public class TestSSLHttpServer extends HttpServerFunctionalTest {
-  private static final String BASEDIR = System.getProperty("test.build.dir",
-      "target/test-dir") + "/" + TestSSLHttpServer.class.getSimpleName();
-
-  private static final Log LOG = LogFactory.getLog(TestSSLHttpServer.class);
-  private static Configuration conf;
-  private static HttpServer server;
-  private static URL baseUrl;
-  private static String keystoresDir;
-  private static String sslConfDir;
-  private static SSLFactory clientSslFactory;
-
-  @BeforeClass
-  public static void setup() throws Exception {
-    conf = new Configuration();
-    conf.setInt(HttpServer.HTTP_MAX_THREADS, TestHttpServer.MAX_THREADS);
-
-    File base = new File(BASEDIR);
-    FileUtil.fullyDelete(base);
-    base.mkdirs();
-    keystoresDir = new File(BASEDIR).getAbsolutePath();
-    sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
-
-    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
-    Configuration sslConf = new Configuration(false);
-    sslConf.addResource("ssl-server.xml");
-    sslConf.addResource("ssl-client.xml");
-
-    clientSslFactory = new SSLFactory(SSLFactory.Mode.CLIENT, sslConf);
-    clientSslFactory.init();
-
-    server = new HttpServer.Builder()
-        .setName("test")
-        .addEndpoint(new URI("https://localhost";))
-        .setConf(conf)
-        .keyPassword(HBaseConfiguration.getPassword(sslConf, 
"ssl.server.keystore.keypassword",
-            null))
-        .keyStore(sslConf.get("ssl.server.keystore.location"),
-            HBaseConfiguration.getPassword(sslConf, 
"ssl.server.keystore.password", null),
-            sslConf.get("ssl.server.keystore.type", "jks"))
-        .trustStore(sslConf.get("ssl.server.truststore.location"),
-            HBaseConfiguration.getPassword(sslConf, 
"ssl.server.truststore.password", null),
-            sslConf.get("ssl.server.truststore.type", "jks")).build();
-    server.addServlet("echo", "/echo", TestHttpServer.EchoServlet.class);
-    server.start();
-    baseUrl = new URL("https://";
-        + NetUtils.getHostPortString(server.getConnectorAddress(0)));
-    LOG.info("HTTP server started: " + baseUrl);
-  }
-
-  @AfterClass
-  public static void cleanup() throws Exception {
-    server.stop();
-    FileUtil.fullyDelete(new File(BASEDIR));
-    KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
-    clientSslFactory.destroy();
-  }
-
-  @Test
-  public void testEcho() throws Exception {
-    assertEquals("a:b\nc:d\n", readOut(new URL(baseUrl, "/echo?a=b&c=d")));
-    assertEquals("a:b\nc&lt;:d\ne:&gt;\n", readOut(new URL(baseUrl,
-        "/echo?a=b&c<=d&e=>")));
-  }
-
-  private static String readOut(URL url) throws Exception {
-    HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
-    conn.setSSLSocketFactory(clientSslFactory.createSSLSocketFactory());
-    InputStream in = conn.getInputStream();
-    ByteArrayOutputStream out = new ByteArrayOutputStream();
-    IOUtils.copyBytes(in, out, 1024);
-    return out.toString();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/d6982414/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
deleted file mode 100644
index 1d24ec2..0000000
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
+++ /dev/null
@@ -1,210 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.URL;
-import java.net.URLConnection;
-import java.util.Random;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.GenericTestUtils;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.net.NetUtils;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestServletFilter extends HttpServerFunctionalTest {
-  private static final Log LOG = LogFactory.getLog(HttpServer.class);
-  static volatile String uri = null; 
-
-  /** A very simple filter which record the uri filtered. */
-  static public class SimpleFilter implements Filter {
-    private FilterConfig filterConfig = null;
-
-    @Override
-    public void init(FilterConfig filterConfig) throws ServletException {
-      this.filterConfig = filterConfig;
-    }
-
-    @Override
-    public void destroy() {
-      this.filterConfig = null;
-    }
-
-    @Override
-    public void doFilter(ServletRequest request, ServletResponse response,
-        FilterChain chain) throws IOException, ServletException {
-      if (filterConfig == null)
-         return;
-
-      uri = ((HttpServletRequest)request).getRequestURI();
-      LOG.info("filtering " + uri);
-      chain.doFilter(request, response);
-    }
-
-    /** Configuration for the filter */
-    static public class Initializer extends FilterInitializer {
-      public Initializer() {}
-
-      @Override
-      public void initFilter(FilterContainer container, Configuration conf) {
-        container.addFilter("simple", SimpleFilter.class.getName(), null);
-      }
-    }
-  }
-  
-  
-  /** access a url, ignoring some IOException such as the page does not exist 
*/
-  static void access(String urlstring) throws IOException {
-    LOG.warn("access " + urlstring);
-    URL url = new URL(urlstring);
-    URLConnection connection = url.openConnection();
-    connection.connect();
-    
-    try {
-      BufferedReader in = new BufferedReader(new InputStreamReader(
-          connection.getInputStream()));
-      try {
-        for(; in.readLine() != null; );
-      } finally {
-        in.close();
-      }
-    } catch(IOException ioe) {
-      LOG.warn("urlstring=" + urlstring, ioe);
-    }
-  }
-
-  @Test
-  @Ignore
-  //From stack
-  // Its a 'foreign' test, one that came in from hadoop when we copy/pasted 
http
-  // It's second class. Could comment it out if only failing test (as per 
@nkeywal – sort of)
-  public void testServletFilter() throws Exception {
-    Configuration conf = new Configuration();
-    
-    //start a http server with CountingFilter
-    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
-        SimpleFilter.Initializer.class.getName());
-    HttpServer http = createTestServer(conf);
-    http.start();
-
-    final String fsckURL = "/fsck";
-    final String stacksURL = "/stacks";
-    final String ajspURL = "/a.jsp";
-    final String logURL = "/logs/a.log";
-    final String hadooplogoURL = "/static/hadoop-logo.jpg";
-    
-    final String[] urls = {fsckURL, stacksURL, ajspURL, logURL, hadooplogoURL};
-    final Random ran = new Random();
-    final int[] sequence = new int[50];
-
-    //generate a random sequence and update counts 
-    for(int i = 0; i < sequence.length; i++) {
-      sequence[i] = ran.nextInt(urls.length);
-    }
-
-    //access the urls as the sequence
-    final String prefix = "http://";
-        + NetUtils.getHostPortString(http.getConnectorAddress(0));
-    try {
-      for(int i = 0; i < sequence.length; i++) {
-        access(prefix + urls[sequence[i]]);
-
-        //make sure everything except fsck get filtered
-        if (sequence[i] == 0) {
-          assertEquals(null, uri);
-        } else {
-          assertEquals(urls[sequence[i]], uri);
-          uri = null;
-        }
-      }
-    } finally {
-      http.stop();
-    }
-  }
-  
-  static public class ErrorFilter extends SimpleFilter {
-    @Override
-    public void init(FilterConfig arg0) throws ServletException {
-      throw new ServletException("Throwing the exception from Filter init");
-    }
-
-    /** Configuration for the filter */
-    static public class Initializer extends FilterInitializer {
-      public Initializer() {
-      }
-
-      @Override
-      public void initFilter(FilterContainer container, Configuration conf) {
-        container.addFilter("simple", ErrorFilter.class.getName(), null);
-      }
-    }
-  }
-
-  @Test
-  public void testServletFilterWhenInitThrowsException() throws Exception {
-    Configuration conf = new Configuration();
-    // start a http server with ErrorFilter
-    conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY,
-        ErrorFilter.Initializer.class.getName());
-    HttpServer http = createTestServer(conf);
-    try {
-      http.start();
-      fail("expecting exception");
-    } catch (IOException e) {
-      GenericTestUtils.assertExceptionContains("Problem starting http server", 
e);
-    }
-  }
-  
-  /**
-   * Similar to the above test case, except that it uses a different API to 
add the
-   * filter. Regression test for HADOOP-8786.
-   */
-  @Test
-  public void testContextSpecificServletFilterWhenInitThrowsException()
-      throws Exception {
-    Configuration conf = new Configuration();
-    HttpServer http = createTestServer(conf);
-    HttpServer.defineFilter(http.webAppContext,
-        "ErrorFilter", ErrorFilter.class.getName(),
-        null, null);
-    try {
-      http.start();
-      fail("expecting exception");
-    } catch (IOException e) {
-      GenericTestUtils.assertExceptionContains("Unable to initialize 
WebAppContext", e);
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/d6982414/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
deleted file mode 100644
index 4fad031..0000000
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
+++ /dev/null
@@ -1,258 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.security.Principal;
-import java.security.PrivilegedExceptionAction;
-import java.util.Set;
-
-import javax.security.auth.Subject;
-import javax.security.auth.kerberos.KerberosTicket;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.http.TestHttpServer.EchoServlet;
-import org.apache.hadoop.hbase.http.resource.JerseyResource;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.security.authentication.util.KerberosName;
-import org.apache.http.HttpHost;
-import org.apache.http.HttpResponse;
-import org.apache.http.auth.AuthSchemeProvider;
-import org.apache.http.auth.AuthScope;
-import org.apache.http.auth.KerberosCredentials;
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.config.AuthSchemes;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.protocol.HttpClientContext;
-import org.apache.http.config.Lookup;
-import org.apache.http.config.RegistryBuilder;
-import org.apache.http.entity.ByteArrayEntity;
-import org.apache.http.entity.ContentType;
-import org.apache.http.impl.auth.SPNegoSchemeFactory;
-import org.apache.http.impl.client.BasicCredentialsProvider;
-import org.apache.http.impl.client.HttpClients;
-import org.apache.http.util.EntityUtils;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.client.JaasKrbUtil;
-import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer;
-import org.ietf.jgss.GSSCredential;
-import org.ietf.jgss.GSSManager;
-import org.ietf.jgss.GSSName;
-import org.ietf.jgss.Oid;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-/**
- * Test class for SPNEGO authentication on the HttpServer. Uses Kerby's 
MiniKDC and Apache
- * HttpComponents to verify that a simple Servlet is reachable via SPNEGO and 
unreachable w/o.
- */
-@Category({MiscTests.class, SmallTests.class})
-public class TestSpnegoHttpServer extends HttpServerFunctionalTest {
-  private static final Log LOG = LogFactory.getLog(TestSpnegoHttpServer.class);
-  private static final String KDC_SERVER_HOST = "localhost";
-  private static final String CLIENT_PRINCIPAL = "client";
-
-  private static HttpServer server;
-  private static URL baseUrl;
-  private static SimpleKdcServer kdc;
-  private static File infoServerKeytab;
-  private static File clientKeytab;
-
-  @BeforeClass
-  public static void setupServer() throws Exception {
-    final String serverPrincipal = "HTTP/" + KDC_SERVER_HOST;
-    final File target = new File(System.getProperty("user.dir"), "target");
-    assertTrue(target.exists());
-
-    kdc = buildMiniKdc();
-    kdc.start();
-
-    File keytabDir = new File(target, 
TestSpnegoHttpServer.class.getSimpleName()
-        + "_keytabs");
-    if (keytabDir.exists()) {
-      deleteRecursively(keytabDir);
-    }
-    keytabDir.mkdirs();
-
-    infoServerKeytab = new File(keytabDir, serverPrincipal.replace('/', '_') + 
".keytab");
-    clientKeytab = new File(keytabDir, CLIENT_PRINCIPAL + ".keytab");
-
-    setupUser(kdc, clientKeytab, CLIENT_PRINCIPAL);
-    setupUser(kdc, infoServerKeytab, serverPrincipal);
-
-    Configuration conf = buildSpnegoConfiguration(serverPrincipal, 
infoServerKeytab);
-
-    server = createTestServerWithSecurity(conf);
-    server.addServlet("echo", "/echo", EchoServlet.class);
-    
server.addJerseyResourcePackage(JerseyResource.class.getPackage().getName(), 
"/jersey/*");
-    server.start();
-    baseUrl = getServerURL(server);
-
-    LOG.info("HTTP server started: "+ baseUrl);
-  }
-
-  @AfterClass
-  public static void stopServer() throws Exception {
-    try {
-      if (null != server) {
-        server.stop();
-      }
-    } catch (Exception e) {
-      LOG.info("Failed to stop info server", e);
-    }
-    try {
-      if (null != kdc) {
-        kdc.stop();
-      }
-    } catch (Exception e) {
-      LOG.info("Failed to stop mini KDC", e);
-    }
-  }
-
-  private static void setupUser(SimpleKdcServer kdc, File keytab, String 
principal)
-      throws KrbException {
-    kdc.createPrincipal(principal);
-    kdc.exportPrincipal(principal, keytab);
-  }
-
-  private static SimpleKdcServer buildMiniKdc() throws Exception {
-    SimpleKdcServer kdc = new SimpleKdcServer();
-
-    final File target = new File(System.getProperty("user.dir"), "target");
-    File kdcDir = new File(target, TestSpnegoHttpServer.class.getSimpleName());
-    if (kdcDir.exists()) {
-      deleteRecursively(kdcDir);
-    }
-    kdcDir.mkdirs();
-    kdc.setWorkDir(kdcDir);
-
-    kdc.setKdcHost(KDC_SERVER_HOST);
-    int kdcPort = getFreePort();
-    kdc.setAllowTcp(true);
-    kdc.setAllowUdp(false);
-    kdc.setKdcTcpPort(kdcPort);
-
-    LOG.info("Starting KDC server at " + KDC_SERVER_HOST + ":" + kdcPort);
-
-    kdc.init();
-
-    return kdc;
-  }
-
-  private static Configuration buildSpnegoConfiguration(String 
serverPrincipal, File
-      serverKeytab) {
-    Configuration conf = new Configuration();
-    KerberosName.setRules("DEFAULT");
-
-    conf.setInt(HttpServer.HTTP_MAX_THREADS, TestHttpServer.MAX_THREADS);
-
-    // Enable Kerberos (pre-req)
-    conf.set("hbase.security.authentication", "kerberos");
-    conf.set(HttpServer.HTTP_UI_AUTHENTICATION, "kerberos");
-    conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY, 
serverPrincipal);
-    conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY, 
serverKeytab.getAbsolutePath());
-
-    return conf;
-  }
-
-  @Test
-  public void testUnauthorizedClientsDisallowed() throws IOException {
-    URL url = new URL(getServerURL(server), "/echo?a=b");
-    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
-  }
-
-  @Test
-  public void testAllowedClient() throws Exception {
-    // Create the subject for the client
-    final Subject clientSubject = 
JaasKrbUtil.loginUsingKeytab(CLIENT_PRINCIPAL, clientKeytab);
-    final Set<Principal> clientPrincipals = clientSubject.getPrincipals();
-    // Make sure the subject has a principal
-    assertFalse(clientPrincipals.isEmpty());
-
-    // Get a TGT for the subject (might have many, different encryption 
types). The first should
-    // be the default encryption type.
-    Set<KerberosTicket> privateCredentials =
-            clientSubject.getPrivateCredentials(KerberosTicket.class);
-    assertFalse(privateCredentials.isEmpty());
-    KerberosTicket tgt = privateCredentials.iterator().next();
-    assertNotNull(tgt);
-
-    // The name of the principal
-    final String principalName = clientPrincipals.iterator().next().getName();
-
-    // Run this code, logged in as the subject (the client)
-    HttpResponse resp = Subject.doAs(clientSubject,
-        new PrivilegedExceptionAction<HttpResponse>() {
-      @Override
-      public HttpResponse run() throws Exception {
-        // Logs in with Kerberos via GSS
-        GSSManager gssManager = GSSManager.getInstance();
-        // jGSS Kerberos login constant
-        Oid oid = new Oid("1.2.840.113554.1.2.2");
-        GSSName gssClient = gssManager.createName(principalName, 
GSSName.NT_USER_NAME);
-        GSSCredential credential = gssManager.createCredential(gssClient,
-            GSSCredential.DEFAULT_LIFETIME, oid, GSSCredential.INITIATE_ONLY);
-
-        HttpClientContext context = HttpClientContext.create();
-        Lookup<AuthSchemeProvider> authRegistry = 
RegistryBuilder.<AuthSchemeProvider>create()
-            .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true, true))
-            .build();
-
-        HttpClient client = 
HttpClients.custom().setDefaultAuthSchemeRegistry(authRegistry).build();
-        BasicCredentialsProvider credentialsProvider = new 
BasicCredentialsProvider();
-        credentialsProvider.setCredentials(AuthScope.ANY, new 
KerberosCredentials(credential));
-
-        URL url = new URL(getServerURL(server), "/echo?a=b");
-        context.setTargetHost(new HttpHost(url.getHost(), url.getPort()));
-        context.setCredentialsProvider(credentialsProvider);
-        context.setAuthSchemeRegistry(authRegistry);
-
-        HttpGet get = new HttpGet(url.toURI());
-        return client.execute(get, context);
-      }
-    });
-
-    assertNotNull(resp);
-    assertEquals(HttpURLConnection.HTTP_OK, 
resp.getStatusLine().getStatusCode());
-    assertEquals("a:b", EntityUtils.toString(resp.getEntity()).trim());
-  }
-
-  @Test(expected = IllegalArgumentException.class)
-  public void testMissingConfigurationThrowsException() throws Exception {
-    Configuration conf = new Configuration();
-    conf.setInt(HttpServer.HTTP_MAX_THREADS, TestHttpServer.MAX_THREADS);
-    // Enable Kerberos (pre-req)
-    conf.set("hbase.security.authentication", "kerberos");
-    // Intentionally skip keytab and principal
-
-    HttpServer customServer = createTestServerWithSecurity(conf);
-    customServer.addServlet("echo", "/echo", EchoServlet.class);
-    
customServer.addJerseyResourcePackage(JerseyResource.class.getPackage().getName(),
 "/jersey/*");
-    customServer.start();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/d6982414/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
deleted file mode 100644
index e1d9aca..0000000
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http.conf;
-
-import java.io.StringReader;
-import java.io.StringWriter;
-import java.util.Map;
-
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.eclipse.jetty.util.ajax.JSON;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-import org.xml.sax.InputSource;
-
-/**
- * Basic test case that the ConfServlet can write configuration
- * to its output in XML and JSON format.
- */
-@Category({MiscTests.class, SmallTests.class})
-public class TestConfServlet extends TestCase {
-  private static final String TEST_KEY = "testconfservlet.key";
-  private static final String TEST_VAL = "testval";
-
-  private Configuration getTestConf() {
-    Configuration testConf = new Configuration();
-    testConf.set(TEST_KEY, TEST_VAL);
-    return testConf;
-  }
-
-  @Test
-  @SuppressWarnings("unchecked")
-  public void testWriteJson() throws Exception {
-    StringWriter sw = new StringWriter();
-    ConfServlet.writeResponse(getTestConf(), sw, "json");
-    String json = sw.toString();
-    boolean foundSetting = false;
-    Object parsed = JSON.parse(json);
-    Object[] properties = ((Map<String, Object[]>)parsed).get("properties");
-    for (Object o : properties) {
-      Map<String, Object> propertyInfo = (Map<String, Object>)o;
-      String key = (String)propertyInfo.get("key");
-      String val = (String)propertyInfo.get("value");
-      String resource = (String)propertyInfo.get("resource");
-      System.err.println("k: " + key + " v: " + val + " r: " + resource);
-      if (TEST_KEY.equals(key) && TEST_VAL.equals(val)
-          && "programatically".equals(resource)) {
-        foundSetting = true;
-      }
-    }
-    assertTrue(foundSetting);
-  }
-
-  @Test
-  public void testWriteXml() throws Exception {
-    StringWriter sw = new StringWriter();
-    ConfServlet.writeResponse(getTestConf(), sw, "xml");
-    String xml = sw.toString();
-
-    DocumentBuilderFactory docBuilderFactory 
-      = DocumentBuilderFactory.newInstance();
-    DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
-    Document doc = builder.parse(new InputSource(new StringReader(xml)));
-    NodeList nameNodes = doc.getElementsByTagName("name");
-    boolean foundSetting = false;
-    for (int i = 0; i < nameNodes.getLength(); i++) {
-      Node nameNode = nameNodes.item(i);
-      String key = nameNode.getTextContent();
-      System.err.println("xml key: " + key);
-      if (TEST_KEY.equals(key)) {
-        foundSetting = true;
-        Element propertyElem = (Element)nameNode.getParentNode();
-        String val = 
propertyElem.getElementsByTagName("value").item(0).getTextContent();
-        assertEquals(TEST_VAL, val);
-      }
-    }
-    assertTrue(foundSetting);
-  }
-
-  @Test
-  public void testBadFormat() throws Exception {
-    StringWriter sw = new StringWriter();
-    try {
-      ConfServlet.writeResponse(getTestConf(), sw, "not a format");
-      fail("writeResponse with bad format didn't throw!");
-    } catch (ConfServlet.BadFormatException bfe) {
-      // expected
-    }
-    assertEquals("", sw.toString());
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/d6982414/hbase-server/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
deleted file mode 100644
index d7e68d5..0000000
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.http.jmx;
-
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.net.URLEncoder;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.http.HttpServer;
-import org.apache.hadoop.hbase.http.HttpServerFunctionalTest;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestJMXJsonServlet extends HttpServerFunctionalTest {
-  private   static final Log LOG = LogFactory.getLog(TestJMXJsonServlet.class);
-  private static HttpServer server;
-  private static URL baseUrl;
-
-  @BeforeClass public static void setup() throws Exception {
-    // Eclipse doesn't pick this up correctly from the plugin
-    // configuration in the pom.
-    System.setProperty(HttpServerFunctionalTest.TEST_BUILD_WEBAPPS, 
"target/test-classes/webapps");
-    server = createTestServer();
-    server.start();
-    baseUrl = getServerURL(server);
-  }
-  
-  @AfterClass public static void cleanup() throws Exception {
-    server.stop();
-  }
-  
-  public static void assertReFind(String re, String value) {
-    Pattern p = Pattern.compile(re);
-    Matcher m = p.matcher(value);
-    assertTrue("'"+p+"' does not match "+value, m.find());
-  }
-  
-  @Test public void testQuery() throws Exception {
-    String result = readOutput(new URL(baseUrl, 
"/jmx?qry=java.lang:type=Runtime"));
-    LOG.info("/jmx?qry=java.lang:type=Runtime RESULT: "+result);
-    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Runtime\"", result);
-    assertReFind("\"modelerType\"", result);
-    
-    result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Memory"));
-    LOG.info("/jmx?qry=java.lang:type=Memory RESULT: "+result);
-    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
-    assertReFind("\"modelerType\"", result);
-    
-    result = readOutput(new URL(baseUrl, "/jmx"));
-    LOG.info("/jmx RESULT: "+result);
-    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
-    
-    // test to get an attribute of a mbean
-    result = readOutput(new URL(baseUrl, 
-        "/jmx?get=java.lang:type=Memory::HeapMemoryUsage"));
-    LOG.info("/jmx RESULT: "+result);
-    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
-    assertReFind("\"committed\"\\s*:", result);
-    
-    // negative test to get an attribute of a mbean
-    result = readOutput(new URL(baseUrl, 
-        "/jmx?get=java.lang:type=Memory::"));
-    LOG.info("/jmx RESULT: "+result);
-    assertReFind("\"ERROR\"", result);
-
-    // test to get JSONP result
-    result = readOutput(new URL(baseUrl, 
"/jmx?qry=java.lang:type=Memory&callback=mycallback1"));
-    LOG.info("/jmx?qry=java.lang:type=Memory&callback=mycallback RESULT: 
"+result);
-    assertReFind("^mycallback1\\(\\{", result);
-    assertReFind("\\}\\);$", result);
-
-    // negative test to get an attribute of a mbean as JSONP
-    result = readOutput(new URL(baseUrl,
-        "/jmx?get=java.lang:type=Memory::&callback=mycallback2"));
-    LOG.info("/jmx RESULT: "+result);
-    assertReFind("^mycallback2\\(\\{", result);
-    assertReFind("\"ERROR\"", result);
-    assertReFind("\\}\\);$", result);
-
-    // test to get an attribute of a mbean as JSONP
-    result = readOutput(new URL(baseUrl,
-        
"/jmx?get=java.lang:type=Memory::HeapMemoryUsage&callback=mycallback3"));
-    LOG.info("/jmx RESULT: "+result);
-    assertReFind("^mycallback3\\(\\{", result);
-    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
-    assertReFind("\"committed\"\\s*:", result);
-    assertReFind("\\}\\);$", result);
-
-  }
-
-  @Test
-  public void testDisallowedJSONPCallback() throws Exception {
-    String callback = "function(){alert('bigproblems!')};foo";
-    URL url = new URL(
-        baseUrl, 
"/jmx?qry=java.lang:type=Memory&callback="+URLEncoder.encode(callback, 
"UTF-8"));
-    HttpURLConnection cnxn = (HttpURLConnection) url.openConnection();
-    assertEquals(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, 
cnxn.getResponseCode());
-  }
-
-  @Test
-  public void testUnderscoresInJSONPCallback() throws Exception {
-    String callback = "my_function";
-    URL url = new URL(
-        baseUrl, 
"/jmx?qry=java.lang:type=Memory&callback="+URLEncoder.encode(callback, 
"UTF-8"));
-    HttpURLConnection cnxn = (HttpURLConnection) url.openConnection();
-    assertEquals(HttpServletResponse.SC_OK, cnxn.getResponseCode());
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/d6982414/hbase-server/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
deleted file mode 100644
index 81bcbd5..0000000
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http.lib;
-
-import static org.junit.Assert.assertEquals;
-import static org.mockito.Mockito.mock;
-
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.http.ServerConfigurationKeys;
-import org.apache.hadoop.hbase.http.lib.StaticUserWebFilter.StaticUserFilter;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Mockito;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestStaticUserWebFilter {
-  private FilterConfig mockConfig(String username) {
-    FilterConfig mock = Mockito.mock(FilterConfig.class);
-    Mockito.doReturn(username).when(mock).getInitParameter(
-            ServerConfigurationKeys.HBASE_HTTP_STATIC_USER);
-    return mock;
-  }
-  
-  @Test
-  public void testFilter() throws Exception {
-    FilterConfig config = mockConfig("myuser");
-    StaticUserFilter suf = new StaticUserFilter();
-    suf.init(config);
-    
-    ArgumentCaptor<HttpServletRequestWrapper> wrapperArg =
-      ArgumentCaptor.forClass(HttpServletRequestWrapper.class);
-
-    FilterChain chain = mock(FilterChain.class);
-    
-    suf.doFilter(mock(HttpServletRequest.class), mock(ServletResponse.class),
-        chain);
-        
-    Mockito.verify(chain).doFilter(wrapperArg.capture(), 
Mockito.<ServletResponse>anyObject());
-    
-    HttpServletRequestWrapper wrapper = wrapperArg.getValue();
-    assertEquals("myuser", wrapper.getUserPrincipal().getName());
-    assertEquals("myuser", wrapper.getRemoteUser());
-    
-    suf.destroy();
-  }
-  
-  @Test
-  public void testOldStyleConfiguration() {
-    Configuration conf = new Configuration();
-    conf.set("dfs.web.ugi", "joe,group1,group2");
-    assertEquals("joe", StaticUserWebFilter.getUsernameFromConf(conf));
-  }
-
-  @Test
-  public void testConfiguration() {
-    Configuration conf = new Configuration();
-    conf.set(CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER, "dr.stack");
-    assertEquals("dr.stack", StaticUserWebFilter.getUsernameFromConf(conf));
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/d6982414/hbase-server/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
deleted file mode 100644
index e14e3b4..0000000
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/**
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-package org.apache.hadoop.hbase.http.log;
-
-import static org.junit.Assert.assertTrue;
-
-import java.io.*;
-import java.net.*;
-
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.http.HttpServer;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.commons.logging.*;
-import org.apache.commons.logging.impl.*;
-import org.apache.log4j.*;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category({MiscTests.class, SmallTests.class})
-public class TestLogLevel {
-  static final PrintStream out = System.out;
-
-  @Test (timeout=60000)
-  @SuppressWarnings("deprecation")
-  public void testDynamicLogLevel() throws Exception {
-    String logName = TestLogLevel.class.getName();
-    Log testlog = LogFactory.getLog(logName);
-
-    //only test Log4JLogger
-    if (testlog instanceof Log4JLogger) {
-      Logger log = ((Log4JLogger)testlog).getLogger();
-      log.debug("log.debug1");
-      log.info("log.info1");
-      log.error("log.error1");
-      assertTrue(!Level.ERROR.equals(log.getEffectiveLevel()));
-
-      HttpServer server = null;
-      try {
-        server = new HttpServer.Builder().setName("..")
-            .addEndpoint(new URI("http://localhost:0";)).setFindPort(true)
-            .build();
-
-        server.start();
-        String authority = NetUtils.getHostPortString(server
-            .getConnectorAddress(0));
-
-        //servlet
-        URL url =
-            new URL("http://"; + authority + "/logLevel?log=" + logName + 
"&level=" + Level.ERROR);
-        out.println("*** Connecting to " + url);
-        try (BufferedReader in = new BufferedReader(new 
InputStreamReader(url.openStream()))) {
-          for(String line; (line = in.readLine()) != null; out.println(line));
-        }
-        log.debug("log.debug2");
-        log.info("log.info2");
-        log.error("log.error2");
-        assertTrue(Level.ERROR.equals(log.getEffectiveLevel()));
-
-        //command line
-        String[] args = {"-setlevel", authority, logName, 
Level.DEBUG.toString()};
-        LogLevel.main(args);
-        log.debug("log.debug3");
-        log.info("log.info3");
-        log.error("log.error3");
-        assertTrue(Level.DEBUG.equals(log.getEffectiveLevel()));
-      } finally {
-        if (server != null) {
-          server.stop();
-        }
-      }
-    }
-    else {
-      out.println(testlog.getClass() + " not tested.");
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/d6982414/hbase-server/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
deleted file mode 100644
index bf0e609..0000000
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.http.resource;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.TreeMap;
-
-import javax.ws.rs.DefaultValue;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.eclipse.jetty.util.ajax.JSON;
-
-/**
- * A simple Jersey resource class TestHttpServer.
- * The servlet simply puts the path and the op parameter in a map
- * and return it in JSON format in the response.
- */
-@Path("")
-public class JerseyResource {
-  private static final Log LOG = LogFactory.getLog(JerseyResource.class);
-
-  public static final String PATH = "path";
-  public static final String OP = "op";
-
-  @GET
-  @Path("{" + PATH + ":.*}")
-  @Produces({MediaType.APPLICATION_JSON})
-  public Response get(
-      @PathParam(PATH) @DefaultValue("UNKNOWN_" + PATH) final String path,
-      @QueryParam(OP) @DefaultValue("UNKNOWN_" + OP) final String op
-      ) throws IOException {
-    LOG.info("get: " + PATH + "=" + path + ", " + OP + "=" + op);
-
-    final Map<String, Object> m = new TreeMap<>();
-    m.put(PATH, path);
-    m.put(OP, op);
-    final String js = JSON.toString(m);
-    return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
-  }
-}

Reply via email to