Repository: knox
Updated Branches:
  refs/heads/master 105dacd6a -> 5d2ed18d4


KNOX-1268 - Add support for HDFS-related service discovery with HTTPS 
configurations


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/5d2ed18d
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/5d2ed18d
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/5d2ed18d

Branch: refs/heads/master
Commit: 5d2ed18d43c2f9c03dc894ea5ba247d7900907c4
Parents: 105dacd
Author: Phil Zampino <[email protected]>
Authored: Mon Apr 23 11:40:45 2018 -0400
Committer: Phil Zampino <[email protected]>
Committed: Mon Apr 23 11:40:45 2018 -0400

----------------------------------------------------------------------
 .../discovery/ambari/HDFSURLCreatorBase.java    |  93 ++++++---
 .../discovery/ambari/WebHdfsUrlCreatorTest.java | 195 +++++++++++++++++++
 2 files changed, 258 insertions(+), 30 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/5d2ed18d/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/HDFSURLCreatorBase.java
----------------------------------------------------------------------
diff --git 
a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/HDFSURLCreatorBase.java
 
b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/HDFSURLCreatorBase.java
index c6142df..54fa87e 100644
--- 
a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/HDFSURLCreatorBase.java
+++ 
b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/HDFSURLCreatorBase.java
@@ -24,10 +24,23 @@ import java.util.Map;
 
 public abstract class HDFSURLCreatorBase implements ServiceURLCreator {
 
-  private static final String SCHEME_HTTP  = "http";
-  private static final String SCHEME_HTTPS = "https";
+  static final String CONFIG_SERVICE_NAMENODE = "NAMENODE";
+  static final String CONFIG_SERVICE_HDFS     = "HDFS";
+  static final String CONFIG_TYPE_HDFS_SITE   = "hdfs-site";
+  static final String CONFIG_TYPE_CORE_SITE   = "core-site";
 
-  private static final String NAMESERVICE_PARAM = "discovery-nameservice";
+  static final String HTTP_POLICY_PROPERTY = "dfs.http.policy";
+
+  static final String HTTP_ONLY_POLICY  = "HTTP_ONLY";
+  static final String HTTPS_ONLY_POLICY = "HTTPS_ONLY";
+
+  static final String SCHEME_HTTP  = "http";
+  static final String SCHEME_HTTPS = "https";
+
+  static final String HTTP_ADDRESS_PROPERTY  = "dfs.namenode.http-address";
+  static final String HTTPS_ADDRESS_PROPERTY = "dfs.namenode.https-address";
+
+  static final String NAMESERVICE_PARAM = "discovery-nameservice";
 
   protected AmbariServiceDiscoveryMessages log = 
MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
 
@@ -42,11 +55,12 @@ public abstract class HDFSURLCreatorBase implements 
ServiceURLCreator {
     List<String> urls = new ArrayList<>();
 
     if (getTargetService().equals(service)) {
-      AmbariCluster.ServiceConfiguration sc = 
cluster.getServiceConfiguration("HDFS", "hdfs-site");
+      AmbariCluster.ServiceConfiguration sc =
+                                      
cluster.getServiceConfiguration(CONFIG_SERVICE_HDFS, CONFIG_TYPE_HDFS_SITE);
       if (sc != null) {
         // First, check if it's HA config
         String nameServices = null;
-        AmbariComponent nameNodeComp = cluster.getComponent("NAMENODE");
+        AmbariComponent nameNodeComp = 
cluster.getComponent(CONFIG_SERVICE_NAMENODE);
         if (nameNodeComp != null) {
           nameServices = nameNodeComp.getConfigProperty("dfs.nameservices");
         }
@@ -66,7 +80,8 @@ public abstract class HDFSURLCreatorBase implements 
ServiceURLCreator {
               ns = nsParam;
             } else {
               // core-site.xml : dfs.defaultFS property (e.g., hdfs://ns1)
-              AmbariCluster.ServiceConfiguration coreSite = 
cluster.getServiceConfiguration("HDFS", "core-site");
+              AmbariCluster.ServiceConfiguration coreSite =
+                                        
cluster.getServiceConfiguration(CONFIG_SERVICE_HDFS, CONFIG_TYPE_CORE_SITE);
               if (coreSite != null) {
                 String defaultFS = 
coreSite.getProperties().get("fs.defaultFS");
                 if (defaultFS != null) {
@@ -88,28 +103,30 @@ public abstract class HDFSURLCreatorBase implements 
ServiceURLCreator {
           // nameservice. If this property is present, use its value to create 
the correct URLs.
           String nameServiceNodes = props.get("dfs.ha.namenodes." + ns);
           if (nameServiceNodes != null) {
+            String addressPropertyPrefix = getAddressPropertyPrefix();
             String[] nodes = nameServiceNodes.split(",");
             for (String node : nodes) {
-              String propertyValue = getHANameNodeHttpAddress(props, ns, node);
+              String propertyValue = 
getHANameNodeHttpAddress(addressPropertyPrefix, props, ns, node);
               if (propertyValue != null) {
                 urls.add(createURL(propertyValue));
               }
             }
           } else {
-            // Name node HTTP addresses are defined as properties of the form:
-            //      dfs.namenode.http-address.<NAMESERVICE>.nn<INDEX>
-            // So, this iterates over the nn<INDEX> properties until there is 
no such property (since it cannot be known how
-            // many are defined by any other means).
+            // Name node HTTP[S] addresses are defined as properties of the 
form:
+            //      dfs.namenode.http[s]-address.<NAMESERVICE>.nn<INDEX>
+            // So, this iterates over the nn<INDEX> properties until there is 
no such property (since it cannot be
+            // known how many are defined by any other means).
+            String addressPropertyPrefix = getAddressPropertyPrefix();
             int i = 1;
-            String propertyValue = getHANameNodeHttpAddress(props, ns, i++);
+            String propertyValue = 
getHANameNodeHttpAddress(addressPropertyPrefix, props, ns, i++);
             while (propertyValue != null) {
               urls.add(createURL(propertyValue));
-              propertyValue = getHANameNodeHttpAddress(props, ns, i++);
+              propertyValue = getHANameNodeHttpAddress(addressPropertyPrefix, 
props, ns, i++);
             }
           }
 
-        } else { // If it's not an HA configuration, get the single name node 
HTTP address
-          
urls.add(createURL(sc.getProperties().get("dfs.namenode.http-address")));
+        } else { // If it's not an HA configuration, get the single name node 
HTTP[S] address
+          
urls.add(createURL(sc.getProperties().get(getAddressPropertyPrefix())));
         }
       }
     }
@@ -119,7 +136,8 @@ public abstract class HDFSURLCreatorBase implements 
ServiceURLCreator {
 
 
   // Verify whether the declared nameservice is among the configured 
nameservices in the cluster
-  private static boolean 
validateDeclaredNameService(AmbariCluster.ServiceConfiguration hdfsSite, String 
declaredNameService) {
+  private static boolean 
validateDeclaredNameService(AmbariCluster.ServiceConfiguration hdfsSite,
+                                                     String                    
         declaredNameService) {
     boolean isValid = false;
     String nameservices = hdfsSite.getProperties().get("dfs.nameservices");
     if (nameservices != null) {
@@ -135,31 +153,46 @@ public abstract class HDFSURLCreatorBase implements 
ServiceURLCreator {
   }
 
 
-  private static String getHANameNodeHttpAddress(Map<String, String> props, 
String nameService, int index) {
-    return props.get("dfs.namenode.http-address." + nameService + ".nn" + 
index);
+  private static String getHANameNodeHttpAddress(String              
addressPropertyPrefix,
+                                                 Map<String, String> props,
+                                                 String              
nameService,
+                                                 int                 index) {
+    return props.get(addressPropertyPrefix + "." + nameService + ".nn" + 
index);
   }
 
 
-  private static String getHANameNodeHttpAddress(Map<String, String> props, 
String nameService, String node) {
-    return props.get("dfs.namenode.http-address." + nameService + "." + node);
+  private static String getHANameNodeHttpAddress(String              
addressPropertyPrefix,
+                                                 Map<String, String> props,
+                                                 String              
nameService,
+                                                 String              node) {
+    return props.get(addressPropertyPrefix + "." + nameService + "." + node);
   }
 
+  /**
+   * @return The HTTP or HTTPS address property name prefix, depending on the 
value of the dfs.http.policy property
+   */
+  private String getAddressPropertyPrefix() {
+    return HTTPS_ONLY_POLICY.equals(getHttpPolicy()) ? HTTPS_ADDRESS_PROPERTY 
: HTTP_ADDRESS_PROPERTY;
+  }
 
-  protected abstract String createURL(String address);
-
-
-  protected String getURLScheme() {
-    String scheme = SCHEME_HTTP;
+  private String getHttpPolicy() {
+    String httpPolicy = HTTP_ONLY_POLICY;
 
-    AmbariCluster.ServiceConfiguration sc = 
cluster.getServiceConfiguration("HDFS", "hdfs-site");
+    AmbariCluster.ServiceConfiguration sc = 
cluster.getServiceConfiguration(CONFIG_SERVICE_HDFS, CONFIG_TYPE_HDFS_SITE);
     if (sc != null) {
-      String httpPolicy = sc.getProperties().get("dfs.http.policy");
-      if (httpPolicy != null) {
-        scheme = httpPolicy.equals("HTTPS_ONLY") ? SCHEME_HTTPS : SCHEME_HTTP;
+      String propertyValue = sc.getProperties().get(HTTP_POLICY_PROPERTY);
+      if (propertyValue != null && !propertyValue.isEmpty()) {
+        httpPolicy = propertyValue;
       }
     }
+    return httpPolicy;
+  }
 
-    return scheme;
+  protected abstract String createURL(String address);
+
+
+  protected String getURLScheme() {
+    return HTTPS_ONLY_POLICY.equals(getHttpPolicy()) ? SCHEME_HTTPS : 
SCHEME_HTTP;
   }
 
 

http://git-wip-us.apache.org/repos/asf/knox/blob/5d2ed18d/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/WebHdfsUrlCreatorTest.java
----------------------------------------------------------------------
diff --git 
a/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/WebHdfsUrlCreatorTest.java
 
b/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/WebHdfsUrlCreatorTest.java
new file mode 100644
index 0000000..46a2474
--- /dev/null
+++ 
b/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/WebHdfsUrlCreatorTest.java
@@ -0,0 +1,195 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations 
under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+public class WebHdfsUrlCreatorTest {
+
+
+  @Test
+  public void testHttpEndpointAddress() {
+    final String HTTP_ADDRESS  = "host1:20070";
+    final String HTTPS_ADDRESS = "host2:20470";
+
+    AmbariCluster.ServiceConfiguration hdfsSvcConfig = 
EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+
+    Map<String, String> configProps = new HashMap<>();
+    configProps.put(HDFSURLCreatorBase.HTTP_POLICY_PROPERTY, 
HDFSURLCreatorBase.HTTP_ONLY_POLICY);
+    configProps.put(HDFSURLCreatorBase.HTTP_ADDRESS_PROPERTY, HTTP_ADDRESS);
+    configProps.put(HDFSURLCreatorBase.HTTPS_ADDRESS_PROPERTY, HTTPS_ADDRESS);
+
+    
EasyMock.expect(hdfsSvcConfig.getProperties()).andReturn(configProps).anyTimes();
+    EasyMock.replay(hdfsSvcConfig);
+
+    AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+    EasyMock.expect(cluster.getServiceConfiguration("HDFS", "hdfs-site"))
+            .andReturn(hdfsSvcConfig)
+            .anyTimes();
+    EasyMock.replay(cluster);
+
+    WebHdfsUrlCreator c = new WebHdfsUrlCreator();
+    c.init(cluster);
+    List<String> urls = c.create("WEBHDFS", null);
+    assertNotNull(urls);
+    assertFalse(urls.isEmpty());
+    assertEquals(1, urls.size());
+    assertEquals("http://" + HTTP_ADDRESS + "/webhdfs", urls.get(0));
+  }
+
+
+  @Test
+  public void testHttpsEndpointAddress() {
+    final String HTTP_ADDRESS  = "host1:20070";
+    final String HTTPS_ADDRESS = "host2:20470";
+
+    AmbariCluster.ServiceConfiguration hdfsSvcConfig = 
EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+
+    Map<String, String> configProps = new HashMap<>();
+    configProps.put(HDFSURLCreatorBase.HTTP_POLICY_PROPERTY, 
HDFSURLCreatorBase.HTTPS_ONLY_POLICY);
+    configProps.put(HDFSURLCreatorBase.HTTP_ADDRESS_PROPERTY, HTTP_ADDRESS);
+    configProps.put(HDFSURLCreatorBase.HTTPS_ADDRESS_PROPERTY, HTTPS_ADDRESS);
+
+    
EasyMock.expect(hdfsSvcConfig.getProperties()).andReturn(configProps).anyTimes();
+    EasyMock.replay(hdfsSvcConfig);
+
+    AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+    EasyMock.expect(cluster.getServiceConfiguration("HDFS", "hdfs-site"))
+            .andReturn(hdfsSvcConfig)
+            .anyTimes();
+    EasyMock.replay(cluster);
+
+    WebHdfsUrlCreator c = new WebHdfsUrlCreator();
+    c.init(cluster);
+    List<String> urls = c.create("WEBHDFS", null);
+    assertNotNull(urls);
+    assertFalse(urls.isEmpty());
+    assertEquals(1, urls.size());
+    assertEquals("https://" + HTTPS_ADDRESS + "/webhdfs", urls.get(0));
+  }
+
+
+  @Test
+  public void testFederatedHttpEndpointAddress() {
+    final String HTTP_ADDRESS  = "host1:20070";
+    final String HTTPS_ADDRESS = "host2:20470";
+
+    AmbariComponent nnComp = EasyMock.createNiceMock(AmbariComponent.class);
+    
EasyMock.expect(nnComp.getConfigProperty("dfs.nameservices")).andReturn("X,Y").anyTimes();
+    EasyMock.replay(nnComp);
+
+    AmbariCluster.ServiceConfiguration coreSiteConfig = 
EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+    Map<String, String> corSiteProps = new HashMap<>();
+    corSiteProps.put("fs.defaultFS", "hdfs://X");
+    
EasyMock.expect(coreSiteConfig.getProperties()).andReturn(corSiteProps).anyTimes();
+    EasyMock.replay(coreSiteConfig);
+
+    AmbariCluster.ServiceConfiguration hdfsSiteConfig = 
EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+    Map<String, String> configProps = new HashMap<>();
+    configProps.put(HDFSURLCreatorBase.HTTP_POLICY_PROPERTY, 
HDFSURLCreatorBase.HTTP_ONLY_POLICY);
+    configProps.put(HDFSURLCreatorBase.HTTP_ADDRESS_PROPERTY, HTTP_ADDRESS);
+    configProps.put(HDFSURLCreatorBase.HTTPS_ADDRESS_PROPERTY, HTTPS_ADDRESS);
+    configProps.put("dfs.ha.namenodes.X", "nn1,nn2");
+    configProps.put(HDFSURLCreatorBase.HTTP_ADDRESS_PROPERTY + ".X.nn1", 
"host3:20070");
+    configProps.put(HDFSURLCreatorBase.HTTP_ADDRESS_PROPERTY + ".X.nn2", 
"host4:20070");
+
+    
EasyMock.expect(hdfsSiteConfig.getProperties()).andReturn(configProps).anyTimes();
+    EasyMock.replay(hdfsSiteConfig);
+
+    AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+    EasyMock.expect(cluster.getComponent("NAMENODE"))
+            .andReturn(nnComp)
+            .anyTimes();
+    EasyMock.expect(cluster.getServiceConfiguration("HDFS", "hdfs-site"))
+            .andReturn(hdfsSiteConfig)
+            .anyTimes();
+    EasyMock.expect(cluster.getServiceConfiguration("HDFS", "core-site"))
+            .andReturn(coreSiteConfig)
+            .anyTimes();
+    EasyMock.replay(cluster);
+
+    WebHdfsUrlCreator c = new WebHdfsUrlCreator();
+    c.init(cluster);
+    List<String> urls = c.create("WEBHDFS", null);
+    assertNotNull(urls);
+    assertFalse(urls.isEmpty());
+    assertEquals(2, urls.size());
+    assertTrue(urls.contains("http://" + "host3:20070" + "/webhdfs"));
+    assertTrue(urls.contains("http://" + "host4:20070" + "/webhdfs"));
+  }
+
+
+  @Test
+  public void testFederatedHttpsEndpointAddress() {
+    final String HTTP_ADDRESS  = "host1:20070";
+    final String HTTPS_ADDRESS = "host2:20470";
+
+    AmbariComponent nnComp = EasyMock.createNiceMock(AmbariComponent.class);
+    
EasyMock.expect(nnComp.getConfigProperty("dfs.nameservices")).andReturn("X,Y").anyTimes();
+    EasyMock.replay(nnComp);
+
+    AmbariCluster.ServiceConfiguration coreSiteConfig = 
EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+    Map<String, String> corSiteProps = new HashMap<>();
+    corSiteProps.put("fs.defaultFS", "hdfs://Y");
+    
EasyMock.expect(coreSiteConfig.getProperties()).andReturn(corSiteProps).anyTimes();
+    EasyMock.replay(coreSiteConfig);
+
+    AmbariCluster.ServiceConfiguration hdfsSiteConfig = 
EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+    Map<String, String> configProps = new HashMap<>();
+    configProps.put(HDFSURLCreatorBase.HTTP_POLICY_PROPERTY, 
HDFSURLCreatorBase.HTTPS_ONLY_POLICY);
+    configProps.put(HDFSURLCreatorBase.HTTP_ADDRESS_PROPERTY, HTTP_ADDRESS);
+    configProps.put(HDFSURLCreatorBase.HTTPS_ADDRESS_PROPERTY, HTTPS_ADDRESS);
+    configProps.put("dfs.ha.namenodes.Y", "nn7,nn8");
+    configProps.put(HDFSURLCreatorBase.HTTPS_ADDRESS_PROPERTY + ".Y.nn7", 
"host5:20470");
+    configProps.put(HDFSURLCreatorBase.HTTPS_ADDRESS_PROPERTY + ".Y.nn8", 
"host6:20470");
+
+    
EasyMock.expect(hdfsSiteConfig.getProperties()).andReturn(configProps).anyTimes();
+    EasyMock.replay(hdfsSiteConfig);
+
+    AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+    EasyMock.expect(cluster.getComponent("NAMENODE"))
+            .andReturn(nnComp)
+            .anyTimes();
+    EasyMock.expect(cluster.getServiceConfiguration("HDFS", "hdfs-site"))
+            .andReturn(hdfsSiteConfig)
+            .anyTimes();
+    EasyMock.expect(cluster.getServiceConfiguration("HDFS", "core-site"))
+            .andReturn(coreSiteConfig)
+            .anyTimes();
+    EasyMock.replay(cluster);
+
+    WebHdfsUrlCreator c = new WebHdfsUrlCreator();
+    c.init(cluster);
+    List<String> urls = c.create("WEBHDFS", null);
+    assertNotNull(urls);
+    assertFalse(urls.isEmpty());
+    assertEquals(2, urls.size());
+    assertTrue(urls.contains("https://" + "host5:20470" + "/webhdfs"));
+    assertTrue(urls.contains("https://" + "host6:20470" + "/webhdfs"));
+  }
+
+}

Reply via email to