Repository: knox
Updated Branches:
  refs/heads/master f1d3e1031 -> ec915ded8


KNOX-1193 - Added discovery support for LIVYSERVER, SPARKHISTORYUI, JOBHISTORYUI, YARNUI, FALCON, OOZIEUI, HBASEUI, HDFSUI, DRUID-COORDINATOR-UI and DRUID-OVERLORD-UI


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/ec915ded
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/ec915ded
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/ec915ded

Branch: refs/heads/master
Commit: ec915ded83af86ad96d005c957b03b8807cd197e
Parents: f1d3e10
Author: Phil Zampino <[email protected]>
Authored: Mon Feb 26 17:13:54 2018 -0500
Committer: Phil Zampino <[email protected]>
Committed: Tue Feb 27 11:39:10 2018 -0500

----------------------------------------------------------------------
 .../ambari/AmbariDynamicServiceURLCreator.java  |  58 +++--
 .../discovery/ambari/HDFSURLCreatorBase.java    | 169 ++++++++++++
 .../discovery/ambari/HdfsUIUrlCreator.java      |  41 +++
 .../discovery/ambari/ServiceURLFactory.java     |   1 +
 .../discovery/ambari/WebHdfsUrlCreator.java     | 123 +--------
 ...iscovery-component-config-mapping.properties |   5 +
 .../ambari-service-discovery-url-mappings.xml   | 177 ++++++++++++-
 .../AmbariDynamicServiceURLCreatorTest.java     | 261 ++++++++++++++++++-
 8 files changed, 677 insertions(+), 158 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/ec915ded/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java
index d769a18..ceb7b50 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java
@@ -82,36 +82,38 @@ class AmbariDynamicServiceURLCreator implements ServiceURLCreator {
                 ServiceURLPropertyConfig.Property configProperty = config.getConfigProperty(serviceName, propertyName);
 
                 String propertyValue = null;
-                String propertyType = configProperty.getType();
-                if (ServiceURLPropertyConfig.Property.TYPE_SERVICE.equals(propertyType)) {
-                    log.lookingUpServiceConfigProperty(configProperty.getService(), configProperty.getServiceConfig(), configProperty.getValue());
-                    AmbariCluster.ServiceConfiguration svcConfig =
-                        cluster.getServiceConfiguration(configProperty.getService(), configProperty.getServiceConfig());
-                    if (svcConfig != null) {
-                        propertyValue = svcConfig.getProperties().get(configProperty.getValue());
-                    }
-                } else if (ServiceURLPropertyConfig.Property.TYPE_COMPONENT.equals(propertyType)) {
-                    String compName = configProperty.getComponent();
-                    if (compName != null) {
-                        AmbariComponent component = cluster.getComponent(compName);
-                        if (component != null) {
-                            if (ServiceURLPropertyConfig.Property.PROP_COMP_HOSTNAME.equals(configProperty.getValue())) {
-                                log.lookingUpComponentHosts(compName);
-                                componentHostnames.addAll(component.getHostNames());
-                                hostNamePlaceholder = propertyName; // Remember the host name placeholder
-                            } else {
-                                log.lookingUpComponentConfigProperty(compName, configProperty.getValue());
-                                propertyValue = component.getConfigProperty(configProperty.getValue());
+                if (configProperty != null) {
+                    String propertyType = configProperty.getType();
+                    if (ServiceURLPropertyConfig.Property.TYPE_SERVICE.equals(propertyType)) {
+                        log.lookingUpServiceConfigProperty(configProperty.getService(), configProperty.getServiceConfig(), configProperty.getValue());
+                        AmbariCluster.ServiceConfiguration svcConfig =
+                            cluster.getServiceConfiguration(configProperty.getService(), configProperty.getServiceConfig());
+                        if (svcConfig != null) {
+                            propertyValue = svcConfig.getProperties().get(configProperty.getValue());
+                        }
+                    } else if (ServiceURLPropertyConfig.Property.TYPE_COMPONENT.equals(propertyType)) {
+                        String compName = configProperty.getComponent();
+                        if (compName != null) {
+                            AmbariComponent component = cluster.getComponent(compName);
+                            if (component != null) {
+                                if (ServiceURLPropertyConfig.Property.PROP_COMP_HOSTNAME.equals(configProperty.getValue())) {
+                                    log.lookingUpComponentHosts(compName);
+                                    componentHostnames.addAll(component.getHostNames());
+                                    hostNamePlaceholder = propertyName; // Remember the host name placeholder
+                                } else {
+                                    log.lookingUpComponentConfigProperty(compName, configProperty.getValue());
+                                    propertyValue = component.getConfigProperty(configProperty.getValue());
+                                }
                             }
                         }
-                    }
-                } else { // Derived property
-                    log.handlingDerivedProperty(serviceName, configProperty.getType(), configProperty.getName());
-                    ServiceURLPropertyConfig.Property p = config.getConfigProperty(serviceName, configProperty.getName());
-                    propertyValue = p.getValue();
-                    if (propertyValue == null) {
-                        if (p.getConditionHandler() != null) {
-                            propertyValue = p.getConditionHandler().evaluate(config, cluster);
+                    } else { // Derived property
+                        log.handlingDerivedProperty(serviceName, configProperty.getType(), configProperty.getName());
+                        ServiceURLPropertyConfig.Property p = config.getConfigProperty(serviceName, configProperty.getName());
+                        propertyValue = p.getValue();
+                        if (propertyValue == null) {
+                            if (p.getConditionHandler() != null) {
+                                propertyValue = p.getConditionHandler().evaluate(config, cluster);
+                            }
                         }
                     }
                 }

http://git-wip-us.apache.org/repos/asf/knox/blob/ec915ded/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/HDFSURLCreatorBase.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/HDFSURLCreatorBase.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/HDFSURLCreatorBase.java
new file mode 100644
index 0000000..59d8d7f
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/HDFSURLCreatorBase.java
@@ -0,0 +1,169 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+public abstract class HDFSURLCreatorBase implements ServiceURLCreator {
+
+  private static final String SCHEME_HTTP  = "http";
+  private static final String SCHEME_HTTPS = "https";
+
+  private static final String NAMESERVICE_PARAM = "discovery-nameservice";
+
+  protected AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+  private AmbariCluster cluster = null;
+
+
+  HDFSURLCreatorBase(AmbariCluster cluster) {
+    this.cluster = cluster;
+  }
+
+  public List<String> create(String service, Map<String, String> serviceParams) {
+    List<String> urls = new ArrayList<>();
+
+    if (getTargetService().equals(service)) {
+      AmbariCluster.ServiceConfiguration sc = cluster.getServiceConfiguration("HDFS", "hdfs-site");
+      if (sc != null) {
+        // First, check if it's HA config
+        String nameServices = null;
+        AmbariComponent nameNodeComp = cluster.getComponent("NAMENODE");
+        if (nameNodeComp != null) {
+          nameServices = nameNodeComp.getConfigProperty("dfs.nameservices");
+        }
+
+        if (nameServices != null && !nameServices.isEmpty()) {
+          String ns = null;
+
+          // Parse the nameservices value
+          String[] namespaces = nameServices.split(",");
+
+          if (namespaces.length > 1) {
+            String nsParam = (serviceParams != null) ? serviceParams.get(NAMESERVICE_PARAM) : null;
+            if (nsParam != null) {
+              if (!validateDeclaredNameService(sc, nsParam)) {
+                log.undefinedHDFSNameService(nsParam);
+              }
+              ns = nsParam;
+            } else {
+              // core-site.xml : dfs.defaultFS property (e.g., hdfs://ns1)
+              AmbariCluster.ServiceConfiguration coreSite = cluster.getServiceConfiguration("HDFS", "core-site");
+              if (coreSite != null) {
+                String defaultFS = coreSite.getProperties().get("fs.defaultFS");
+                if (defaultFS != null) {
+                  ns = defaultFS.substring(defaultFS.lastIndexOf("/") + 1);
+                }
+              }
+            }
+          }
+
+          // If only a single namespace, or no namespace specified and no default configured, use the first in the "list"
+          if (ns == null) {
+            ns = namespaces[0];
+          }
+
+          // If it is an HA configuration
+          Map<String, String> props = sc.getProperties();
+
+          // More recent HDFS configurations support a property enumerating the node names associated with a
+          // nameservice. If this property is present, use its value to create the correct URLs.
+          String nameServiceNodes = props.get("dfs.ha.namenodes." + ns);
+          if (nameServiceNodes != null) {
+            String[] nodes = nameServiceNodes.split(",");
+            for (String node : nodes) {
+              String propertyValue = getHANameNodeHttpAddress(props, ns, node);
+              if (propertyValue != null) {
+                urls.add(createURL(propertyValue));
+              }
+            }
+          } else {
+            // Name node HTTP addresses are defined as properties of the form:
+            //      dfs.namenode.http-address.<NAMESERVICE>.nn<INDEX>
+            // So, this iterates over the nn<INDEX> properties until there is no such property (since it cannot be known how
+            // many are defined by any other means).
+            int i = 1;
+            String propertyValue = getHANameNodeHttpAddress(props, ns, i++);
+            while (propertyValue != null) {
+              urls.add(createURL(propertyValue));
+              propertyValue = getHANameNodeHttpAddress(props, ns, i++);
+            }
+          }
+
+        } else { // If it's not an HA configuration, get the single name node HTTP address
+          urls.add(createURL(sc.getProperties().get("dfs.namenode.http-address")));
+        }
+      }
+    }
+
+    return urls;
+  }
+
+
+  // Verify whether the declared nameservice is among the configured nameservices in the cluster
+  private static boolean validateDeclaredNameService(AmbariCluster.ServiceConfiguration hdfsSite, String declaredNameService) {
+    boolean isValid = false;
+    String nameservices = hdfsSite.getProperties().get("dfs.nameservices");
+    if (nameservices != null) {
+      String[] namespaces = nameservices.split(",");
+      for (String ns : namespaces) {
+        if (ns.equals(declaredNameService)) {
+          isValid = true;
+          break;
+        }
+      }
+    }
+    return isValid;
+  }
+
+
+  private static String getHANameNodeHttpAddress(Map<String, String> props, String nameService, int index) {
+    return props.get("dfs.namenode.http-address." + nameService + ".nn" + index);
+  }
+
+
+  private static String getHANameNodeHttpAddress(Map<String, String> props, String nameService, String node) {
+    return props.get("dfs.namenode.http-address." + nameService + "." + node);
+  }
+
+
+  protected abstract String getTargetService();
+
+
+  protected abstract String createURL(String address);
+
+
+  protected String getURLScheme() {
+    String scheme = SCHEME_HTTP;
+
+    AmbariCluster.ServiceConfiguration sc = cluster.getServiceConfiguration("HDFS", "hdfs-site");
+    if (sc != null) {
+      String httpPolicy = sc.getProperties().get("dfs.http.policy");
+      if (httpPolicy != null) {
+        scheme = httpPolicy.equals("HTTPS_ONLY") ? SCHEME_HTTPS : SCHEME_HTTP;
+      }
+    }
+
+    return scheme;
+  }
+
+
+}
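To make the HA resolution in HDFSURLCreatorBase easier to follow, here is a minimal, self-contained sketch in plain Java with hypothetical hdfs-site values (no Knox classes involved): it shows how the dfs.ha.namenodes.<ns> enumeration, the nn<INDEX> probing fallback, and dfs.http.policy combine into one URL per NameNode.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class HdfsHaUrlResolutionSketch {
  public static void main(String[] args) {
    // Hypothetical hdfs-site properties for a two-NameNode HA nameservice "ns1".
    Map<String, String> hdfsSite = new HashMap<>();
    hdfsSite.put("dfs.nameservices", "ns1");
    hdfsSite.put("dfs.ha.namenodes.ns1", "nn1,nn2");
    hdfsSite.put("dfs.namenode.http-address.ns1.nn1", "host1:50070");
    hdfsSite.put("dfs.namenode.http-address.ns1.nn2", "host2:50070");
    hdfsSite.put("dfs.http.policy", "HTTP_ONLY");

    // Scheme selection mirrors getURLScheme(): HTTPS only when the policy demands it.
    String scheme = "HTTPS_ONLY".equals(hdfsSite.get("dfs.http.policy")) ? "https" : "http";

    // With no nameservice declared by the caller, the first configured one is used.
    String ns = hdfsSite.get("dfs.nameservices").split(",")[0];

    List<String> urls = new ArrayList<>();
    String nodes = hdfsSite.get("dfs.ha.namenodes." + ns);
    if (nodes != null) {
      // Preferred path: the NameNode identifiers are enumerated explicitly.
      for (String node : nodes.split(",")) {
        String address = hdfsSite.get("dfs.namenode.http-address." + ns + "." + node);
        if (address != null) {
          urls.add(scheme + "://" + address);
        }
      }
    } else {
      // Fallback path: probe nn1, nn2, ... until a property is missing.
      int i = 1;
      String address;
      while ((address = hdfsSite.get("dfs.namenode.http-address." + ns + ".nn" + i++)) != null) {
        urls.add(scheme + "://" + address);
      }
    }

    System.out.println(urls); // [http://host1:50070, http://host2:50070]
  }
}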

http://git-wip-us.apache.org/repos/asf/knox/blob/ec915ded/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/HdfsUIUrlCreator.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/HdfsUIUrlCreator.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/HdfsUIUrlCreator.java
new file mode 100644
index 0000000..10d7ede
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/HdfsUIUrlCreator.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+/**
+ * A ServiceURLCreator implementation for HDFSUI.
+ */
+public class HdfsUIUrlCreator extends HDFSURLCreatorBase {
+
+  private static final String SERVICE = "HDFSUI";
+
+
+  HdfsUIUrlCreator(AmbariCluster cluster) {
+    super(cluster);
+  }
+
+  @Override
+  protected String getTargetService() {
+    return SERVICE;
+  }
+
+  @Override
+  protected String createURL(String address) {
+    return getURLScheme() + "://" + address;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/ec915ded/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/ServiceURLFactory.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/ServiceURLFactory.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/ServiceURLFactory.java
index f114930..e976022 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/ServiceURLFactory.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/ServiceURLFactory.java
@@ -38,6 +38,7 @@ public class ServiceURLFactory {
     // Custom (internal) URL creators
     urlCreators.put("NAMENODE", new NameNodeUrlCreator(cluster));
     urlCreators.put("WEBHDFS", new WebHdfsUrlCreator(cluster));
+    urlCreators.put("HDFSUI", new HdfsUIUrlCreator(cluster));
   }
 
 

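The single registration line above is all the wiring HDFSUI needs. As a rough, simplified illustration of the registry pattern ServiceURLFactory appears to use (the interface shape and fallback behavior here are assumptions, not Knox's exact API), a custom creator is simply looked up by service name before any generic mapping-driven URL construction:

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class UrlCreatorRegistrySketch {

  // Simplified stand-in for Knox's ServiceURLCreator; the real one also receives service parameters.
  interface UrlCreator {
    List<String> create(String serviceName);
  }

  public static void main(String[] args) {
    Map<String, UrlCreator> urlCreators = new HashMap<>();
    // Register a custom creator per service name, as the diff does for HDFSUI.
    urlCreators.put("HDFSUI", name -> Collections.singletonList("http://namenode-host:50070"));

    // Services with a custom creator bypass the generic, mapping-driven URL construction.
    UrlCreator creator = urlCreators.get("HDFSUI");
    List<String> urls = (creator != null) ? creator.create("HDFSUI") : Collections.<String>emptyList();
    System.out.println(urls); // [http://namenode-host:50070]
  }
}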
http://git-wip-us.apache.org/repos/asf/knox/blob/ec915ded/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/WebHdfsUrlCreator.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/WebHdfsUrlCreator.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/WebHdfsUrlCreator.java
index 6dcdf88..d17e87e 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/WebHdfsUrlCreator.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/WebHdfsUrlCreator.java
@@ -16,135 +16,26 @@
  */
 package org.apache.knox.gateway.topology.discovery.ambari;
 
-import org.apache.knox.gateway.i18n.messages.MessagesFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
 /**
  * A ServiceURLCreator implementation for WEBHDFS.
  */
-public class WebHdfsUrlCreator implements ServiceURLCreator {
+public class WebHdfsUrlCreator extends HDFSURLCreatorBase {
 
   private static final String SERVICE = "WEBHDFS";
 
-  private static final String NAMESERVICE_PARAM = "discovery-nameservice";
-
-  private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
-
-  private AmbariCluster cluster = null;
 
   WebHdfsUrlCreator(AmbariCluster cluster) {
-    this.cluster = cluster;
+    super(cluster);
   }
 
   @Override
-  public List<String> create(String service, Map<String, String> serviceParams) {
-    List<String> urls = new ArrayList<>();
-
-    if (SERVICE.equals(service)) {
-      AmbariCluster.ServiceConfiguration sc = cluster.getServiceConfiguration("HDFS", "hdfs-site");
-      if (sc != null) {
-        // First, check if it's HA config
-        String nameServices = null;
-        AmbariComponent nameNodeComp = cluster.getComponent("NAMENODE");
-        if (nameNodeComp != null) {
-          nameServices = nameNodeComp.getConfigProperty("dfs.nameservices");
-        }
-
-        if (nameServices != null && !nameServices.isEmpty()) {
-          String ns = null;
-
-          // Parse the nameservices value
-          String[] namespaces = nameServices.split(",");
-
-          if (namespaces.length > 1) {
-            String nsParam = (serviceParams != null) ? serviceParams.get(NAMESERVICE_PARAM) : null;
-            if (nsParam != null) {
-              if (!validateDeclaredNameService(sc, nsParam)) {
-                log.undefinedHDFSNameService(nsParam);
-              }
-              ns = nsParam;
-            } else {
-              // core-site.xml : dfs.defaultFS property (e.g., hdfs://ns1)
-              AmbariCluster.ServiceConfiguration coreSite = cluster.getServiceConfiguration("HDFS", "core-site");
-              if (coreSite != null) {
-                String defaultFS = coreSite.getProperties().get("fs.defaultFS");
-                if (defaultFS != null) {
-                  ns = defaultFS.substring(defaultFS.lastIndexOf("/") + 1);
-                }
-              }
-            }
-          }
-
-          // If only a single namespace, or no namespace specified and no default configured, use the first in the "list"
-          if (ns == null) {
-             ns = namespaces[0];
-          }
-
-          // If it is an HA configuration
-          Map<String, String> props = sc.getProperties();
-
-          // More recent HDFS configurations support a property enumerating the node names associated with a
-          // nameservice. If this property is present, use its value to create the correct URLs.
-          String nameServiceNodes = props.get("dfs.ha.namenodes." + ns);
-          if (nameServiceNodes != null) {
-            String[] nodes = nameServiceNodes.split(",");
-            for (String node : nodes) {
-              String propertyValue = getHANameNodeHttpAddress(props, ns, node);
-              if (propertyValue != null) {
-                urls.add(createURL(propertyValue));
-              }
-            }
-          } else {
-            // Name node HTTP addresses are defined as properties of the form:
-            //      dfs.namenode.http-address.<NAMESERVICE>.nn<INDEX>
-            // So, this iterates over the nn<INDEX> properties until there is no such property (since it cannot be known how
-            // many are defined by any other means).
-            int i = 1;
-            String propertyValue = getHANameNodeHttpAddress(props, ns, i++);
-            while (propertyValue != null) {
-              urls.add(createURL(propertyValue));
-              propertyValue = getHANameNodeHttpAddress(props, ns, i++);
-            }
-          }
-
-        } else { // If it's not an HA configuration, get the single name node HTTP address
-          urls.add(createURL(sc.getProperties().get("dfs.namenode.http-address")));
-        }
-      }
-    }
-
-    return urls;
-  }
-
-  // Verify whether the declared nameservice is among the configured nameservices in the cluster
-  private static boolean validateDeclaredNameService(AmbariCluster.ServiceConfiguration hdfsSite, String declaredNameService) {
-    boolean isValid = false;
-    String nameservices = hdfsSite.getProperties().get("dfs.nameservices");
-    if (nameservices != null) {
-      String[] namespaces = nameservices.split(",");
-      for (String ns : namespaces) {
-        if (ns.equals(declaredNameService)) {
-          isValid = true;
-          break;
-        }
-      }
-    }
-    return isValid;
+  protected String getTargetService() {
+    return SERVICE;
   }
 
-  private static String getHANameNodeHttpAddress(Map<String, String> props, String nameService, int index) {
-    return props.get("dfs.namenode.http-address." + nameService + ".nn" + index);
-  }
-
-  private static String getHANameNodeHttpAddress(Map<String, String> props, String nameService, String node) {
-    return props.get("dfs.namenode.http-address." + nameService + "." + node);
-  }
-
-  private static String createURL(String address) {
-    return "http://" + address + "/webhdfs";
+  @Override
+  protected String createURL(String address) {
+    return getURLScheme() + "://" + address + "/webhdfs";
   }
 
 }
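With the shared discovery logic hoisted into HDFSURLCreatorBase, WEBHDFS and HDFSUI now differ only in the service name they answer to and the final URL shape. A compact sketch of that template-method split, with simplified types (the real classes take an AmbariCluster and resolve the address as shown earlier):

import java.util.Collections;
import java.util.List;

public class HdfsCreatorTemplateSketch {

  // Simplified stand-in for HDFSURLCreatorBase: shared flow, service-specific pieces left abstract.
  static abstract class BaseCreator {
    List<String> create(String service, String address) {
      return getTargetService().equals(service)
          ? Collections.singletonList(createURL(address))
          : Collections.<String>emptyList();
    }
    abstract String getTargetService();
    abstract String createURL(String address);
  }

  // WEBHDFS appends the /webhdfs path; HDFSUI exposes the bare NameNode web address.
  static class WebHdfs extends BaseCreator {
    String getTargetService() { return "WEBHDFS"; }
    String createURL(String address) { return "http://" + address + "/webhdfs"; }
  }

  static class HdfsUI extends BaseCreator {
    String getTargetService() { return "HDFSUI"; }
    String createURL(String address) { return "http://" + address; }
  }

  public static void main(String[] args) {
    System.out.println(new WebHdfs().create("WEBHDFS", "host3:1357")); // [http://host3:1357/webhdfs]
    System.out.println(new HdfsUI().create("HDFSUI", "host3:1357"));   // [http://host3:1357]
  }
}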

http://git-wip-us.apache.org/repos/asf/knox/blob/ec915ded/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-component-config-mapping.properties
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-component-config-mapping.properties b/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-component-config-mapping.properties
index 52d93eb..48dd7b1 100644
--- a/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-component-config-mapping.properties
+++ b/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-component-config-mapping.properties
@@ -22,6 +22,7 @@
 ##########################################################################
 NAMENODE=hdfs-site
 RESOURCEMANAGER=yarn-site
+HISTORYSERVER=mapred-site
 OOZIE_SERVER=oozie-site
 HIVE_SERVER=hive-site
 WEBHCAT_SERVER=webhcat-site
@@ -34,3 +35,7 @@ DRUID_SUPERSET=druid-superset
 ATLAS_SERVER=application-properties
 ZEPPELIN_MASTER=zeppelin-config
 RANGER_ADMIN=ranger-admin-site
+FALCON_SERVER=falcon-env
+LIVY_SERVER=livy-conf
+SPARK_JOBHISTORYSERVER=spark-defaults
+
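The new entries above tell discovery which Ambari configuration type to consult for each component (for example, HISTORYSERVER properties come from mapred-site). A small self-contained sketch of that lookup, with the mappings inlined; the loading code is illustrative only, not Knox's:

import java.io.IOException;
import java.io.StringReader;
import java.util.Properties;

public class ComponentConfigMappingSketch {
  public static void main(String[] args) throws IOException {
    // A few of the mappings added in this commit, inlined for the example.
    String mappings =
        "HISTORYSERVER=mapred-site\n"
      + "FALCON_SERVER=falcon-env\n"
      + "LIVY_SERVER=livy-conf\n"
      + "SPARK_JOBHISTORYSERVER=spark-defaults\n";

    Properties componentConfigTypes = new Properties();
    componentConfigTypes.load(new StringReader(mappings));

    // Discovery uses this kind of mapping to decide which config type holds a component's properties.
    System.out.println(componentConfigTypes.getProperty("LIVY_SERVER")); // livy-conf
  }
}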

http://git-wip-us.apache.org/repos/asf/knox/blob/ec915ded/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-url-mappings.xml
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-url-mappings.xml b/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-url-mappings.xml
index 3327ade..6b6907d 100644
--- a/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-url-mappings.xml
+++ b/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-url-mappings.xml
@@ -57,13 +57,41 @@
         </properties>
     </service>
 
+    <service name="OOZIEUI">
+        <url-pattern>{OOZIE_URL}</url-pattern>
+        <properties>
+            <property name="OOZIE_URL">
+                <component>OOZIE_SERVER</component>
+                <config-property>oozie.base.url</config-property>
+            </property>
+        </properties>
+    </service>
+
     <service name="WEBHBASE">
-        <url-pattern>http://{HOST}:60080</url-pattern>
+        <url-pattern>http://{HOST}:{MASTER_PORT}</url-pattern>
         <properties>
             <property name="HOST">
                 <component>HBASE_MASTER</component>
                 <hostname/>
             </property>
+            <property name="MASTER_PORT">
+                <component>HBASE_MASTER</component>
+                <config-property>hbase.master.info.port</config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="HBASEUI">
+        <url-pattern>http://{HOST}:{MASTER_PORT}</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>HBASE_MASTER</component>
+                <hostname/>
+            </property>
+            <property name="MASTER_PORT">
+                <component>HBASE_MASTER</component>
+                <config-property>hbase.master.info.port</config-property>
+            </property>
         </properties>
     </service>
 
@@ -131,6 +159,40 @@
         </properties>
     </service>
 
+    <service name="HDFSUI"> <!-- TODO: Custom ServiceURLCreator for this due to NN federation for HDP 3.0-->
+        <url-pattern>{SCHEME}://{WEBAPP_ADDRESS}</url-pattern>
+        <properties>
+            <property name="WEBAPP_HTTP_ADDRESS">
+                <component>NAMENODE</component>
+                <config-property>dfs.namenode.http-address</config-property>
+            </property>
+            <property name="WEBAPP_HTTPS_ADDRESS">
+                <component>NAMENODE</component>
+                <config-property>dfs.namenode.https-address</config-property>
+            </property>
+            <property name="HTTP_POLICY">
+                <component>NAMENODE</component>
+                <config-property>dfs.http.policy</config-property>
+            </property>
+            <property name="SCHEME">
+                <config-property>
+                    <if property="HTTP_POLICY" value="HTTPS_ONLY">
+                        <then>https</then>
+                        <else>http</else>
+                    </if>
+                </config-property>
+            </property>
+            <property name="WEBAPP_ADDRESS">
+                <config-property>
+                    <if property="HTTP_POLICY" value="HTTPS_ONLY">
+                        <then>WEBAPP_HTTPS_ADDRESS</then>
+                        <else>WEBAPP_HTTP_ADDRESS</else>
+                    </if>
+                </config-property>
+            </property>
+        </properties>
+    </service>
+
     <service name="DRUID-COORDINATOR">
         <url-pattern>http://{HOST}:{PORT}</url-pattern>
         <properties>
@@ -145,6 +207,20 @@
         </properties>
     </service>
 
+    <service name="DRUID-COORDINATOR-UI">
+        <url-pattern>http://{HOST}:{PORT}</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>DRUID_COORDINATOR</component>
+                <hostname/>
+            </property>
+            <property name="PORT">
+                <component>DRUID_COORDINATOR</component>
+                <config-property>druid.port</config-property>
+            </property>
+        </properties>
+    </service>
+
     <service name="DRUID-BROKER">
         <url-pattern>http://{HOST}:{PORT}</url-pattern>
         <properties>
@@ -187,6 +263,20 @@
         </properties>
     </service>
 
+    <service name="DRUID-OVERLORD-UI">
+        <url-pattern>http://{HOST}:{PORT}</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>DRUID_OVERLORD</component>
+                <hostname/>
+            </property>
+            <property name="PORT">
+                <component>DRUID_OVERLORD</component>
+                <config-property>druid.port</config-property>
+            </property>
+        </properties>
+    </service>
+
     <service name="SUPERSET">
         <url-pattern>http://{HOST}:{PORT}</url-pattern>
         <properties>
@@ -391,10 +481,91 @@
         </properties>
     </service>
 
-<!-- TODO:
+    <service name="FALCON">
+        <url-pattern>http://{HOST}:{PORT}</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>FALCON_SERVER</component>
+                <hostname/>
+            </property>
+            <property name="PORT">
+                <component>FALCON_SERVER</component>
+                <config-property>falcon_port</config-property>
+            </property>
+        </properties>
+    </service>
+
+
     <service name="YARNUI">
+        <url-pattern>{SCHEME}://{YARN_RM_WEBAPP_ADDRESS}</url-pattern>
+        <properties>
+            <property name="HTTP_ADDRESS">
+                <component>RESOURCEMANAGER</component>
+                <config-property>yarn.resourcemanager.webapp.address</config-property>
+            </property>
+            <property name="HTTPS_ADDRESS">
+                <component>RESOURCEMANAGER</component>
+                <config-property>yarn.resourcemanager.webapp.https.address</config-property>
+            </property>
+            <property name="HTTP_POLICY">
+                <component>RESOURCEMANAGER</component>
+                <config-property>yarn.http.policy</config-property>
+            </property>
+            <property name="SCHEME">
+                <config-property>
+                    <if property="HTTP_POLICY" value="HTTPS_ONLY">
+                        <then>https</then>
+                        <else>http</else>
+                    </if>
+                </config-property>
+            </property>
+            <property name="YARN_RM_WEBAPP_ADDRESS">
+                <config-property>
+                    <if property="HTTP_POLICY" value="HTTPS_ONLY">
+                        <then>HTTPS_ADDRESS</then>
+                        <else>HTTP_ADDRESS</else>
+                    </if>
+                </config-property>
+            </property>
+        </properties>
     </service>
 
--->
+    <service name="JOBHISTORYUI">
+        <url-pattern>{JOBHISTORY_WEBAPP_ADDRESS}</url-pattern>
+        <properties>
+            <property name="JOBHISTORY_WEBAPP_ADDRESS">
+                <component>HISTORYSERVER</component>
+                <config-property>mapreduce.jobhistory.webapp.address</config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="SPARKHISTORYUI">
+        <url-pattern>http://{HOST}:{PORT}</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>SPARK_JOBHISTORYSERVER</component>
+                <hostname/>
+            </property>
+            <property name="PORT">
+                <component>SPARK_JOBHISTORYSERVER</component>
+                <config-property>spark.history.ui.port</config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="LIVYSERVER">
+        <url-pattern>http://{HOST}:{PORT}</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>LIVY_SERVER</component>
+                <hostname/>
+            </property>
+            <property name="PORT">
+                <component>LIVY_SERVER</component>
+                <config-property>livy.server.port</config-property>
+            </property>
+        </properties>
+    </service>
 
 </service-discovery-url-mappings>

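The YARNUI and HDFSUI entries rely on the <if property="..." value="HTTPS_ONLY"> construct to derive both the scheme and the webapp address from the cluster's HTTP policy. A minimal Java sketch of that conditional resolution, using hypothetical yarn-site values rather than Knox's condition-handler code:

import java.util.HashMap;
import java.util.Map;

public class HttpPolicySchemeSketch {
  public static void main(String[] args) {
    // Hypothetical yarn-site values for a RESOURCEMANAGER component.
    Map<String, String> yarnSite = new HashMap<>();
    yarnSite.put("yarn.resourcemanager.webapp.address", "rm-host:8088");
    yarnSite.put("yarn.resourcemanager.webapp.https.address", "rm-host:8090");
    yarnSite.put("yarn.http.policy", "HTTPS_ONLY");

    // Mirrors the <if property="HTTP_POLICY" value="HTTPS_ONLY"> blocks in the YARNUI mapping above.
    boolean httpsOnly = "HTTPS_ONLY".equals(yarnSite.get("yarn.http.policy"));
    String scheme  = httpsOnly ? "https" : "http";
    String address = httpsOnly
        ? yarnSite.get("yarn.resourcemanager.webapp.https.address")
        : yarnSite.get("yarn.resourcemanager.webapp.address");

    System.out.println(scheme + "://" + address); // https://rm-host:8090
  }
}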
http://git-wip-us.apache.org/repos/asf/knox/blob/ec915ded/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java b/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
index 806d7c0..23a5f5a 100644
--- a/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
+++ b/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
@@ -16,7 +16,6 @@
  */
 package org.apache.knox.gateway.topology.discovery.ambari;
 
-import org.apache.commons.io.FileUtils;
 import org.easymock.EasyMock;
 import org.junit.Test;
 
@@ -31,7 +30,6 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 
-import static junit.framework.TestCase.assertNull;
 import static junit.framework.TestCase.assertTrue;
 import static junit.framework.TestCase.fail;
 import static org.junit.Assert.assertEquals;
@@ -302,17 +300,23 @@ public class AmbariDynamicServiceURLCreatorTest {
         assertEquals("http://" + HOSTNAME + ":" + PORT + "/templeton", url);
     }
 
+
     @Test
     public void testOozieURLFromInternalMapping() throws Exception {
-        testOozieURL(null);
+        testOozieURL(null, "OOZIE");
     }
 
     @Test
     public void testOozieURLFromExternalMapping() throws Exception {
-        testOozieURL(TEST_MAPPING_CONFIG);
+        testOozieURL(TEST_MAPPING_CONFIG, "OOZIE");
+    }
+
+    @Test
+    public void testOozieUIURLFromInternalMapping() throws Exception {
+        testOozieURL(null, "OOZIEUI");
     }
 
-    private void testOozieURL(Object mappingConfiguration) throws Exception {
+    private void testOozieURL(Object mappingConfiguration, String serviceName) throws Exception {
         final String URL = "http://host3:2222";
 
         AmbariComponent oozieServer = 
EasyMock.createNiceMock(AmbariComponent.class);
@@ -325,7 +329,10 @@ public class AmbariDynamicServiceURLCreatorTest {
 
         // Run the test
         AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, 
mappingConfiguration);
-        String url = builder.create("OOZIE", null).get(0);
+        List<String> urls = builder.create(serviceName, null);
+        assertNotNull(urls);
+        assertFalse(urls.isEmpty());
+        String url = urls.get(0);
         assertEquals(URL, url);
     }
 
@@ -341,8 +348,14 @@ public class AmbariDynamicServiceURLCreatorTest {
 
     private void testWebHBaseURL(Object mappingConfiguration) throws Exception {
         final String[] HOSTNAMES = {"host2", "host4"};
+        final String HBASE_MASTER_PORT_PROPERTY = "hbase.master.info.port";
 
         AmbariComponent hbaseMaster = 
EasyMock.createNiceMock(AmbariComponent.class);
+        Map<String, String> hbaseMasterConfig = new HashMap<>();
+        hbaseMasterConfig.put(HBASE_MASTER_PORT_PROPERTY, "60080");
+        
EasyMock.expect(hbaseMaster.getConfigProperties()).andReturn(hbaseMasterConfig).anyTimes();
+        
EasyMock.expect(hbaseMaster.getConfigProperty(HBASE_MASTER_PORT_PROPERTY))
+                
.andReturn(hbaseMasterConfig.get(HBASE_MASTER_PORT_PROPERTY)).anyTimes();
         List<String> hbaseMasterHosts = Arrays.asList(HOSTNAMES);
         
EasyMock.expect(hbaseMaster.getHostNames()).andReturn(hbaseMasterHosts).anyTimes();
         EasyMock.replay(hbaseMaster);
@@ -354,20 +367,44 @@ public class AmbariDynamicServiceURLCreatorTest {
         // Run the test
         AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, 
mappingConfiguration);
         List<String> urls = builder.create("WEBHBASE", null);
-        validateServiceURLs(urls, HOSTNAMES, "http", "60080", null);
+        validateServiceURLs(urls, HOSTNAMES, "http", 
hbaseMasterConfig.get(HBASE_MASTER_PORT_PROPERTY), null);
     }
 
+
     @Test
-    public void testWebHdfsURL() throws Exception {
+    public void testWebHdfsURLHttp() throws Exception {
         final String ADDRESS = "host3:1357";
-        assertEquals("http://" + ADDRESS + "/webhdfs", getTestWebHdfsURL(ADDRESS));
+        assertEquals(("http://" + ADDRESS + "/webhdfs"), getTestHdfsURL("WEBHDFS", ADDRESS, false));
     }
 
 
-    private String getTestWebHdfsURL(String address) throws Exception {
+    @Test
+    public void testWebHdfsURLHttps() throws Exception {
+        final String ADDRESS = "host3:1357";
+        assertEquals(("https://" + ADDRESS + "/webhdfs"), getTestHdfsURL("WEBHDFS", ADDRESS, true));
+    }
+
+
+    @Test
+    public void testHdfsUIURLHttp() throws Exception {
+        final String ADDRESS = "host3:1357";
+        assertEquals(("http://" + ADDRESS), getTestHdfsURL("HDFSUI", ADDRESS, false));
+    }
+
+
+    @Test
+    public void testHdfsUIURLHttps() throws Exception {
+        final String ADDRESS = "host3:1357";
+        assertEquals(("https://" + ADDRESS), getTestHdfsURL("HDFSUI", ADDRESS, true));
+    }
+
+
+    private String getTestHdfsURL(String serviceName, String address, boolean isHttps) throws Exception {
         AmbariCluster.ServiceConfiguration hdfsSC = 
EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
         Map<String, String> hdfsProps = new HashMap<>();
         hdfsProps.put("dfs.namenode.http-address", address);
+        hdfsProps.put("dfs.namenode.https-address", address);
+        hdfsProps.put("dfs.http.policy", (isHttps) ? "HTTPS_ONLY" : "HTTP_ONLY");
         
EasyMock.expect(hdfsSC.getProperties()).andReturn(hdfsProps).anyTimes();
         EasyMock.replay(hdfsSC);
 
@@ -376,12 +413,13 @@ public class AmbariDynamicServiceURLCreatorTest {
         EasyMock.replay(cluster);
 
         // Create the URL
-        List<String> urls = ServiceURLFactory.newInstance(cluster).create("WEBHDFS", null);
+        List<String> urls = ServiceURLFactory.newInstance(cluster).create(serviceName, null);
         assertNotNull(urls);
         assertFalse(urls.isEmpty());
         return urls.get(0);
     }
 
+
     @Test
     public void testWebHdfsURLHASingleNameService() throws Exception {
         final String NAMESERVICES   = "myNameServicesCluster";
@@ -415,6 +453,39 @@ public class AmbariDynamicServiceURLCreatorTest {
     }
 
 
+    @Test
+    public void testHdfsUIURLHASingleNameService() throws Exception {
+        final String NAMESERVICES   = "myNameServicesCluster";
+        final String HTTP_ADDRESS_1 = "host1:50070";
+        final String HTTP_ADDRESS_2 = "host2:50077";
+
+        final String EXPECTED_ADDR_1 = "http://" + HTTP_ADDRESS_1;
+        final String EXPECTED_ADDR_2 = "http://" + HTTP_ADDRESS_2;
+
+        AmbariComponent namenode = 
EasyMock.createNiceMock(AmbariComponent.class);
+        
EasyMock.expect(namenode.getConfigProperty("dfs.nameservices")).andReturn(NAMESERVICES).anyTimes();
+        EasyMock.replay(namenode);
+
+        AmbariCluster.ServiceConfiguration hdfsSC = 
EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+        Map<String, String> hdfsProps = new HashMap<>();
+        hdfsProps.put("dfs.namenode.http-address." + NAMESERVICES + ".nn1", 
HTTP_ADDRESS_1);
+        hdfsProps.put("dfs.namenode.http-address." + NAMESERVICES + ".nn2", 
HTTP_ADDRESS_2);
+        
EasyMock.expect(hdfsSC.getProperties()).andReturn(hdfsProps).anyTimes();
+        EasyMock.replay(hdfsSC);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        
EasyMock.expect(cluster.getComponent("NAMENODE")).andReturn(namenode).anyTimes();
+        EasyMock.expect(cluster.getServiceConfiguration("HDFS", 
"hdfs-site")).andReturn(hdfsSC).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Create the URL
+        List<String> webhdfsURLs = 
ServiceURLFactory.newInstance(cluster).create("HDFSUI", null);
+        assertEquals(2, webhdfsURLs.size());
+        assertTrue(webhdfsURLs.contains(EXPECTED_ADDR_1));
+        assertTrue(webhdfsURLs.contains(EXPECTED_ADDR_2));
+    }
+
+
     /**
      * Test federated NameNode scenario, which chooses the "first" nameservice 
because there is no information from
      * which one can be selected from among the set.
@@ -459,6 +530,49 @@ public class AmbariDynamicServiceURLCreatorTest {
 
 
     /**
+     * Test federated NameNode scenario, which chooses the "first" nameservice 
because there is no information from
+     * which one can be selected from among the set.
+     */
+    @Test
+    public void testHdfsUIURLHAMultipleNameServicesNoDefaultFS() throws Exception {
+        final String NS1 = "myns1";
+        final String NS2 = "myns2";
+        final String NAMESERVICES   = NS1 + "," + NS2;
+        final String HTTP_ADDRESS_11 = "host11:50070";
+        final String HTTP_ADDRESS_12 = "host12:50077";
+        final String HTTP_ADDRESS_21 = "host21:50070";
+        final String HTTP_ADDRESS_22 = "host22:50077";
+
+        final String EXPECTED_ADDR_1 = "http://" + HTTP_ADDRESS_11;
+        final String EXPECTED_ADDR_2 = "http://" + HTTP_ADDRESS_12;
+
+        AmbariComponent namenode = 
EasyMock.createNiceMock(AmbariComponent.class);
+        
EasyMock.expect(namenode.getConfigProperty("dfs.nameservices")).andReturn(NAMESERVICES).anyTimes();
+        EasyMock.replay(namenode);
+
+        AmbariCluster.ServiceConfiguration hdfsSC = 
EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+        Map<String, String> hdfsProps = new HashMap<>();
+        hdfsProps.put("dfs.namenode.http-address." + NS1 + ".nn1", 
HTTP_ADDRESS_11);
+        hdfsProps.put("dfs.namenode.http-address." + NS1 + ".nn2", 
HTTP_ADDRESS_12);
+        hdfsProps.put("dfs.namenode.http-address." + NS2 + ".nn1", 
HTTP_ADDRESS_21);
+        hdfsProps.put("dfs.namenode.http-address." + NS2 + ".nn2", 
HTTP_ADDRESS_22);
+        
EasyMock.expect(hdfsSC.getProperties()).andReturn(hdfsProps).anyTimes();
+        EasyMock.replay(hdfsSC);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        
EasyMock.expect(cluster.getComponent("NAMENODE")).andReturn(namenode).anyTimes();
+        EasyMock.expect(cluster.getServiceConfiguration("HDFS", 
"hdfs-site")).andReturn(hdfsSC).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Create the URL
+        List<String> webhdfsURLs = 
ServiceURLFactory.newInstance(cluster).create("HDFSUI", null);
+        assertEquals(2, webhdfsURLs.size());
+        assertTrue(webhdfsURLs.contains(EXPECTED_ADDR_1));
+        assertTrue(webhdfsURLs.contains(EXPECTED_ADDR_2));
+    }
+
+
+    /**
      * Test federated NameNode scenario, relying on the core-site property for 
identifying the default nameservice.
      */
     @Test
@@ -508,6 +622,55 @@ public class AmbariDynamicServiceURLCreatorTest {
 
 
     /**
+     * Test federated NameNode scenario, relying on the core-site property for 
identifying the default nameservice.
+     */
+    @Test
+    public void testHdfsUIURLFederatedNNWithDefaultFS() throws Exception {
+        final String NS1 = "myns1";
+        final String NS2 = "myns2";
+        final String NAMESERVICES   = NS1 + "," + NS2;
+        final String HTTP_ADDRESS_11 = "host11:50070";
+        final String HTTP_ADDRESS_12 = "host12:50077";
+        final String HTTP_ADDRESS_21 = "host21:50070";
+        final String HTTP_ADDRESS_22 = "host22:50077";
+
+        final String EXPECTED_ADDR_1 = "http://" + HTTP_ADDRESS_21;
+        final String EXPECTED_ADDR_2 = "http://" + HTTP_ADDRESS_22;
+
+        AmbariComponent namenode = 
EasyMock.createNiceMock(AmbariComponent.class);
+        
EasyMock.expect(namenode.getConfigProperty("dfs.nameservices")).andReturn(NAMESERVICES).anyTimes();
+        EasyMock.replay(namenode);
+
+        AmbariCluster.ServiceConfiguration hdfsSC = 
EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+        Map<String, String> hdfsProps = new HashMap<>();
+        hdfsProps.put("dfs.namenode.http-address." + NS1 + ".nn1", 
HTTP_ADDRESS_11);
+        hdfsProps.put("dfs.namenode.http-address." + NS1 + ".nn2", 
HTTP_ADDRESS_12);
+        hdfsProps.put("dfs.namenode.http-address." + NS2 + ".nn1", 
HTTP_ADDRESS_21);
+        hdfsProps.put("dfs.namenode.http-address." + NS2 + ".nn2", 
HTTP_ADDRESS_22);
+        
EasyMock.expect(hdfsSC.getProperties()).andReturn(hdfsProps).anyTimes();
+        EasyMock.replay(hdfsSC);
+
+        AmbariCluster.ServiceConfiguration coreSC = 
EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+        Map<String, String> coreProps = new HashMap<>();
+        coreProps.put("fs.defaultFS", NS2);
+        
EasyMock.expect(coreSC.getProperties()).andReturn(coreProps).anyTimes();
+        EasyMock.replay(coreSC);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        
EasyMock.expect(cluster.getComponent("NAMENODE")).andReturn(namenode).anyTimes();
+        EasyMock.expect(cluster.getServiceConfiguration("HDFS", 
"hdfs-site")).andReturn(hdfsSC).anyTimes();
+        EasyMock.expect(cluster.getServiceConfiguration("HDFS", 
"core-site")).andReturn(coreSC).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Create the URL
+        List<String> webhdfsURLs = 
ServiceURLFactory.newInstance(cluster).create("HDFSUI", null);
+        assertEquals(2, webhdfsURLs.size());
+        assertTrue(webhdfsURLs.contains(EXPECTED_ADDR_1));
+        assertTrue(webhdfsURLs.contains(EXPECTED_ADDR_2));
+    }
+
+
+    /**
      * Recent version of HDFS config include properties for mapping NN nodes 
to nameservices (e.g., dfs.ha.namenode.ns1).
      * This test verifies that discovery works correctly in those cases, when 
no nameservice is explicitly declared in
      * a descriptor.
@@ -562,6 +725,59 @@ public class AmbariDynamicServiceURLCreatorTest {
 
     /**
      * Recent version of HDFS config include properties for mapping NN nodes 
to nameservices (e.g., dfs.ha.namenode.ns1).
+     * This test verifies that discovery works correctly in those cases, when 
no nameservice is explicitly declared in
+     * a descriptor.
+     */
+    @Test
+    public void testHdfsUIURLFederatedNNWithDefaultFSAndHaNodes() throws Exception {
+        final String NS1 = "myns1";
+        final String NS2 = "myns2";
+        final String NAMESERVICES   = NS1 + "," + NS2;
+        final String HTTP_ADDRESS_11 = "host11:50070";
+        final String HTTP_ADDRESS_12 = "host12:50077";
+        final String HTTP_ADDRESS_21 = "host21:50070";
+        final String HTTP_ADDRESS_22 = "host22:50077";
+
+        final String EXPECTED_ADDR_1 = "http://" + HTTP_ADDRESS_21;
+        final String EXPECTED_ADDR_2 = "http://" + HTTP_ADDRESS_22;
+
+        AmbariComponent namenode = 
EasyMock.createNiceMock(AmbariComponent.class);
+        
EasyMock.expect(namenode.getConfigProperty("dfs.nameservices")).andReturn(NAMESERVICES).anyTimes();
+        EasyMock.replay(namenode);
+
+        AmbariCluster.ServiceConfiguration hdfsSC = 
EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+        Map<String, String> hdfsProps = new HashMap<>();
+        hdfsProps.put("dfs.namenode.http-address." + NS1 + ".nn11", 
HTTP_ADDRESS_11);
+        hdfsProps.put("dfs.namenode.http-address." + NS1 + ".nn12", 
HTTP_ADDRESS_12);
+        hdfsProps.put("dfs.namenode.http-address." + NS2 + ".nn21", 
HTTP_ADDRESS_21);
+        hdfsProps.put("dfs.namenode.http-address." + NS2 + ".nn22", 
HTTP_ADDRESS_22);
+        hdfsProps.put("dfs.ha.namenodes." + NS1, "nn11,nn12");
+        hdfsProps.put("dfs.ha.namenodes." + NS2, "nn21,nn22");
+        
EasyMock.expect(hdfsSC.getProperties()).andReturn(hdfsProps).anyTimes();
+        EasyMock.replay(hdfsSC);
+
+        AmbariCluster.ServiceConfiguration coreSC = 
EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+        Map<String, String> coreProps = new HashMap<>();
+        coreProps.put("fs.defaultFS", NS2);
+        
EasyMock.expect(coreSC.getProperties()).andReturn(coreProps).anyTimes();
+        EasyMock.replay(coreSC);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        
EasyMock.expect(cluster.getComponent("NAMENODE")).andReturn(namenode).anyTimes();
+        EasyMock.expect(cluster.getServiceConfiguration("HDFS", 
"hdfs-site")).andReturn(hdfsSC).anyTimes();
+        EasyMock.expect(cluster.getServiceConfiguration("HDFS", 
"core-site")).andReturn(coreSC).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Create the URL
+        List<String> webhdfsURLs = 
ServiceURLFactory.newInstance(cluster).create("HDFSUI", null);
+        assertEquals(2, webhdfsURLs.size());
+        assertTrue(webhdfsURLs.contains(EXPECTED_ADDR_1));
+        assertTrue(webhdfsURLs.contains(EXPECTED_ADDR_2));
+    }
+
+
+    /**
+     * Recent version of HDFS config include properties for mapping NN nodes 
to nameservices (e.g., dfs.ha.namenode.ns1).
      * This test verifies that discovery works correctly in those cases, when 
a nameservice is declared in descriptor.
      */
     @Test
@@ -951,6 +1167,29 @@ public class AmbariDynamicServiceURLCreatorTest {
 
 
     @Test
+    public void testFalconURL() throws Exception {
+        final String PORT = "8998";
+
+        final String[] HOSTNAMES = {"host2"};
+        final List<String> druidHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent falconServer = 
EasyMock.createNiceMock(AmbariComponent.class);
+        
EasyMock.expect(falconServer.getHostNames()).andReturn(druidHosts).anyTimes();
+        
EasyMock.expect(falconServer.getConfigProperty("falcon_port")).andReturn(PORT).anyTimes();
+        EasyMock.replay(falconServer);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        
EasyMock.expect(cluster.getComponent("FALCON_SERVER")).andReturn(falconServer).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+        List<String> urls = builder.create("FALCON", null);
+        validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
+    }
+
+
+    @Test
     public void testMissingServiceComponentURL() throws Exception {
         AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
         
EasyMock.expect(cluster.getComponent("DRUID_BROKER")).andReturn(null).anyTimes();

Reply via email to