http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
new file mode 100644
index 0000000..dd35dbb
--- /dev/null
+++ b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
@@ -0,0 +1,876 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.commons.io.FileUtils;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import static junit.framework.TestCase.assertTrue;
+import static junit.framework.TestCase.fail;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+
+public class AmbariDynamicServiceURLCreatorTest {
+
+    @Test
+    public void testHiveURLFromInternalMapping() throws Exception {
+        testHiveURL(null);
+    }
+
+    @Test
+    public void testHiveURLFromExternalMapping() throws Exception {
+        testHiveURL(TEST_MAPPING_CONFIG);
+    }
+
+    private void testHiveURL(Object mappingConfiguration) throws Exception {
+
+        final String   SERVICE_NAME = "HIVE";
+        final String[] HOSTNAMES    = {"host3", "host2", "host4"};
+        final String   HTTP_PATH    = "cliservice";
+        final String   HTTP_PORT    = "10001";
+        final String   BINARY_PORT  = "10000";
+
+        String expectedScheme = "http";
+
+        final List<String> hiveServerHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent hiveServer = EasyMock.createNiceMock(AmbariComponent.class);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("HIVE_SERVER")).andReturn(hiveServer).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Configure HTTP Transport
+        EasyMock.expect(hiveServer.getHostNames()).andReturn(hiveServerHosts).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.use.SSL")).andReturn("false").anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.path")).andReturn(HTTP_PATH).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.transport.mode")).andReturn("http").anyTimes();
+        EasyMock.replay(hiveServer);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+        List<String> urls = builder.create(SERVICE_NAME);
+        assertEquals(HOSTNAMES.length, urls.size());
+        validateServiceURLs(urls, HOSTNAMES, expectedScheme, HTTP_PORT, HTTP_PATH);
+
+        // Configure BINARY Transport
+        EasyMock.reset(hiveServer);
+        EasyMock.expect(hiveServer.getHostNames()).andReturn(hiveServerHosts).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.use.SSL")).andReturn("false").anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.path")).andReturn("").anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.port")).andReturn(BINARY_PORT).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.transport.mode")).andReturn("binary").anyTimes();
+        EasyMock.replay(hiveServer);
+
+        // Run the test
+        urls = builder.create(SERVICE_NAME);
+        assertEquals(HOSTNAMES.length, urls.size());
+        validateServiceURLs(urls, HOSTNAMES, expectedScheme, HTTP_PORT, "");
+
+        // Configure HTTPS Transport
+        EasyMock.reset(hiveServer);
+        EasyMock.expect(hiveServer.getHostNames()).andReturn(hiveServerHosts).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.use.SSL")).andReturn("true").anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.path")).andReturn(HTTP_PATH).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.transport.mode")).andReturn("http").anyTimes();
+        EasyMock.replay(hiveServer);
+
+        // Run the test
+        expectedScheme = "https";
+        urls = builder.create(SERVICE_NAME);
+        assertEquals(HOSTNAMES.length, urls.size());
+        validateServiceURLs(urls, HOSTNAMES, expectedScheme, HTTP_PORT, HTTP_PATH);
+    }
+
+    @Test
+    public void testResourceManagerURLFromInternalMapping() throws Exception {
+        testResourceManagerURL(null);
+    }
+
+    @Test
+    public void testResourceManagerURLFromExternalMapping() throws Exception {
+        testResourceManagerURL(TEST_MAPPING_CONFIG);
+    }
+
+    private void testResourceManagerURL(Object mappingConfiguration) throws Exception {
+
+        final String HTTP_ADDRESS  = "host2:1111";
+        final String HTTPS_ADDRESS = "host2:22222";
+
+        // HTTP
+        AmbariComponent resman = EasyMock.createNiceMock(AmbariComponent.class);
+        setResourceManagerComponentExpectations(resman, HTTP_ADDRESS, HTTPS_ADDRESS, "HTTP");
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("RESOURCEMANAGER")).andReturn(resman).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+        String url = builder.create("RESOURCEMANAGER").get(0);
+        assertEquals("http://" + HTTP_ADDRESS + "/ws", url);
+
+        // HTTPS
+        EasyMock.reset(resman);
+        setResourceManagerComponentExpectations(resman, HTTP_ADDRESS, HTTPS_ADDRESS, "HTTPS_ONLY");
+
+        // Run the test
+        url = builder.create("RESOURCEMANAGER").get(0);
+        assertEquals("https://"; + HTTPS_ADDRESS + "/ws", url);
+    }
+
+    private void setResourceManagerComponentExpectations(final AmbariComponent resmanMock,
+                                                         final String          httpAddress,
+                                                         final String          httpsAddress,
+                                                         final String          httpPolicy) {
+        EasyMock.expect(resmanMock.getConfigProperty("yarn.resourcemanager.webapp.address")).andReturn(httpAddress).anyTimes();
+        EasyMock.expect(resmanMock.getConfigProperty("yarn.resourcemanager.webapp.https.address")).andReturn(httpsAddress).anyTimes();
+        EasyMock.expect(resmanMock.getConfigProperty("yarn.http.policy")).andReturn(httpPolicy).anyTimes();
+        EasyMock.replay(resmanMock);
+    }
+
+    @Test
+    public void testJobTrackerURLFromInternalMapping() throws Exception {
+        testJobTrackerURL(null);
+    }
+
+    @Test
+    public void testJobTrackerURLFromExternalMapping() throws Exception {
+        testJobTrackerURL(TEST_MAPPING_CONFIG);
+    }
+
+    private void testJobTrackerURL(Object mappingConfiguration) throws Exception {
+        final String ADDRESS = "host2:5678";
+
+        AmbariComponent resman = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(resman.getConfigProperty("yarn.resourcemanager.address")).andReturn(ADDRESS).anyTimes();
+        EasyMock.replay(resman);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("RESOURCEMANAGER")).andReturn(resman).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+        String url = builder.create("JOBTRACKER").get(0);
+        assertEquals("rpc://" + ADDRESS, url);
+    }
+
+    @Test
+    public void testNameNodeURLFromInternalMapping() throws Exception {
+        testNameNodeURL(null);
+    }
+
+    @Test
+    public void testNameNodeURLFromExternalMapping() throws Exception {
+        testNameNodeURL(TEST_MAPPING_CONFIG);
+    }
+
+    private void testNameNodeURL(Object mappingConfiguration) throws Exception {
+        final String ADDRESS = "host1:1234";
+
+        AmbariComponent namenode = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(namenode.getConfigProperty("dfs.namenode.rpc-address")).andReturn(ADDRESS).anyTimes();
+        EasyMock.replay(namenode);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("NAMENODE")).andReturn(namenode).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+        String url = builder.create("NAMENODE").get(0);
+        assertEquals("hdfs://" + ADDRESS, url);
+    }
+
+    @Test
+    public void testWebHCatURLFromInternalMapping() throws Exception {
+        testWebHCatURL(null);
+    }
+
+    @Test
+    public void testWebHCatURLFromExternalMapping() throws Exception {
+        testWebHCatURL(TEST_MAPPING_CONFIG);
+    }
+
+    private void testWebHCatURL(Object mappingConfiguration) throws Exception {
+
+        final String HOSTNAME = "host3";
+        final String PORT     = "1919";
+
+        AmbariComponent webhcatServer = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(webhcatServer.getConfigProperty("templeton.port")).andReturn(PORT).anyTimes();
+        List<String> webHcatServerHosts = Collections.singletonList(HOSTNAME);
+        EasyMock.expect(webhcatServer.getHostNames()).andReturn(webHcatServerHosts).anyTimes();
+        EasyMock.replay(webhcatServer);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("WEBHCAT_SERVER")).andReturn(webhcatServer).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+        String url = builder.create("WEBHCAT").get(0);
+        assertEquals("http://" + HOSTNAME + ":" + PORT + "/templeton", url);
+    }
+
+    @Test
+    public void testOozieURLFromInternalMapping() throws Exception {
+        testOozieURL(null);
+    }
+
+    @Test
+    public void testOozieURLFromExternalMapping() throws Exception {
+        testOozieURL(TEST_MAPPING_CONFIG);
+    }
+
+    private void testOozieURL(Object mappingConfiguration) throws Exception {
+        final String URL = "http://host3:2222";
+
+        AmbariComponent oozieServer = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(oozieServer.getConfigProperty("oozie.base.url")).andReturn(URL).anyTimes();
+        EasyMock.replay(oozieServer);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("OOZIE_SERVER")).andReturn(oozieServer).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+        String url = builder.create("OOZIE").get(0);
+        assertEquals(URL, url);
+    }
+
+    @Test
+    public void testWebHBaseURLFromInternalMapping() throws Exception {
+        testWebHBaseURL(null);
+    }
+
+    @Test
+    public void testWebHBaseURLFromExternalMapping() throws Exception {
+        testWebHBaseURL(TEST_MAPPING_CONFIG);
+    }
+
+    private void testWebHBaseURL(Object mappingConfiguration) throws Exception {
+        final String[] HOSTNAMES = {"host2", "host4"};
+
+        AmbariComponent hbaseMaster = EasyMock.createNiceMock(AmbariComponent.class);
+        List<String> hbaseMasterHosts = Arrays.asList(HOSTNAMES);
+        EasyMock.expect(hbaseMaster.getHostNames()).andReturn(hbaseMasterHosts).anyTimes();
+        EasyMock.replay(hbaseMaster);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("HBASE_MASTER")).andReturn(hbaseMaster).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+        List<String> urls = builder.create("WEBHBASE");
+        validateServiceURLs(urls, HOSTNAMES, "http", "60080", null);
+    }
+
+    @Test
+    public void testWebHdfsURLFromInternalMapping() throws Exception {
+        testWebHdfsURL(null);
+    }
+
+    @Test
+    public void testWebHdfsURLFromExternalMapping() throws Exception {
+        testWebHdfsURL(TEST_MAPPING_CONFIG);
+    }
+
+    @Test
+    public void testWebHdfsURLFromSystemPropertyOverride() throws Exception {
+        // Write the test mapping configuration to a temp file
+        File mappingFile = File.createTempFile("mapping-config", "xml");
+        FileUtils.write(mappingFile, OVERRIDE_MAPPING_FILE_CONTENTS, "utf-8");
+
+        // Set the system property to point to the temp file
+        System.setProperty(AmbariDynamicServiceURLCreator.MAPPING_CONFIG_OVERRIDE_PROPERTY,
+                           mappingFile.getAbsolutePath());
+        try {
+            final String ADDRESS = "host3:1357";
+            // The URL creator should apply the file contents, and create the URL accordingly
+            String url = getTestWebHdfsURL(ADDRESS, null);
+
+            // Verify the URL matches the pattern from the file
+            assertEquals("http://"; + ADDRESS + "/webhdfs/OVERRIDE", url);
+        } finally {
+            // Reset the system property, and delete the temp file
+            System.clearProperty(AmbariDynamicServiceURLCreator.MAPPING_CONFIG_OVERRIDE_PROPERTY);
+            mappingFile.delete();
+        }
+    }
+
+    private void testWebHdfsURL(Object mappingConfiguration) throws Exception {
+        final String ADDRESS = "host3:1357";
+        assertEquals("http://"; + ADDRESS + "/webhdfs", 
getTestWebHdfsURL(ADDRESS, mappingConfiguration));
+    }
+
+
+    private String getTestWebHdfsURL(String address, Object mappingConfiguration) throws Exception {
+        AmbariCluster.ServiceConfiguration hdfsSC = EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+        Map<String, String> hdfsProps = new HashMap<>();
+        hdfsProps.put("dfs.namenode.http-address", address);
+        EasyMock.expect(hdfsSC.getProperties()).andReturn(hdfsProps).anyTimes();
+        EasyMock.replay(hdfsSC);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getServiceConfiguration("HDFS", "hdfs-site")).andReturn(hdfsSC).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Create the URL
+        AmbariDynamicServiceURLCreator creator = newURLCreator(cluster, mappingConfiguration);
+        return creator.create("WEBHDFS").get(0);
+    }
+
+
+    @Test
+    public void testAtlasApiURL() throws Exception {
+        final String ATLAS_REST_ADDRESS = "http://host2:21000";
+
+        AmbariComponent atlasServer = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(atlasServer.getConfigProperty("atlas.rest.address")).andReturn(ATLAS_REST_ADDRESS).anyTimes();
+        EasyMock.replay(atlasServer);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("ATLAS_SERVER")).andReturn(atlasServer).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+        List<String> urls = builder.create("ATLAS-API");
+        assertEquals(1, urls.size());
+        assertEquals(ATLAS_REST_ADDRESS, urls.get(0));
+    }
+
+
+    @Test
+    public void testAtlasURL() throws Exception {
+        final String HTTP_PORT = "8787";
+        final String HTTPS_PORT = "8989";
+
+        final String[] HOSTNAMES = {"host1", "host4"};
+        final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent atlasServer = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(atlasServer.getHostNames()).andReturn(atlastServerHosts).anyTimes();
+        EasyMock.expect(atlasServer.getConfigProperty("atlas.enableTLS")).andReturn("false").anyTimes();
+        EasyMock.expect(atlasServer.getConfigProperty("atlas.server.http.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(atlasServer.getConfigProperty("atlas.server.https.port")).andReturn(HTTPS_PORT).anyTimes();
+        EasyMock.replay(atlasServer);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("ATLAS_SERVER")).andReturn(atlasServer).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+        List<String> urls = builder.create("ATLAS");
+        validateServiceURLs(urls, HOSTNAMES, "http", HTTP_PORT, null);
+
+        EasyMock.reset(atlasServer);
+        EasyMock.expect(atlasServer.getHostNames()).andReturn(atlastServerHosts).anyTimes();
+        EasyMock.expect(atlasServer.getConfigProperty("atlas.enableTLS")).andReturn("true").anyTimes();
+        EasyMock.expect(atlasServer.getConfigProperty("atlas.server.http.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(atlasServer.getConfigProperty("atlas.server.https.port")).andReturn(HTTPS_PORT).anyTimes();
+        EasyMock.replay(atlasServer);
+
+        // Run the test
+        urls = builder.create("ATLAS");
+        validateServiceURLs(urls, HOSTNAMES, "https", HTTPS_PORT, null);
+    }
+
+
+    @Test
+    public void testZeppelinURL() throws Exception {
+        final String HTTP_PORT = "8787";
+        final String HTTPS_PORT = "8989";
+
+        final String[] HOSTNAMES = {"host1", "host4"};
+        final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent zeppelinMaster = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("false").anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+        EasyMock.replay(zeppelinMaster);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("ZEPPELIN_MASTER")).andReturn(zeppelinMaster).anyTimes();
+        EasyMock.replay(cluster);
+
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+
+        // Run the test
+        validateServiceURLs(builder.create("ZEPPELIN"), HOSTNAMES, "http", HTTP_PORT, null);
+
+        EasyMock.reset(zeppelinMaster);
+        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("true").anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+        EasyMock.replay(zeppelinMaster);
+
+        // Run the test
+        validateServiceURLs(builder.create("ZEPPELIN"), HOSTNAMES, "https", HTTPS_PORT, null);
+    }
+
+
+    @Test
+    public void testZeppelinUiURL() throws Exception {
+        final String HTTP_PORT = "8787";
+        final String HTTPS_PORT = "8989";
+
+        final String[] HOSTNAMES = {"host1", "host4"};
+        final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent zeppelinMaster = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("false").anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+        EasyMock.replay(zeppelinMaster);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("ZEPPELIN_MASTER")).andReturn(zeppelinMaster).anyTimes();
+        EasyMock.replay(cluster);
+
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+
+        // Run the test
+        validateServiceURLs(builder.create("ZEPPELINUI"), HOSTNAMES, "http", HTTP_PORT, null);
+
+        EasyMock.reset(zeppelinMaster);
+        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("true").anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+        EasyMock.replay(zeppelinMaster);
+
+        // Run the test
+        validateServiceURLs(builder.create("ZEPPELINUI"), HOSTNAMES, "https", HTTPS_PORT, null);
+    }
+
+
+    @Test
+    public void testZeppelinWsURL() throws Exception {
+        final String HTTP_PORT = "8787";
+        final String HTTPS_PORT = "8989";
+
+        final String[] HOSTNAMES = {"host1", "host4"};
+        final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent zeppelinMaster = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("false").anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+        EasyMock.replay(zeppelinMaster);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("ZEPPELIN_MASTER")).andReturn(zeppelinMaster).anyTimes();
+        EasyMock.replay(cluster);
+
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+
+        // Run the test
+        validateServiceURLs(builder.create("ZEPPELINWS"), HOSTNAMES, "ws", HTTP_PORT, null);
+
+        EasyMock.reset(zeppelinMaster);
+        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("true").anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+        EasyMock.replay(zeppelinMaster);
+
+        // Run the test
+        validateServiceURLs(builder.create("ZEPPELINWS"), HOSTNAMES, "wss", HTTPS_PORT, null);
+    }
+
+
+    @Test
+    public void testDruidCoordinatorURL() throws Exception {
+        final String PORT = "8787";
+
+        final String[] HOSTNAMES = {"host3", "host2"};
+        final List<String> druidCoordinatorHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent druidCoordinator = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(druidCoordinator.getHostNames()).andReturn(druidCoordinatorHosts).anyTimes();
+        EasyMock.expect(druidCoordinator.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
+        EasyMock.replay(druidCoordinator);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("DRUID_COORDINATOR")).andReturn(druidCoordinator).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+        List<String> urls = builder.create("DRUID-COORDINATOR");
+        validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
+    }
+
+
+    @Test
+    public void testDruidBrokerURL() throws Exception {
+        final String PORT = "8181";
+
+        final String[] HOSTNAMES = {"host4", "host3"};
+        final List<String> druidHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent druidBroker = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(druidBroker.getHostNames()).andReturn(druidHosts).anyTimes();
+        EasyMock.expect(druidBroker.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
+        EasyMock.replay(druidBroker);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("DRUID_BROKER")).andReturn(druidBroker).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+        List<String> urls = builder.create("DRUID-BROKER");
+        validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
+    }
+
+
+    @Test
+    public void testDruidRouterURL() throws Exception {
+        final String PORT = "8282";
+
+        final String[] HOSTNAMES = {"host5", "host7"};
+        final List<String> druidHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent druidRouter = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(druidRouter.getHostNames()).andReturn(druidHosts).anyTimes();
+        EasyMock.expect(druidRouter.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
+        EasyMock.replay(druidRouter);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("DRUID_ROUTER")).andReturn(druidRouter).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+        List<String> urls = builder.create("DRUID-ROUTER");
+        validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
+    }
+
+
+    @Test
+    public void testDruidOverlordURL() throws Exception {
+        final String PORT = "8383";
+
+        final String[] HOSTNAMES = {"host4", "host1"};
+        final List<String> druidHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent druidOverlord = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(druidOverlord.getHostNames()).andReturn(druidHosts).anyTimes();
+        EasyMock.expect(druidOverlord.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
+        EasyMock.replay(druidOverlord);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("DRUID_OVERLORD")).andReturn(druidOverlord).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+        List<String> urls = builder.create("DRUID-OVERLORD");
+        validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
+    }
+
+
+    @Test
+    public void testDruidSupersetURL() throws Exception {
+        final String PORT = "8484";
+
+        final String[] HOSTNAMES = {"host4", "host1"};
+        final List<String> druidHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent druidSuperset = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(druidSuperset.getHostNames()).andReturn(druidHosts).anyTimes();
+        EasyMock.expect(druidSuperset.getConfigProperty("SUPERSET_WEBSERVER_PORT")).andReturn(PORT).anyTimes();
+        EasyMock.replay(druidSuperset);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("DRUID_SUPERSET")).andReturn(druidSuperset).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+        List<String> urls = builder.create("SUPERSET");
+        validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
+    }
+
+
+    @Test
+    public void testMissingServiceComponentURL() throws Exception {
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("DRUID_BROKER")).andReturn(null).anyTimes();
+        EasyMock.expect(cluster.getComponent("HIVE_SERVER")).andReturn(null).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+        List<String> urls = builder.create("DRUID-BROKER");
+        assertNotNull(urls);
+        assertEquals(1, urls.size());
+        assertEquals("http://{HOST}:{PORT}";, urls.get(0));
+
+        urls = builder.create("HIVE");
+        assertNotNull(urls);
+        assertEquals(1, urls.size());
+        assertEquals("http://{HOST}:{PORT}/{PATH}";, urls.get(0));
+    }
+
+
+    /**
+     * Convenience method for creating AmbariDynamicServiceURLCreator instances from different mapping configuration
+     * input sources.
+     *
+     * @param cluster       The Ambari ServiceDiscovery Cluster model
+     * @param mappingConfig The mapping configuration, or null if the internal config should be used.
+     *
+     * @return An AmbariDynamicServiceURLCreator instance, capable of creating service URLs based on the specified
+     *         cluster's configuration details.
+     */
+    private static AmbariDynamicServiceURLCreator newURLCreator(AmbariCluster cluster, Object mappingConfig) throws Exception {
+        AmbariDynamicServiceURLCreator result = null;
+
+        if (mappingConfig == null) {
+            result = new AmbariDynamicServiceURLCreator(cluster);
+        } else {
+            if (mappingConfig instanceof String) {
+                result = new AmbariDynamicServiceURLCreator(cluster, (String) mappingConfig);
+            } else if (mappingConfig instanceof File) {
+                result = new AmbariDynamicServiceURLCreator(cluster, (File) mappingConfig);
+            }
+        }
+
+        return result;
+    }
+
+
+    /**
+     * Validate the specified HIVE URLs.
+     *
+     * @param urlsToValidate The URLs to validate
+     * @param hostNames      The host names expected in the test URLs
+     * @param scheme         The expected scheme for the URLs
+     * @param port           The expected port for the URLs
+     * @param path           The expected path for the URLs
+     */
+    private static void validateServiceURLs(List<String> urlsToValidate,
+                                            String[]     hostNames,
+                                            String       scheme,
+                                            String       port,
+                                            String       path) throws MalformedURLException {
+
+        List<String> hostNamesToTest = new LinkedList<>(Arrays.asList(hostNames));
+        for (String url : urlsToValidate) {
+            URI test = null;
+            try {
+                // Make sure it's a valid URL
+                test = new URI(url);
+            } catch (URISyntaxException e) {
+                fail(e.getMessage());
+            }
+
+            // Validate the scheme
+            assertEquals(scheme, test.getScheme());
+
+            // Validate the port
+            assertEquals(port, String.valueOf(test.getPort()));
+
+            // If the expected path is not specified, don't validate it
+            if (path != null) {
+                assertEquals("/" + path, test.getPath());
+            }
+
+            // Validate the host name
+            assertTrue(hostNamesToTest.contains(test.getHost()));
+            hostNamesToTest.remove(test.getHost());
+        }
+        assertTrue(hostNamesToTest.isEmpty());
+    }
+
+
+    private static final String TEST_MAPPING_CONFIG =
+            "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
+            "<service-discovery-url-mappings>\n" +
+            "  <service name=\"NAMENODE\">\n" +
+            "    <url-pattern>hdfs://{DFS_NAMENODE_RPC_ADDRESS}</url-pattern>\n" +
+            "    <properties>\n" +
+            "      <property name=\"DFS_NAMENODE_RPC_ADDRESS\">\n" +
+            "        <component>NAMENODE</component>\n" +
+            "        <config-property>dfs.namenode.rpc-address</config-property>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "\n" +
+            "  <service name=\"JOBTRACKER\">\n" +
+            "    <url-pattern>rpc://{YARN_RM_ADDRESS}</url-pattern>\n" +
+            "    <properties>\n" +
+            "      <property name=\"YARN_RM_ADDRESS\">\n" +
+            "        <component>RESOURCEMANAGER</component>\n" +
+            "        <config-property>yarn.resourcemanager.address</config-property>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "\n" +
+            "  <service name=\"WEBHDFS\">\n" +
+            "    <url-pattern>http://{WEBHDFS_ADDRESS}/webhdfs</url-pattern>\n" +
+            "    <properties>\n" +
+            "      <property name=\"WEBHDFS_ADDRESS\">\n" +
+            "        <service-config 
name=\"HDFS\">hdfs-site</service-config>\n" +
+            "        
<config-property>dfs.namenode.http-address</config-property>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "\n" +
+            "  <service name=\"WEBHCAT\">\n" +
+            "    <url-pattern>http://{HOST}:{PORT}/templeton</url-pattern>\n" +
+            "    <properties>\n" +
+            "      <property name=\"HOST\">\n" +
+            "        <component>WEBHCAT_SERVER</component>\n" +
+            "        <hostname/>\n" +
+            "      </property>\n" +
+            "      <property name=\"PORT\">\n" +
+            "        <component>WEBHCAT_SERVER</component>\n" +
+            "        <config-property>templeton.port</config-property>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "\n" +
+            "  <service name=\"OOZIE\">\n" +
+            "    <url-pattern>{OOZIE_ADDRESS}</url-pattern>\n" +
+            "    <properties>\n" +
+            "      <property name=\"OOZIE_ADDRESS\">\n" +
+            "        <component>OOZIE_SERVER</component>\n" +
+            "        <config-property>oozie.base.url</config-property>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "\n" +
+            "  <service name=\"WEBHBASE\">\n" +
+            "    <url-pattern>http://{HOST}:60080</url-pattern>\n" +
+            "    <properties>\n" +
+            "      <property name=\"HOST\">\n" +
+            "        <component>HBASE_MASTER</component>\n" +
+            "        <hostname/>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "  <service name=\"RESOURCEMANAGER\">\n" +
+            "    <url-pattern>{SCHEME}://{WEBAPP_ADDRESS}/ws</url-pattern>\n" +
+            "    <properties>\n" +
+            "      <property name=\"WEBAPP_HTTP_ADDRESS\">\n" +
+            "        <component>RESOURCEMANAGER</component>\n" +
+            "        <config-property>yarn.resourcemanager.webapp.address</config-property>\n" +
+            "      </property>\n" +
+            "      <property name=\"WEBAPP_HTTPS_ADDRESS\">\n" +
+            "        <component>RESOURCEMANAGER</component>\n" +
+            "        <config-property>yarn.resourcemanager.webapp.https.address</config-property>\n" +
+            "      </property>\n" +
+            "      <property name=\"HTTP_POLICY\">\n" +
+            "        <component>RESOURCEMANAGER</component>\n" +
+            "        <config-property>yarn.http.policy</config-property>\n" +
+            "      </property>\n" +
+            "      <property name=\"SCHEME\">\n" +
+            "        <config-property>\n" +
+            "          <if property=\"HTTP_POLICY\" value=\"HTTPS_ONLY\">\n" +
+            "            <then>https</then>\n" +
+            "            <else>http</else>\n" +
+            "          </if>\n" +
+            "        </config-property>\n" +
+            "      </property>\n" +
+            "      <property name=\"WEBAPP_ADDRESS\">\n" +
+            "        <component>RESOURCEMANAGER</component>\n" +
+            "        <config-property>\n" +
+            "          <if property=\"HTTP_POLICY\" value=\"HTTPS_ONLY\">\n" +
+            "            <then>WEBAPP_HTTPS_ADDRESS</then>\n" +
+            "            <else>WEBAPP_HTTP_ADDRESS</else>\n" +
+            "          </if>\n" +
+            "        </config-property>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "  <service name=\"HIVE\">\n" +
+            "    <url-pattern>{SCHEME}://{HOST}:{PORT}/{PATH}</url-pattern>\n" 
+
+            "    <properties>\n" +
+            "      <property name=\"HOST\">\n" +
+            "        <component>HIVE_SERVER</component>\n" +
+            "        <hostname/>\n" +
+            "      </property>\n" +
+            "      <property name=\"USE_SSL\">\n" +
+            "        <component>HIVE_SERVER</component>\n" +
+            "        <config-property>hive.server2.use.SSL</config-property>\n" +
+            "      </property>\n" +
+            "      <property name=\"PATH\">\n" +
+            "        <component>HIVE_SERVER</component>\n" +
+            "        <config-property>hive.server2.thrift.http.path</config-property>\n" +
+            "      </property>\n" +
+            "      <property name=\"PORT\">\n" +
+            "        <component>HIVE_SERVER</component>\n" +
+            "        <config-property>hive.server2.thrift.http.port</config-property>\n" +
+            "      </property>\n" +
+            "      <property name=\"SCHEME\">\n" +
+            "        <config-property>\n" +
+            "            <if property=\"USE_SSL\" value=\"true\">\n" +
+            "                <then>https</then>\n" +
+            "                <else>http</else>\n" +
+            "            </if>\n" +
+            "        </config-property>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "</service-discovery-url-mappings>\n";
+
+
+    private static final String OVERRIDE_MAPPING_FILE_CONTENTS =
+            "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
+            "<service-discovery-url-mappings>\n" +
+            "  <service name=\"WEBHDFS\">\n" +
+            "    <url-pattern>http://{WEBHDFS_ADDRESS}/webhdfs/OVERRIDE</url-pattern>\n" +
+            "    <properties>\n" +
+            "      <property name=\"WEBHDFS_ADDRESS\">\n" +
+            "        <service-config 
name=\"HDFS\">hdfs-site</service-config>\n" +
+            "        
<config-property>dfs.namenode.http-address</config-property>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "</service-discovery-url-mappings>\n";
+
+}

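As testWebHdfsURLFromSystemPropertyOverride() above demonstrates, the internal
service URL mapping can be replaced at runtime by pointing a system property at
an external mapping file. A minimal sketch of that override, assuming a
hypothetical file path (the class name and property constant are the ones the
test exercises):

    // The path below is hypothetical; MAPPING_CONFIG_OVERRIDE_PROPERTY and
    // AmbariDynamicServiceURLCreator come from the new test file above.
    System.setProperty(AmbariDynamicServiceURLCreator.MAPPING_CONFIG_OVERRIDE_PROPERTY,
                       "/etc/knox/conf/url-mapping-override.xml");

    // Creators constructed afterwards resolve URLs using the external mapping.
    AmbariDynamicServiceURLCreator creator = new AmbariDynamicServiceURLCreator(cluster);
    List<String> urls = creator.create("WEBHDFS");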
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
index 1e5e7b2..f7f0553 100644
--- a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
+++ b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
@@ -27,7 +27,9 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 
 /**

http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
index fb563fa..521b5b4 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
@@ -16,15 +16,28 @@
  */
 package org.apache.hadoop.gateway.topology.simple;
 
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.InputStreamReader;
+import java.io.IOException;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
 import org.apache.hadoop.gateway.services.Service;
 import org.apache.hadoop.gateway.topology.discovery.DefaultServiceDiscoveryConfig;
 import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
 import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryFactory;
 
-import java.io.*;
-import java.util.*;
-
 
 /**
  * Processes simple topology descriptors, producing full topology files, which can subsequently be deployed to the
@@ -78,19 +91,29 @@ public class SimpleDescriptorHandler {
                     descServiceURLs = cluster.getServiceURLs(serviceName);
                 }
 
-                // If there is at least one URL associated with the service, then add it to the map
+                // Validate the discovered service URLs
+                List<String> validURLs = new ArrayList<>();
                 if (descServiceURLs != null && !descServiceURLs.isEmpty()) {
-                    serviceURLs.put(serviceName, descServiceURLs);
+                    // Validate the URL(s)
+                    for (String descServiceURL : descServiceURLs) {
+                        if (validateURL(serviceName, descServiceURL)) {
+                            validURLs.add(descServiceURL);
+                        }
+                    }
+                }
+
+                // If there is at least one valid URL associated with the service, then add it to the map
+                if (!validURLs.isEmpty()) {
+                    serviceURLs.put(serviceName, validURLs);
                 } else {
                     log.failedToDiscoverClusterServiceURLs(serviceName, cluster.getName());
-                    throw new IllegalStateException("ServiceDiscovery failed to resolve any URLs for " + serviceName +
-                                                    ". Topology update aborted!");
                 }
             }
         } else {
             log.failedToDiscoverClusterServices(desc.getClusterName());
         }
 
+        BufferedWriter fw = null;
         topologyDescriptor = null;
         File providerConfig = null;
         try {
@@ -110,7 +133,7 @@ public class SimpleDescriptorHandler {
                 topologyFilename = desc.getClusterName();
             }
             topologyDescriptor = new File(destDirectory, topologyFilename + ".xml");
-            FileWriter fw = new FileWriter(topologyDescriptor);
+            fw = new BufferedWriter(new FileWriter(topologyDescriptor));
 
             fw.write("<topology>\n");
 
@@ -123,8 +146,12 @@ public class SimpleDescriptorHandler {
             }
             policyReader.close();
 
+            // Sort the service names to write the services alphabetically
+            List<String> serviceNames = new ArrayList<>(serviceURLs.keySet());
+            Collections.sort(serviceNames);
+
             // Write the service declarations
-            for (String serviceName : serviceURLs.keySet()) {
+            for (String serviceName : serviceNames) {
                 fw.write("    <service>\n");
                 fw.write("        <role>" + serviceName + "</role>\n");
                 for (String url : serviceURLs.get(serviceName)) {
@@ -136,16 +163,37 @@ public class SimpleDescriptorHandler {
             fw.write("</topology>\n");
 
             fw.flush();
-            fw.close();
         } catch (IOException e) {
             log.failedToGenerateTopologyFromSimpleDescriptor(topologyDescriptor.getName(), e);
             topologyDescriptor.delete();
+        } finally {
+            if (fw != null) {
+                try {
+                    fw.close();
+                } catch (IOException e) {
+                    // ignore
+                }
+            }
         }
 
         result.put("topology", topologyDescriptor);
         return result;
     }
 
+    private static boolean validateURL(String serviceName, String url) {
+        boolean result = false;
+
+        if (url != null && !url.isEmpty()) {
+            try {
+                new URI(url);
+                result = true;
+            } catch (URISyntaxException e) {
+                log.serviceURLValidationFailed(serviceName, url, e);
+            }
+        }
+
+        return result;
+    }
 
     private static File resolveProviderConfigurationReference(String reference, File srcDirectory) {
         File providerConfig;

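The writer handling above pairs the new BufferedWriter with an explicit
finally block so the file handle is released even when topology generation
fails mid-write. A sketch of an equivalent formulation using Java 7
try-with-resources (not the committed code, just an alternative under the
same assumptions):

    // close() is invoked automatically on exit, including when an
    // IOException is thrown mid-write.
    try (BufferedWriter fw = new BufferedWriter(new FileWriter(topologyDescriptor))) {
        fw.write("<topology>\n");
        // ... provider configuration and the alphabetically sorted
        // <service> declarations are written here ...
        fw.write("</topology>\n");
    }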
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
index cf9aa28..2a2c4c1 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
@@ -29,7 +29,7 @@ public interface SimpleDescriptorMessages {
     void failedToDiscoverClusterServices(final String cluster);
 
     @Message(level = MessageLevel.ERROR,
-            text = "No URLs were discovered for {0} in the {1} cluster.")
+            text = "No valid URLs were discovered for {0} in the {1} cluster.")
     void failedToDiscoverClusterServiceURLs(final String serviceName, final String clusterName);
 
     @Message(level = MessageLevel.ERROR,
@@ -37,6 +37,12 @@ public interface SimpleDescriptorMessages {
     void failedToResolveProviderConfigRef(final String providerConfigRef);
 
     @Message(level = MessageLevel.ERROR,
+            text = "URL validation failed for {0} URL {1} : {2}")
+    void serviceURLValidationFailed(final String serviceName,
+                                    final String url,
+                                    @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+    @Message(level = MessageLevel.ERROR,
             text = "Error generating topology {0} from simple descriptor: {1}")
     void failedToGenerateTopologyFromSimpleDescriptor(final String topologyFile,
                                                       @StackTrace( level = MessageLevel.DEBUG ) Exception e );

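For context, SimpleDescriptorHandler obtains an instance of this annotated
interface through the i18n MessagesFactory (imported in the handler diff
above); a minimal sketch of how the new message method is wired up and
invoked, assuming the usual Knox messages pattern:

    // The proxy is created once per class; the new method logs the failed
    // validation at ERROR level, with the stack trace at DEBUG.
    private static final SimpleDescriptorMessages log =
            MessagesFactory.get(SimpleDescriptorMessages.class);

    // ... inside SimpleDescriptorHandler.validateURL(...) ...
    log.serviceURLValidationFailed(serviceName, url, e);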
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
index 90c7146..f79ef23 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
@@ -17,6 +17,23 @@
  */
 package org.apache.hadoop.gateway.topology.simple;
 
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathFactory;
+
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.gateway.topology.validation.TopologyValidator;
 import org.apache.hadoop.gateway.util.XmlUtils;
 import org.easymock.EasyMock;
@@ -26,91 +43,89 @@ import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 import org.xml.sax.SAXException;
 
-import javax.xml.xpath.XPath;
-import javax.xml.xpath.XPathConstants;
-import javax.xml.xpath.XPathFactory;
-import java.io.*;
-import java.util.*;
-
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 
 public class SimpleDescriptorHandlerTest {
 
     private static final String TEST_PROVIDER_CONFIG =
-            "    <gateway>\n" +
-                    "        <provider>\n" +
-                    "            <role>authentication</role>\n" +
-                    "            <name>ShiroProvider</name>\n" +
-                    "            <enabled>true</enabled>\n" +
-                    "            <param>\n" +
-                    "                <!-- \n" +
-                    "                session timeout in minutes,  this is 
really idle timeout,\n" +
-                    "                defaults to 30mins, if the property value 
is not defined,, \n" +
-                    "                current client authentication would 
expire if client idles contiuosly for more than this value\n" +
-                    "                -->\n" +
-                    "                <name>sessionTimeout</name>\n" +
-                    "                <value>30</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>main.ldapRealm</name>\n" +
-                    "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>main.ldapContextFactory</name>\n" +
-                    "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                
<name>main.ldapRealm.contextFactory</name>\n" +
-                    "                <value>$ldapContextFactory</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>main.ldapRealm.userDnTemplate</name>\n" +
-                    "                <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>main.ldapRealm.contextFactory.url</name>\n" +
-                    "                <value>ldap://localhost:33389</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
-                    "                <value>simple</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>urls./**</name>\n" +
-                    "                <value>authcBasic</value>\n" +
-                    "            </param>\n" +
-                    "        </provider>\n" +
-                    "\n" +
-                    "        <provider>\n" +
-                    "            <role>identity-assertion</role>\n" +
-                    "            <name>Default</name>\n" +
-                    "            <enabled>true</enabled>\n" +
-                    "        </provider>\n" +
-                    "\n" +
-                    "        <!--\n" +
-                    "        Defines rules for mapping host names internal to 
a Hadoop cluster to externally accessible host names.\n" +
-                    "        For example, a hadoop service running in AWS may 
return a response that includes URLs containing the\n" +
-                    "        some AWS internal host name.  If the client needs 
to make a subsequent request to the host identified\n" +
-                    "        in those URLs they need to be mapped to external 
host names that the client Knox can use to connect.\n" +
-                    "\n" +
-                    "        If the external hostname and internal host names 
are same turn of this provider by setting the value of\n" +
-                    "        enabled parameter as false.\n" +
-                    "\n" +
-                    "        The name parameter specifies the external host 
names in a comma separated list.\n" +
-                    "        The value parameter specifies corresponding 
internal host names in a comma separated list.\n" +
-                    "\n" +
-                    "        Note that when you are using Sandbox, the 
external hostname needs to be localhost, as seen in out\n" +
-                    "        of box sandbox.xml.  This is because Sandbox uses 
port mapping to allow clients to connect to the\n" +
-                    "        Hadoop services using localhost.  In real 
clusters, external host names would almost never be localhost.\n" +
-                    "        -->\n" +
-                    "        <provider>\n" +
-                    "            <role>hostmap</role>\n" +
-                    "            <name>static</name>\n" +
-                    "            <enabled>true</enabled>\n" +
-                    "            
<param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n"
 +
-                    "        </provider>\n" +
-                    "    </gateway>\n";
+        "    <gateway>\n" +
+        "        <provider>\n" +
+        "            <role>authentication</role>\n" +
+        "            <name>ShiroProvider</name>\n" +
+        "            <enabled>true</enabled>\n" +
+        "            <param>\n" +
+        "                <!-- \n" +
+        "                session timeout in minutes,  this is really idle 
timeout,\n" +
+        "                defaults to 30mins, if the property value is not 
defined,, \n" +
+        "                current client authentication would expire if client 
idles contiuosly for more than this value\n" +
+        "                -->\n" +
+        "                <name>sessionTimeout</name>\n" +
+        "                <value>30</value>\n" +
+        "            </param>\n" +
+        "            <param>\n" +
+        "                <name>main.ldapRealm</name>\n" +
+        "                
<value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n" +
+        "            </param>\n" +
+        "            <param>\n" +
+        "                <name>main.ldapContextFactory</name>\n" +
+        "                
<value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
+        "            </param>\n" +
+        "            <param>\n" +
+        "                <name>main.ldapRealm.contextFactory</name>\n" +
+        "                <value>$ldapContextFactory</value>\n" +
+        "            </param>\n" +
+        "            <param>\n" +
+        "                <name>main.ldapRealm.userDnTemplate</name>\n" +
+        "                
<value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
+        "            </param>\n" +
+        "            <param>\n" +
+        "                <name>main.ldapRealm.contextFactory.url</name>\n" +
+        "                <value>ldap://localhost:33389</value>\n" +
+        "            </param>\n" +
+        "            <param>\n" +
+        "                
<name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
+        "                <value>simple</value>\n" +
+        "            </param>\n" +
+        "            <param>\n" +
+        "                <name>urls./**</name>\n" +
+        "                <value>authcBasic</value>\n" +
+        "            </param>\n" +
+        "        </provider>\n" +
+        "\n" +
+        "        <provider>\n" +
+        "            <role>identity-assertion</role>\n" +
+        "            <name>Default</name>\n" +
+        "            <enabled>true</enabled>\n" +
+        "        </provider>\n" +
+        "\n" +
+        "        <!--\n" +
+        "        Defines rules for mapping host names internal to a Hadoop 
cluster to externally accessible host names.\n" +
+        "        For example, a hadoop service running in AWS may return a 
response that includes URLs containing the\n" +
+        "        some AWS internal host name.  If the client needs to make a 
subsequent request to the host identified\n" +
+        "        in those URLs they need to be mapped to external host names 
that the client Knox can use to connect.\n" +
+        "\n" +
+        "        If the external hostname and internal host names are same 
turn of this provider by setting the value of\n" +
+        "        enabled parameter as false.\n" +
+        "\n" +
+        "        The name parameter specifies the external host names in a 
comma separated list.\n" +
+        "        The value parameter specifies corresponding internal host 
names in a comma separated list.\n" +
+        "\n" +
+        "        Note that when you are using Sandbox, the external hostname 
needs to be localhost, as seen in out\n" +
+        "        of box sandbox.xml.  This is because Sandbox uses port 
mapping to allow clients to connect to the\n" +
+        "        Hadoop services using localhost.  In real clusters, external 
host names would almost never be localhost.\n" +
+        "        -->\n" +
+        "        <provider>\n" +
+        "            <role>hostmap</role>\n" +
+        "            <name>static</name>\n" +
+        "            <enabled>true</enabled>\n" +
+        "            
<param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n"
 +
+        "        </provider>\n" +
+        "    </gateway>\n";
 
 
     /**
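For context on the hostmap provider configured in the string above: its single param maps one external host name (the param name) to a comma-separated list of internal host names (the param value). A minimal standalone sketch of that lookup, assuming a plain Map-based resolver purely for illustration (this is not Knox's actual hostmap implementation):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class HostmapSketch {
        public static void main(String[] args) {
            // Mirrors <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
            Map<String, List<String>> externalToInternal = new HashMap<>();
            externalToInternal.put("localhost", Arrays.asList("sandbox", "sandbox.hortonworks.com"));

            // Resolve an internal host name from a cluster response back to its external name
            String internal = "sandbox.hortonworks.com";
            String external = externalToInternal.entrySet().stream()
                    .filter(e -> e.getValue().contains(internal))
                    .map(Map.Entry::getKey)
                    .findFirst()
                    .orElse(internal); // hypothetical fallback: pass unmapped hosts through unchanged
            System.out.println(internal + " -> " + external); // sandbox.hortonworks.com -> localhost
        }
    }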
@@ -134,7 +149,7 @@ public class SimpleDescriptorHandlerTest {
         serviceURLs.put("WEBHBASE", null);
         serviceURLs.put("HIVE", null);
         serviceURLs.put("RESOURCEMANAGER", null);
-        serviceURLs.put("AMBARIUI", 
Arrays.asList("http://c6401.ambari.apache.org:8080";));
+        serviceURLs.put("AMBARIUI", 
Collections.singletonList("http://c6401.ambari.apache.org:8080";));
 
         // Write the externalized provider config to a temp file
         File providerConfig = writeProviderConfig("ambari-cluster-policy.xml", TEST_PROVIDER_CONFIG);
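The hunk above swaps Arrays.asList for Collections.singletonList when declaring the single AMBARIUI URL. Both produce a one-element list, but singletonList is the more idiomatic choice for exactly one element: it avoids allocating the varargs array and returns a fully immutable list, where Arrays.asList is merely fixed-size. A small sketch of the equivalence:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class SingletonListSketch {
        public static void main(String[] args) {
            List<String> viaAsList = Arrays.asList("http://c6401.ambari.apache.org:8080");
            List<String> viaSingleton = Collections.singletonList("http://c6401.ambari.apache.org:8080");
            // Same contents, so the two lists compare equal
            System.out.println(viaAsList.equals(viaSingleton)); // true
        }
    }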
@@ -225,14 +240,152 @@ public class SimpleDescriptorHandlerTest {
     }
 
 
-    private File writeProviderConfig(String path, String content) throws IOException {
-        File f = new File(path);
+    /**
+     * KNOX-1006
+     *
+     * Verify the behavior of the SimpleDescriptorHandler when service discovery fails to produce a valid URL for
+     * a service.
+     *
+     * N.B. This test depends on the PropertiesFileServiceDiscovery extension being configured:
+     *             org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscovery
+     */
+    @Test
+    public void testInvalidServiceURLFromDiscovery() throws Exception {
+        final String CLUSTER_NAME = "myproperties";
+
+        // Configure the PropertiesFile Service Discovery implementation for this test
+        final String DEFAULT_VALID_SERVICE_URL = "http://localhost:9999/thiswillwork";
+        Properties serviceDiscoverySourceProps = new Properties();
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".NAMENODE",
+                                                DEFAULT_VALID_SERVICE_URL.replace("http", "hdfs"));
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".JOBTRACKER",
+                                                DEFAULT_VALID_SERVICE_URL.replace("http", "rpc"));
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHDFS",         DEFAULT_VALID_SERVICE_URL);
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHCAT",         DEFAULT_VALID_SERVICE_URL);
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".OOZIE",           DEFAULT_VALID_SERVICE_URL);
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHBASE",        DEFAULT_VALID_SERVICE_URL);
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".HIVE",            "{SCHEME}://localhost:10000/");
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".RESOURCEMANAGER", DEFAULT_VALID_SERVICE_URL);
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".AMBARIUI",        DEFAULT_VALID_SERVICE_URL);
+        File serviceDiscoverySource = File.createTempFile("service-discovery", ".properties");
+        serviceDiscoverySourceProps.store(new FileOutputStream(serviceDiscoverySource),
+                                          "Test Service Discovery Source");
+
+        // Prepare a mock SimpleDescriptor
+        final String type = "PROPERTIES_FILE";
+        final String address = serviceDiscoverySource.getAbsolutePath();
+        final Map<String, List<String>> serviceURLs = new HashMap<>();
+        serviceURLs.put("NAMENODE", null);
+        serviceURLs.put("JOBTRACKER", null);
+        serviceURLs.put("WEBHDFS", null);
+        serviceURLs.put("WEBHCAT", null);
+        serviceURLs.put("OOZIE", null);
+        serviceURLs.put("WEBHBASE", null);
+        serviceURLs.put("HIVE", null);
+        serviceURLs.put("RESOURCEMANAGER", null);
+        serviceURLs.put("AMBARIUI", 
Collections.singletonList("http://c6401.ambari.apache.org:8080";));
 
-        Writer fw = new FileWriter(f);
-        fw.write(content);
-        fw.flush();
-        fw.close();
+        // Write the externalized provider config to a temp file
+        File providerConfig = writeProviderConfig("ambari-cluster-policy.xml", 
TEST_PROVIDER_CONFIG);
+
+        File topologyFile = null;
+        try {
+            File destDir = (new File(".")).getCanonicalFile();
+
+            // Mock out the simple descriptor
+            SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class);
+            EasyMock.expect(testDescriptor.getName()).andReturn("mysimpledescriptor").anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryAddress()).andReturn(address).anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryType()).andReturn(type).anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryUser()).andReturn(null).anyTimes();
+            EasyMock.expect(testDescriptor.getProviderConfig()).andReturn(providerConfig.getAbsolutePath()).anyTimes();
+            EasyMock.expect(testDescriptor.getClusterName()).andReturn(CLUSTER_NAME).anyTimes();
+            List<SimpleDescriptor.Service> serviceMocks = new ArrayList<>();
+            for (String serviceName : serviceURLs.keySet()) {
+                SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class);
+                EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes();
+                EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes();
+                EasyMock.replay(svc);
+                serviceMocks.add(svc);
+            }
+            EasyMock.expect(testDescriptor.getServices()).andReturn(serviceMocks).anyTimes();
+            EasyMock.replay(testDescriptor);
+
+            // Invoke the simple descriptor handler
+            Map<String, File> files =
+                    SimpleDescriptorHandler.handle(testDescriptor,
+                                                   providerConfig.getParentFile(), // simple desc co-located with provider config
+                                                   destDir);
+
+            topologyFile = files.get("topology");
 
+            // Validate the resulting topology descriptor
+            assertTrue(topologyFile.exists());
+
+            // Validate the topology descriptor's correctness
+            TopologyValidator validator = new TopologyValidator( topologyFile.getAbsolutePath() );
+            if( !validator.validateTopology() ){
+                throw new SAXException( validator.getErrorString() );
+            }
+
+            XPathFactory xPathfactory = XPathFactory.newInstance();
+            XPath xpath = xPathfactory.newXPath();
+
+            // Parse the topology descriptor
+            Document topologyXml = XmlUtils.readXml(topologyFile);
+
+            // Validate the provider configuration
+            Document extProviderConf = XmlUtils.readXml(new ByteArrayInputStream(TEST_PROVIDER_CONFIG.getBytes()));
+            Node gatewayNode = (Node) xpath.compile("/topology/gateway").evaluate(topologyXml, XPathConstants.NODE);
+            assertTrue("Resulting provider config should be identical to the referenced content.",
+                       extProviderConf.getDocumentElement().isEqualNode(gatewayNode));
+
+            // Validate the service declarations
+            List<String> topologyServices = new ArrayList<>();
+            Map<String, List<String>> topologyServiceURLs = new HashMap<>();
+            NodeList serviceNodes =
+                    (NodeList) xpath.compile("/topology/service").evaluate(topologyXml, XPathConstants.NODESET);
+            for (int serviceNodeIndex = 0; serviceNodeIndex < serviceNodes.getLength(); serviceNodeIndex++) {
+                Node serviceNode = serviceNodes.item(serviceNodeIndex);
+                Node roleNode = (Node) xpath.compile("role/text()").evaluate(serviceNode, XPathConstants.NODE);
+                assertNotNull(roleNode);
+                String role = roleNode.getNodeValue();
+                topologyServices.add(role);
+                NodeList urlNodes = (NodeList) xpath.compile("url/text()").evaluate(serviceNode, XPathConstants.NODESET);
+                for (int urlNodeIndex = 0; urlNodeIndex < urlNodes.getLength(); urlNodeIndex++) {
+                    Node urlNode = urlNodes.item(urlNodeIndex);
+                    assertNotNull(urlNode);
+                    String url = urlNode.getNodeValue();
+                    assertNotNull("Every declared service should have a URL.", url);
+                    if (!topologyServiceURLs.containsKey(role)) {
+                        topologyServiceURLs.put(role, new ArrayList<String>());
+                    }
+                    topologyServiceURLs.get(role).add(url);
+                }
+            }
+
+            // There should not be a service element for HIVE, since it had no valid URLs
+            assertEquals("Unexpected number of service declarations.", serviceURLs.size() - 1, topologyServices.size());
+            assertFalse("The HIVE service should have been omitted from the generated topology.", topologyServices.contains("HIVE"));
+
+            assertEquals("Unexpected number of service URLs.", serviceURLs.size() - 1, topologyServiceURLs.size());
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            fail(e.getMessage());
+        } finally {
+            serviceDiscoverySource.delete();
+            providerConfig.delete();
+            if (topologyFile != null) {
+                topologyFile.delete();
+            }
+        }
+    }
+
+
+    private File writeProviderConfig(String path, String content) throws IOException {
+        File f = new File(path);
+        FileUtils.write(f, content);
         return f;
     }
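The writeProviderConfig change above collapses the old create/write/flush/close sequence on a FileWriter into a single commons-io call; FileUtils.write handles opening, flushing, and closing the stream. A minimal usage sketch (the file name is illustrative; note that later commons-io releases deprecate this two-argument overload in favor of one that names a charset explicitly):

    import java.io.File;
    import java.io.IOException;
    import org.apache.commons.io.FileUtils;

    public class WriteSketch {
        public static void main(String[] args) throws IOException {
            File f = new File("example.xml"); // hypothetical target file
            // One call replaces new FileWriter(f) / write / flush / close
            FileUtils.write(f, "<gateway/>");
        }
    }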
 
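The new KNOX-1006 test feeds HIVE the unresolved template "{SCHEME}://localhost:10000/" and asserts that the service is omitted from the generated topology. The handler's actual validation logic is not shown in this diff, but a plausible check, assumed here purely for illustration, is URI parsing, under which the brace characters are illegal:

    import java.net.URI;
    import java.net.URISyntaxException;

    public class UrlValiditySketch {
        public static void main(String[] args) {
            String[] candidates = {
                "http://localhost:9999/thiswillwork",  // the test's DEFAULT_VALID_SERVICE_URL
                "{SCHEME}://localhost:10000/"          // the HIVE entry; '{' and '}' are illegal in a URI
            };
            for (String candidate : candidates) {
                try {
                    new URI(candidate);
                    System.out.println("valid:   " + candidate);
                } catch (URISyntaxException e) {
                    // A service with only invalid URLs would be dropped from the topology
                    System.out.println("invalid: " + candidate);
                }
            }
        }
    }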
