http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java ---------------------------------------------------------------------- diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java new file mode 100644 index 0000000..fb563fa --- /dev/null +++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java @@ -0,0 +1,186 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package org.apache.hadoop.gateway.topology.simple; + +import org.apache.hadoop.gateway.i18n.messages.MessagesFactory; +import org.apache.hadoop.gateway.services.Service; +import org.apache.hadoop.gateway.topology.discovery.DefaultServiceDiscoveryConfig; +import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery; +import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryFactory; + +import java.io.*; +import java.util.*; + + +/** + * Processes simple topology descriptors, producing full topology files, which can subsequently be deployed to the + * gateway. 
+ */ +public class SimpleDescriptorHandler { + + private static final Service[] NO_GATEWAY_SERVICES = new Service[]{}; + + private static final SimpleDescriptorMessages log = MessagesFactory.get(SimpleDescriptorMessages.class); + + public static Map<String, File> handle(File desc) throws IOException { + return handle(desc, NO_GATEWAY_SERVICES); + } + + public static Map<String, File> handle(File desc, Service...gatewayServices) throws IOException { + return handle(desc, desc.getParentFile(), gatewayServices); + } + + public static Map<String, File> handle(File desc, File destDirectory) throws IOException { + return handle(desc, destDirectory, NO_GATEWAY_SERVICES); + } + + public static Map<String, File> handle(File desc, File destDirectory, Service...gatewayServices) throws IOException { + return handle(SimpleDescriptorFactory.parse(desc.getAbsolutePath()), desc.getParentFile(), destDirectory, gatewayServices); + } + + public static Map<String, File> handle(SimpleDescriptor desc, File srcDirectory, File destDirectory) { + return handle(desc, srcDirectory, destDirectory, NO_GATEWAY_SERVICES); + } + + public static Map<String, File> handle(SimpleDescriptor desc, File srcDirectory, File destDirectory, Service...gatewayServices) { + Map<String, File> result = new HashMap<>(); + + File topologyDescriptor; + + DefaultServiceDiscoveryConfig sdc = new DefaultServiceDiscoveryConfig(desc.getDiscoveryAddress()); + sdc.setUser(desc.getDiscoveryUser()); + sdc.setPasswordAlias(desc.getDiscoveryPasswordAlias()); + ServiceDiscovery sd = ServiceDiscoveryFactory.get(desc.getDiscoveryType(), gatewayServices); + ServiceDiscovery.Cluster cluster = sd.discover(sdc, desc.getClusterName()); + + Map<String, List<String>> serviceURLs = new HashMap<>(); + + if (cluster != null) { + for (SimpleDescriptor.Service descService : desc.getServices()) { + String serviceName = descService.getName(); + + List<String> descServiceURLs = descService.getURLs(); + if (descServiceURLs == null || descServiceURLs.isEmpty()) { + descServiceURLs = cluster.getServiceURLs(serviceName); + } + + // If there is at least one URL associated with the service, then add it to the map + if (descServiceURLs != null && !descServiceURLs.isEmpty()) { + serviceURLs.put(serviceName, descServiceURLs); + } else { + log.failedToDiscoverClusterServiceURLs(serviceName, cluster.getName()); + throw new IllegalStateException("ServiceDiscovery failed to resolve any URLs for " + serviceName + + ". Topology update aborted!"); + } + } + } else { + log.failedToDiscoverClusterServices(desc.getClusterName()); + } + + topologyDescriptor = null; + File providerConfig = null; + try { + // Verify that the referenced provider configuration exists before attempting to reading it + providerConfig = resolveProviderConfigurationReference(desc.getProviderConfig(), srcDirectory); + if (providerConfig == null) { + log.failedToResolveProviderConfigRef(desc.getProviderConfig()); + throw new IllegalArgumentException("Unresolved provider configuration reference: " + + desc.getProviderConfig() + " ; Topology update aborted!"); + } + result.put("reference", providerConfig); + + // TODO: Should the contents of the provider config be validated before incorporating it into the topology? 
+ + String topologyFilename = desc.getName(); + if (topologyFilename == null) { + topologyFilename = desc.getClusterName(); + } + topologyDescriptor = new File(destDirectory, topologyFilename + ".xml"); + FileWriter fw = new FileWriter(topologyDescriptor); + + fw.write("<topology>\n"); + + // Copy the externalized provider configuration content into the topology descriptor in-line + InputStreamReader policyReader = new InputStreamReader(new FileInputStream(providerConfig)); + char[] buffer = new char[1024]; + int count; + while ((count = policyReader.read(buffer)) > 0) { + fw.write(buffer, 0, count); + } + policyReader.close(); + + // Write the service declarations + for (String serviceName : serviceURLs.keySet()) { + fw.write(" <service>\n"); + fw.write(" <role>" + serviceName + "</role>\n"); + for (String url : serviceURLs.get(serviceName)) { + fw.write(" <url>" + url + "</url>\n"); + } + fw.write(" </service>\n"); + } + + fw.write("</topology>\n"); + + fw.flush(); + fw.close(); + } catch (IOException e) { + log.failedToGenerateTopologyFromSimpleDescriptor(topologyDescriptor.getName(), e); + topologyDescriptor.delete(); + } + + result.put("topology", topologyDescriptor); + return result; + } + + + private static File resolveProviderConfigurationReference(String reference, File srcDirectory) { + File providerConfig; + + // If the reference includes a path + if (reference.contains(File.separator)) { + // Check if it's an absolute path + providerConfig = new File(reference); + if (!providerConfig.exists()) { + // If it's not an absolute path, try treating it as a relative path + providerConfig = new File(srcDirectory, reference); + if (!providerConfig.exists()) { + providerConfig = null; + } + } + } else { // No file path, just a name + // Check if it's co-located with the referencing descriptor + providerConfig = new File(srcDirectory, reference); + if (!providerConfig.exists()) { + // Check the shared-providers config location + File sharedProvidersDir = new File(srcDirectory, "../shared-providers"); + if (sharedProvidersDir.exists()) { + providerConfig = new File(sharedProvidersDir, reference); + if (!providerConfig.exists()) { + // Check if it's a valid name without the extension + providerConfig = new File(sharedProvidersDir, reference + ".xml"); + if (!providerConfig.exists()) { + providerConfig = null; + } + } + } + } + } + + return providerConfig; + } + +}
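As a point of reference, a minimal sketch of how SimpleDescriptorHandler might be driven directly, assuming the descriptors/ and topologies/ directory layout exercised by DefaultTopologyServiceTest later in this commit. The descriptor path and destination directory below are hypothetical; the handle() overloads and the "reference"/"topology" keys of the returned map are taken from the class above. A simple descriptor is a small JSON (or YAML) file naming the discovery type and address, a provider-config-ref, the cluster, and the services to expose; services listed without explicit URLs are resolved via service discovery.

import org.apache.hadoop.gateway.topology.simple.SimpleDescriptorHandler;

import java.io.File;
import java.io.IOException;
import java.util.Map;

public class SimpleDescriptorHandlerUsage {

    public static void main(String[] args) throws IOException {
        // Hypothetical paths; in the gateway these would be the monitored
        // descriptors/ and topologies/ directories under the conf directory.
        File simpleDescriptor = new File("conf/descriptors/mycluster.json");
        File topologiesDir    = new File("conf/topologies");

        // The descriptor content (JSON form) looks roughly like:
        //   { "discovery-type": "AMBARI",
        //     "discovery-address": "http://c6401.ambari.apache.org:8080",
        //     "provider-config-ref": "ambari-cluster-policy.xml",
        //     "cluster": "myCluster",
        //     "services": [ { "name": "NAMENODE" },
        //                   { "name": "AMBARIUI", "urls": [ "http://c6401.ambari.apache.org:8080" ] } ] }

        // Parses the descriptor, resolves provider-config-ref (co-located with the
        // descriptor, relative, or under ../shared-providers), discovers URLs for
        // services that declare none, and writes <descriptor-name>.xml into the
        // destination directory.
        Map<String, File> generated = SimpleDescriptorHandler.handle(simpleDescriptor, topologiesDir);

        File providerConfig = generated.get("reference"); // the resolved provider configuration
        File topology       = generated.get("topology");  // the generated topology file
        System.out.println("Generated " + topology + " from providers in " + providerConfig);
    }
}

Note that when discovery returns a cluster but yields no URLs for a declared service, handle() throws IllegalStateException rather than writing a partial topology, and an unresolvable provider configuration reference results in an IllegalArgumentException.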
http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java ---------------------------------------------------------------------- diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java new file mode 100644 index 0000000..32ceba9 --- /dev/null +++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java @@ -0,0 +1,111 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.gateway.topology.simple; + +import com.fasterxml.jackson.annotation.JsonProperty; + +import java.util.ArrayList; +import java.util.List; + +class SimpleDescriptorImpl implements SimpleDescriptor { + + @JsonProperty("discovery-type") + private String discoveryType; + + @JsonProperty("discovery-address") + private String discoveryAddress; + + @JsonProperty("discovery-user") + private String discoveryUser; + + @JsonProperty("discovery-pwd-alias") + private String discoveryPasswordAlias; + + @JsonProperty("provider-config-ref") + private String providerConfig; + + @JsonProperty("cluster") + private String cluster; + + @JsonProperty("services") + private List<ServiceImpl> services; + + private String name = null; + + void setName(String name) { + this.name = name; + } + + @Override + public String getName() { + return name; + } + + @Override + public String getDiscoveryType() { + return discoveryType; + } + + @Override + public String getDiscoveryAddress() { + return discoveryAddress; + } + + @Override + public String getDiscoveryUser() { + return discoveryUser; + } + + @Override + public String getDiscoveryPasswordAlias() { + return discoveryPasswordAlias; + } + + @Override + public String getClusterName() { + return cluster; + } + + @Override + public String getProviderConfig() { + return providerConfig; + } + + @Override + public List<Service> getServices() { + List<Service> result = new ArrayList<>(); + result.addAll(services); + return result; + } + + public static class ServiceImpl implements Service { + private String name; + private List<String> urls; + + @Override + public String getName() { + return name; + } + + @Override + public List<String> getURLs() { + return urls; + } + } + +} http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java ---------------------------------------------------------------------- diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java 
b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java new file mode 100644 index 0000000..cf9aa28 --- /dev/null +++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java @@ -0,0 +1,44 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * <p> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package org.apache.hadoop.gateway.topology.simple; + +import org.apache.hadoop.gateway.i18n.messages.Message; +import org.apache.hadoop.gateway.i18n.messages.MessageLevel; +import org.apache.hadoop.gateway.i18n.messages.Messages; +import org.apache.hadoop.gateway.i18n.messages.StackTrace; + +@Messages(logger="org.apache.gateway.topology.simple") +public interface SimpleDescriptorMessages { + + @Message(level = MessageLevel.ERROR, + text = "Service discovery for cluster {0} failed.") + void failedToDiscoverClusterServices(final String cluster); + + @Message(level = MessageLevel.ERROR, + text = "No URLs were discovered for {0} in the {1} cluster.") + void failedToDiscoverClusterServiceURLs(final String serviceName, final String clusterName); + + @Message(level = MessageLevel.ERROR, + text = "Failed to resolve the referenced provider configuration {0}.") + void failedToResolveProviderConfigRef(final String providerConfigRef); + + @Message(level = MessageLevel.ERROR, + text = "Error generating topology {0} from simple descriptor: {1}") + void failedToGenerateTopologyFromSimpleDescriptor(final String topologyFile, + @StackTrace( level = MessageLevel.DEBUG ) Exception e ); + +} http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java ---------------------------------------------------------------------- diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java index 55cd5cc..498d750 100644 --- a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java +++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java @@ -22,8 +22,12 @@ import org.apache.commons.io.IOUtils; import org.apache.commons.io.monitor.FileAlterationMonitor; import org.apache.commons.io.monitor.FileAlterationObserver; import org.apache.hadoop.gateway.config.GatewayConfig; +import org.apache.hadoop.gateway.services.security.AliasService; import org.apache.hadoop.gateway.services.topology.impl.DefaultTopologyService; import org.apache.hadoop.gateway.topology.*; +import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery; +import 
org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig; +import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryFactory; import org.apache.hadoop.test.TestUtils; import org.easymock.EasyMock; import org.junit.After; @@ -36,6 +40,8 @@ import java.io.InputStream; import java.io.OutputStream; import java.util.*; +import static org.easymock.EasyMock.anyObject; +import static org.easymock.EasyMock.isA; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.core.IsNull.notNullValue; @@ -78,9 +84,17 @@ public class DefaultTopologyServiceTest { public void testGetTopologies() throws Exception { File dir = createDir(); - long time = dir.lastModified(); + File topologyDir = new File(dir, "topologies"); + + File descriptorsDir = new File(dir, "descriptors"); + descriptorsDir.mkdirs(); + + File sharedProvidersDir = new File(dir, "shared-providers"); + sharedProvidersDir.mkdirs(); + + long time = topologyDir.lastModified(); try { - createFile(dir, "one.xml", "org/apache/hadoop/gateway/topology/file/topology-one.xml", time); + createFile(topologyDir, "one.xml", "org/apache/hadoop/gateway/topology/file/topology-one.xml", time); TestTopologyListener topoListener = new TestTopologyListener(); FileAlterationMonitor monitor = new FileAlterationMonitor(Long.MAX_VALUE); @@ -89,17 +103,16 @@ public class DefaultTopologyServiceTest { Map<String, String> c = new HashMap<>(); GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class); - EasyMock.expect(config.getGatewayTopologyDir()).andReturn(dir.toString()).anyTimes(); + EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes(); + EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes(); EasyMock.replay(config); provider.init(config, c); - provider.addTopologyChangeListener(topoListener); provider.reloadTopologies(); - Collection<Topology> topologies = provider.getTopologies(); assertThat(topologies, notNullValue()); assertThat(topologies.size(), is(1)); @@ -110,7 +123,7 @@ public class DefaultTopologyServiceTest { topoListener.events.clear(); // Add a file to the directory. - File two = createFile(dir, "two.xml", "org/apache/hadoop/gateway/topology/file/topology-two.xml", 1L); + File two = createFile(topologyDir, "two.xml", "org/apache/hadoop/gateway/topology/file/topology-two.xml", 1L); provider.reloadTopologies(); topologies = provider.getTopologies(); assertThat(topologies.size(), is(2)); @@ -131,7 +144,7 @@ public class DefaultTopologyServiceTest { assertThat(event.getTopology(), notNullValue()); // Update a file in the directory. - two = createFile(dir, "two.xml", "org/apache/hadoop/gateway/topology/file/topology-three.xml", 2L); + two = createFile(topologyDir, "two.xml", "org/apache/hadoop/gateway/topology/file/topology-three.xml", 2L); provider.reloadTopologies(); topologies = provider.getTopologies(); assertThat(topologies.size(), is(2)); @@ -153,6 +166,49 @@ public class DefaultTopologyServiceTest { topology = topologies.iterator().next(); assertThat(topology.getName(), is("one")); assertThat(topology.getTimestamp(), is(time)); + + // Add a simple descriptor to the descriptors dir to verify topology generation and loading (KNOX-1006) + // N.B. 
This part of the test depends on the DummyServiceDiscovery extension being configured: + // org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery + AliasService aliasService = EasyMock.createNiceMock(AliasService.class); + EasyMock.expect(aliasService.getPasswordFromAliasForGateway(anyObject(String.class))).andReturn(null).anyTimes(); + EasyMock.replay(aliasService); + DefaultTopologyService.DescriptorsMonitor dm = + new DefaultTopologyService.DescriptorsMonitor(topologyDir, aliasService); + + // Write out the referenced provider config first + File provCfgFile = createFile(sharedProvidersDir, + "ambari-cluster-policy.xml", + "org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml", + 1L); + try { + // Create the simple descriptor in the descriptors dir + File simpleDesc = + createFile(descriptorsDir, + "four.json", + "org/apache/hadoop/gateway/topology/file/simple-topology-four.json", + 1L); + + // Trigger the topology generation by noticing the simple descriptor + dm.onFileChange(simpleDesc); + + // Load the generated topology + provider.reloadTopologies(); + topologies = provider.getTopologies(); + assertThat(topologies.size(), is(2)); + names = new HashSet<>(Arrays.asList("one", "four")); + iterator = topologies.iterator(); + topology = iterator.next(); + assertThat(names, hasItem(topology.getName())); + names.remove(topology.getName()); + topology = iterator.next(); + assertThat(names, hasItem(topology.getName())); + names.remove(topology.getName()); + assertThat(names.size(), is(0)); + } finally { + provCfgFile.delete(); + + } } finally { FileUtils.deleteQuietly(dir); } http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java ---------------------------------------------------------------------- diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java new file mode 100644 index 0000000..269bed2 --- /dev/null +++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java @@ -0,0 +1,90 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ +package org.apache.hadoop.gateway.topology.discovery; + +import org.apache.hadoop.gateway.services.security.impl.DefaultAliasService; +import org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscoveryType; +import org.junit.Test; + +import java.io.File; +import java.io.FileOutputStream; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import static org.junit.Assert.*; + + +public class PropertiesFileServiceDiscoveryTest { + + private static final Map<String, String> clusterProperties = new HashMap<>(); + static { + clusterProperties.put("mycluster.name", "mycluster"); + clusterProperties.put("mycluster.NAMENODE", "hdfs://namenodehost:8020"); + clusterProperties.put("mycluster.JOBTRACKER", "rpc://jobtrackerhostname:8050"); + clusterProperties.put("mycluster.WEBHCAT", "http://webhcathost:50111/templeton"); + clusterProperties.put("mycluster.OOZIE", "http://ooziehost:11000/oozie"); + clusterProperties.put("mycluster.HIVE", "http://hivehostname:10001/clipath"); + clusterProperties.put("mycluster.RESOURCEMANAGER", "http://remanhost:8088/ws"); + } + + private static final Properties config = new Properties(); + static { + for (String name : clusterProperties.keySet()) { + config.setProperty(name, clusterProperties.get(name)); + } + } + + + @Test + public void testPropertiesFileServiceDiscovery() throws Exception { + ServiceDiscovery sd = ServiceDiscoveryFactory.get("PROPERTIES_FILE"); + assertNotNull(sd); + + String discoveryAddress = this.getClass().getName() + "__test-discovery-source.properties"; + File discoverySource = new File(discoveryAddress); + try { + config.store(new FileOutputStream(discoverySource), "Test discovery source for PropertiesFileServiceDiscovery"); + + ServiceDiscovery.Cluster c = + sd.discover(new DefaultServiceDiscoveryConfig(discoverySource.getAbsolutePath()), "mycluster"); + assertNotNull(c); + for (String name : clusterProperties.keySet()) { + assertEquals(clusterProperties.get(name), c.getServiceURLs(name.split("\\.")[1]).get(0)); + } + } finally { + discoverySource.delete(); + } + } + + + private void printServiceURLs(ServiceDiscovery.Cluster cluster, String...services) { + for (String name : services) { + String value = ""; + List<String> urls = cluster.getServiceURLs(name); + if (urls != null && !urls.isEmpty()) { + for (String url : urls) { + value += url + " "; + } + } + System.out.println(String.format("%18s: %s", name, value)); + } + } + + +} http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java ---------------------------------------------------------------------- diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java new file mode 100644 index 0000000..d592ede --- /dev/null +++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java @@ -0,0 +1,81 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package org.apache.hadoop.gateway.topology.discovery; + +import org.apache.hadoop.gateway.services.security.AliasService; +import org.apache.hadoop.gateway.services.security.impl.DefaultAliasService; +import org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscoveryType; +import org.junit.Test; + +import java.lang.reflect.Field; + +import static org.junit.Assert.*; + + +public class ServiceDiscoveryFactoryTest { + + @Test + public void testGetDummyImpl() throws Exception { + String TYPE = "DUMMY"; + ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE); + assertNotNull("Expected to get a ServiceDiscovery object.", sd); + assertEquals("Unexpected ServiceDiscovery type.", TYPE, sd.getType()); + } + + + @Test + public void testGetDummyImplWithMismatchedCase() throws Exception { + String TYPE = "dUmmY"; + ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE); + assertNotNull("Expected to get a ServiceDiscovery object.", sd); + assertEquals("Unexpected ServiceDiscovery type.", TYPE.toUpperCase(), sd.getType()); + } + + + @Test + public void testGetInvalidImpl() throws Exception { + String TYPE = "InValID"; + ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE); + assertNull("Unexpected ServiceDiscovery object.", sd); + } + + + @Test + public void testGetImplWithMismatchedType() throws Exception { + String TYPE = "DeclaredType"; + ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE); + assertNull("Unexpected ServiceDiscovery object.", sd); + } + + + @Test + public void testGetPropertiesFileImplWithAliasServiceInjection() throws Exception { + String TYPE = "PROPERTIES_FILE"; + ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE, new DefaultAliasService()); + assertNotNull("Expected to get a ServiceDiscovery object.", sd); + assertEquals("Unexpected ServiceDiscovery type.", TYPE, sd.getType()); + + // Verify that the AliasService was injected as expected + Field aliasServiceField = sd.getClass().getDeclaredField("aliasService"); + aliasServiceField.setAccessible(true); + Object fieldValue = aliasServiceField.get(sd); + assertNotNull(fieldValue); + assertTrue(AliasService.class.isAssignableFrom(fieldValue.getClass())); + } + + +} http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java ---------------------------------------------------------------------- diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java new file mode 100644 index 0000000..4a5323e --- /dev/null +++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java @@ -0,0 +1,66 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. 
The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * <p> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package org.apache.hadoop.gateway.topology.discovery.test.extension; + +import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery; +import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig; + +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * This implementation is intended to be used by tests for which the actual service URLs are of no importance, such that + * tests can be written without having a valid service registry (e.g., Ambari) available. + */ +public class DummyServiceDiscovery implements ServiceDiscovery { + + static final String TYPE = "DUMMY"; + + private static final Cluster DUMMY = new Cluster() { + @Override + public String getName() { + return "dummy"; + } + + @Override + public List<String> getServiceURLs(String serviceName) { + return Collections.singletonList("http://servicehost:9999/dummy"); + } + }; + + private static final Map<String, Cluster> CLUSTERS = new HashMap<>(); + static { + CLUSTERS.put(DUMMY.getName(), DUMMY); + } + + @Override + public String getType() { + return TYPE; + } + + @Override + public Map<String, Cluster> discover(ServiceDiscoveryConfig config) { + return CLUSTERS; + } + + @Override + public Cluster discover(ServiceDiscoveryConfig config, String clusterName) { + return DUMMY; + } +} http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java ---------------------------------------------------------------------- diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java new file mode 100644 index 0000000..d47c38d --- /dev/null +++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * <p> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ +package org.apache.hadoop.gateway.topology.discovery.test.extension; + +import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery; +import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType; + +public class DummyServiceDiscoveryType implements ServiceDiscoveryType { + @Override + public String getType() { + return DummyServiceDiscovery.TYPE; + } + + @Override + public ServiceDiscovery newInstance() { + return new DummyServiceDiscovery(); + } +} http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java ---------------------------------------------------------------------- diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java new file mode 100644 index 0000000..a7fc34a --- /dev/null +++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java @@ -0,0 +1,108 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.gateway.topology.discovery.test.extension; + +import org.apache.hadoop.gateway.services.security.AliasService; +import org.apache.hadoop.gateway.topology.discovery.GatewayService; +import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery; +import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig; + +import java.io.FileInputStream; +import java.io.IOException; +import java.util.*; + +class PropertiesFileServiceDiscovery implements ServiceDiscovery { + + static final String TYPE = "PROPERTIES_FILE"; + + @GatewayService + AliasService aliasService; + + @Override + public String getType() { + return TYPE; + } + + @Override + public Map<String, ServiceDiscovery.Cluster> discover(ServiceDiscoveryConfig config) { + + Map<String, ServiceDiscovery.Cluster> result = new HashMap<>(); + + Properties p = new Properties(); + try { + p.load(new FileInputStream(config.getAddress())); + + Map<String, Map<String, List<String>>> clusters = new HashMap<>(); + for (Object key : p.keySet()) { + String propertyKey = (String)key; + String[] parts = propertyKey.split("\\."); + if (parts.length == 2) { + String clusterName = parts[0]; + String serviceName = parts[1]; + String serviceURL = p.getProperty(propertyKey); + if (!clusters.containsKey(clusterName)) { + clusters.put(clusterName, new HashMap<String, List<String>>()); + } + Map<String, List<String>> serviceURLs = clusters.get(clusterName); + if (!serviceURLs.containsKey(serviceName)) { + serviceURLs.put(serviceName, new ArrayList<String>()); + } + serviceURLs.get(serviceName).add(serviceURL); + } + } + + for (String clusterName : clusters.keySet()) { + result.put(clusterName, + new PropertiesFileServiceDiscovery.Cluster(clusterName, clusters.get(clusterName))); + } + } catch (IOException e) { + e.printStackTrace(); + } + + return result; + } + + + @Override + public ServiceDiscovery.Cluster discover(ServiceDiscoveryConfig config, String clusterName) { + Map<String, ServiceDiscovery.Cluster> clusters = discover(config); + return clusters.get(clusterName); + } + + + static class Cluster implements ServiceDiscovery.Cluster { + private String name; + private Map<String, List<String>> serviceURLS = new HashMap<>(); + + Cluster(String name, Map<String, List<String>> serviceURLs) { + this.name = name; + this.serviceURLS.putAll(serviceURLs); + } + + @Override + public String getName() { + return name; + } + + @Override + public List<String> getServiceURLs(String serviceName) { + return serviceURLS.get(serviceName); + } + } + +} http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java ---------------------------------------------------------------------- diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java new file mode 100644 index 0000000..2cfd998 --- /dev/null +++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java @@ -0,0 +1,35 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.gateway.topology.discovery.test.extension; + +import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery; +import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType; + +public class PropertiesFileServiceDiscoveryType implements ServiceDiscoveryType { + + @Override + public String getType() { + return PropertiesFileServiceDiscovery.TYPE; + } + + @Override + public ServiceDiscovery newInstance() { + return new PropertiesFileServiceDiscovery(); + } + +} http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java ---------------------------------------------------------------------- diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java new file mode 100644 index 0000000..8f7b71a --- /dev/null +++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java @@ -0,0 +1,40 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ +package org.apache.hadoop.gateway.topology.discovery.test.extension; + +import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery; +import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig; + +import java.util.Map; + +public class SneakyServiceDiscoveryImpl implements ServiceDiscovery { + @Override + public String getType() { + return "ActualType"; + } + + @Override + public Map<String, Cluster> discover(ServiceDiscoveryConfig config) { + return null; + } + + @Override + public Cluster discover(ServiceDiscoveryConfig config, String clusterName) { + return null; + } + +} http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java ---------------------------------------------------------------------- diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java new file mode 100644 index 0000000..97665dc --- /dev/null +++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java @@ -0,0 +1,33 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package org.apache.hadoop.gateway.topology.discovery.test.extension; + +import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery; +import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType; + +public class SneakyServiceDiscoveryType implements ServiceDiscoveryType { + @Override + public String getType() { + return "DeclaredType"; + } + + @Override + public ServiceDiscovery newInstance() { + return new SneakyServiceDiscoveryImpl(); + } +} + http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java ---------------------------------------------------------------------- diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java new file mode 100644 index 0000000..3dac66a --- /dev/null +++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java @@ -0,0 +1,218 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. 
The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package org.apache.hadoop.gateway.topology.simple; + +import java.io.File; +import java.io.FileWriter; +import java.io.Writer; +import java.util.*; + +import org.junit.Test; +import static org.junit.Assert.*; + + +public class SimpleDescriptorFactoryTest { + + + @Test + public void testParseJSONSimpleDescriptor() throws Exception { + + final String discoveryType = "AMBARI"; + final String discoveryAddress = "http://c6401.ambari.apache.org:8080"; + final String discoveryUser = "admin"; + final String providerConfig = "ambari-cluster-policy.xml"; + final String clusterName = "myCluster"; + + final Map<String, List<String>> services = new HashMap<>(); + services.put("NODEMANAGER", null); + services.put("JOBTRACKER", null); + services.put("RESOURCEMANAGER", null); + services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org")); + services.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080")); + + String fileName = "test-topology.json"; + File testJSON = null; + try { + testJSON = writeJSON(fileName, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services); + SimpleDescriptor sd = SimpleDescriptorFactory.parse(testJSON.getAbsolutePath()); + validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services); + } catch (Exception e) { + e.printStackTrace(); + } finally { + if (testJSON != null) { + try { + testJSON.delete(); + } catch (Exception e) { + // Ignore + } + } + } + } + + @Test + public void testParseYAMLSimpleDescriptor() throws Exception { + + final String discoveryType = "AMBARI"; + final String discoveryAddress = "http://c6401.ambari.apache.org:8080"; + final String discoveryUser = "joeblow"; + final String providerConfig = "ambari-cluster-policy.xml"; + final String clusterName = "myCluster"; + + final Map<String, List<String>> services = new HashMap<>(); + services.put("NODEMANAGER", null); + services.put("JOBTRACKER", null); + services.put("RESOURCEMANAGER", null); + services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org")); + services.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080")); + + String fileName = "test-topology.yml"; + File testYAML = null; + try { + testYAML = writeYAML(fileName, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services); + SimpleDescriptor sd = SimpleDescriptorFactory.parse(testYAML.getAbsolutePath()); + validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services); + } catch (Exception e) { + e.printStackTrace(); + } finally { + if (testYAML != null) { + try { + testYAML.delete(); + } catch (Exception e) { + // Ignore + } + } + } + } + + + private void validateSimpleDescriptor(SimpleDescriptor sd, + String discoveryType, + String discoveryAddress, + 
String providerConfig, + String clusterName, + Map<String, List<String>> expectedServices) { + assertNotNull(sd); + assertEquals(discoveryType, sd.getDiscoveryType()); + assertEquals(discoveryAddress, sd.getDiscoveryAddress()); + assertEquals(providerConfig, sd.getProviderConfig()); + assertEquals(clusterName, sd.getClusterName()); + + List<SimpleDescriptor.Service> actualServices = sd.getServices(); + + assertEquals(expectedServices.size(), actualServices.size()); + + for (SimpleDescriptor.Service actualService : actualServices) { + assertTrue(expectedServices.containsKey(actualService.getName())); + assertEquals(expectedServices.get(actualService.getName()), actualService.getURLs()); + } + } + + + private File writeJSON(String path, String content) throws Exception { + File f = new File(path); + + Writer fw = new FileWriter(f); + fw.write(content); + fw.flush(); + fw.close(); + + return f; + } + + + private File writeJSON(String path, + String discoveryType, + String discoveryAddress, + String discoveryUser, + String providerConfig, + String clusterName, + Map<String, List<String>> services) throws Exception { + File f = new File(path); + + Writer fw = new FileWriter(f); + fw.write("{" + "\n"); + fw.write("\"discovery-type\":\"" + discoveryType + "\",\n"); + fw.write("\"discovery-address\":\"" + discoveryAddress + "\",\n"); + fw.write("\"discovery-user\":\"" + discoveryUser + "\",\n"); + fw.write("\"provider-config-ref\":\"" + providerConfig + "\",\n"); + fw.write("\"cluster\":\"" + clusterName + "\",\n"); + fw.write("\"services\":[\n"); + + int i = 0; + for (String name : services.keySet()) { + fw.write("{\"name\":\"" + name + "\""); + List<String> urls = services.get(name); + if (urls != null) { + fw.write(", \"urls\":["); + Iterator<String> urlIter = urls.iterator(); + while (urlIter.hasNext()) { + fw.write("\"" + urlIter.next() + "\""); + if (urlIter.hasNext()) { + fw.write(", "); + } + } + fw.write("]"); + } + fw.write("}"); + if (i++ < services.size() - 1) { + fw.write(","); + } + fw.write("\n"); + } + fw.write("]\n"); + fw.write("}\n"); + fw.flush(); + fw.close(); + + return f; + } + + private File writeYAML(String path, + String discoveryType, + String discoveryAddress, + String discoveryUser, + String providerConfig, + String clusterName, + Map<String, List<String>> services) throws Exception { + File f = new File(path); + + Writer fw = new FileWriter(f); + fw.write("---" + "\n"); + fw.write("discovery-type: " + discoveryType + "\n"); + fw.write("discovery-address: " + discoveryAddress + "\n"); + fw.write("discovery-user: " + discoveryUser + "\n"); + fw.write("provider-config-ref: " + providerConfig + "\n"); + fw.write("cluster: " + clusterName+ "\n"); + fw.write("services:\n"); + for (String name : services.keySet()) { + fw.write(" - name: " + name + "\n"); + List<String> urls = services.get(name); + if (urls != null) { + fw.write(" urls:\n"); + for (String url : urls) { + fw.write(" - " + url + "\n"); + } + } + } + fw.flush(); + fw.close(); + + return f; + } + + +} http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java ---------------------------------------------------------------------- diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java new file mode 100644 index 0000000..90c7146 --- /dev/null +++ 
b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java @@ -0,0 +1,239 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.gateway.topology.simple; + +import org.apache.hadoop.gateway.topology.validation.TopologyValidator; +import org.apache.hadoop.gateway.util.XmlUtils; +import org.easymock.EasyMock; +import org.junit.Test; +import org.w3c.dom.Document; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; +import org.xml.sax.SAXException; + +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathFactory; +import java.io.*; +import java.util.*; + +import static org.junit.Assert.*; + + +public class SimpleDescriptorHandlerTest { + + private static final String TEST_PROVIDER_CONFIG = + " <gateway>\n" + + " <provider>\n" + + " <role>authentication</role>\n" + + " <name>ShiroProvider</name>\n" + + " <enabled>true</enabled>\n" + + " <param>\n" + + " <!-- \n" + + " session timeout in minutes, this is really idle timeout,\n" + + " defaults to 30mins, if the property value is not defined,, \n" + + " current client authentication would expire if client idles contiuosly for more than this value\n" + + " -->\n" + + " <name>sessionTimeout</name>\n" + + " <value>30</value>\n" + + " </param>\n" + + " <param>\n" + + " <name>main.ldapRealm</name>\n" + + " <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n" + + " </param>\n" + + " <param>\n" + + " <name>main.ldapContextFactory</name>\n" + + " <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>\n" + + " </param>\n" + + " <param>\n" + + " <name>main.ldapRealm.contextFactory</name>\n" + + " <value>$ldapContextFactory</value>\n" + + " </param>\n" + + " <param>\n" + + " <name>main.ldapRealm.userDnTemplate</name>\n" + + " <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" + + " </param>\n" + + " <param>\n" + + " <name>main.ldapRealm.contextFactory.url</name>\n" + + " <value>ldap://localhost:33389</value>\n" + + " </param>\n" + + " <param>\n" + + " <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" + + " <value>simple</value>\n" + + " </param>\n" + + " <param>\n" + + " <name>urls./**</name>\n" + + " <value>authcBasic</value>\n" + + " </param>\n" + + " </provider>\n" + + "\n" + + " <provider>\n" + + " <role>identity-assertion</role>\n" + + " <name>Default</name>\n" + + " <enabled>true</enabled>\n" + + " </provider>\n" + + "\n" + + " <!--\n" + + " Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.\n" + + " For example, a hadoop service running in AWS may return a response that includes URLs containing the\n" + + " some AWS internal host name. 
If the client needs to make a subsequent request to the host identified\n" + + " in those URLs they need to be mapped to external host names that the client Knox can use to connect.\n" + + "\n" + + " If the external hostname and internal host names are same turn of this provider by setting the value of\n" + + " enabled parameter as false.\n" + + "\n" + + " The name parameter specifies the external host names in a comma separated list.\n" + + " The value parameter specifies corresponding internal host names in a comma separated list.\n" + + "\n" + + " Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in out\n" + + " of box sandbox.xml. This is because Sandbox uses port mapping to allow clients to connect to the\n" + + " Hadoop services using localhost. In real clusters, external host names would almost never be localhost.\n" + + " -->\n" + + " <provider>\n" + + " <role>hostmap</role>\n" + + " <name>static</name>\n" + + " <enabled>true</enabled>\n" + + " <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" + + " </provider>\n" + + " </gateway>\n"; + + + /** + * KNOX-1006 + * + * N.B. This test depends on the DummyServiceDiscovery extension being configured: + * org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery + */ + @Test + public void testSimpleDescriptorHandler() throws Exception { + + final String type = "DUMMY"; + final String address = "http://c6401.ambari.apache.org:8080"; + final String clusterName = "dummy"; + final Map<String, List<String>> serviceURLs = new HashMap<>(); + serviceURLs.put("NAMENODE", null); + serviceURLs.put("JOBTRACKER", null); + serviceURLs.put("WEBHDFS", null); + serviceURLs.put("WEBHCAT", null); + serviceURLs.put("OOZIE", null); + serviceURLs.put("WEBHBASE", null); + serviceURLs.put("HIVE", null); + serviceURLs.put("RESOURCEMANAGER", null); + serviceURLs.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080")); + + // Write the externalized provider config to a temp file + File providerConfig = writeProviderConfig("ambari-cluster-policy.xml", TEST_PROVIDER_CONFIG); + + File topologyFile = null; + try { + File destDir = (new File(".")).getCanonicalFile(); + + // Mock out the simple descriptor + SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class); + EasyMock.expect(testDescriptor.getName()).andReturn("mysimpledescriptor").anyTimes(); + EasyMock.expect(testDescriptor.getDiscoveryAddress()).andReturn(address).anyTimes(); + EasyMock.expect(testDescriptor.getDiscoveryType()).andReturn(type).anyTimes(); + EasyMock.expect(testDescriptor.getDiscoveryUser()).andReturn(null).anyTimes(); + EasyMock.expect(testDescriptor.getProviderConfig()).andReturn(providerConfig.getAbsolutePath()).anyTimes(); + EasyMock.expect(testDescriptor.getClusterName()).andReturn(clusterName).anyTimes(); + List<SimpleDescriptor.Service> serviceMocks = new ArrayList<>(); + for (String serviceName : serviceURLs.keySet()) { + SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class); + EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes(); + EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes(); + EasyMock.replay(svc); + serviceMocks.add(svc); + } + EasyMock.expect(testDescriptor.getServices()).andReturn(serviceMocks).anyTimes(); + EasyMock.replay(testDescriptor); + + // Invoke the simple descriptor handler + Map<String, File> files = + SimpleDescriptorHandler.handle(testDescriptor, + 
providerConfig.getParentFile(), // simple desc co-located with provider config + destDir); + topologyFile = files.get("topology"); + + // Validate the resulting topology descriptor + assertTrue(topologyFile.exists()); + + // Validate the topology descriptor's correctness + TopologyValidator validator = new TopologyValidator( topologyFile.getAbsolutePath() ); + if( !validator.validateTopology() ){ + throw new SAXException( validator.getErrorString() ); + } + + XPathFactory xPathfactory = XPathFactory.newInstance(); + XPath xpath = xPathfactory.newXPath(); + + // Parse the topology descriptor + Document topologyXml = XmlUtils.readXml(topologyFile); + + // Validate the provider configuration + Document extProviderConf = XmlUtils.readXml(new ByteArrayInputStream(TEST_PROVIDER_CONFIG.getBytes())); + Node gatewayNode = (Node) xpath.compile("/topology/gateway").evaluate(topologyXml, XPathConstants.NODE); + assertTrue("Resulting provider config should be identical to the referenced content.", + extProviderConf.getDocumentElement().isEqualNode(gatewayNode)); + + // Validate the service declarations + Map<String, List<String>> topologyServiceURLs = new HashMap<>(); + NodeList serviceNodes = + (NodeList) xpath.compile("/topology/service").evaluate(topologyXml, XPathConstants.NODESET); + for (int serviceNodeIndex=0; serviceNodeIndex < serviceNodes.getLength(); serviceNodeIndex++) { + Node serviceNode = serviceNodes.item(serviceNodeIndex); + Node roleNode = (Node) xpath.compile("role/text()").evaluate(serviceNode, XPathConstants.NODE); + assertNotNull(roleNode); + String role = roleNode.getNodeValue(); + NodeList urlNodes = (NodeList) xpath.compile("url/text()").evaluate(serviceNode, XPathConstants.NODESET); + for(int urlNodeIndex = 0 ; urlNodeIndex < urlNodes.getLength(); urlNodeIndex++) { + Node urlNode = urlNodes.item(urlNodeIndex); + assertNotNull(urlNode); + String url = urlNode.getNodeValue(); + assertNotNull("Every declared service should have a URL.", url); + if (!topologyServiceURLs.containsKey(role)) { + topologyServiceURLs.put(role, new ArrayList<String>()); + } + topologyServiceURLs.get(role).add(url); + } + } + assertEquals("Unexpected number of service declarations.", serviceURLs.size(), topologyServiceURLs.size()); + + } catch (Exception e) { + e.printStackTrace(); + fail(e.getMessage()); + } finally { + providerConfig.delete(); + if (topologyFile != null) { + topologyFile.delete(); + } + } + } + + + private File writeProviderConfig(String path, String content) throws IOException { + File f = new File(path); + + Writer fw = new FileWriter(f); + fw.write(content); + fw.flush(); + fw.close(); + + return f; + } + +} http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType ---------------------------------------------------------------------- diff --git a/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType b/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType new file mode 100644 index 0000000..82a6f86 --- /dev/null +++ b/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType @@ -0,0 +1,21 @@ +########################################################################## +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +########################################################################## + +org.apache.hadoop.gateway.topology.discovery.test.extension.SneakyServiceDiscoveryType +org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscoveryType +org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscoveryType \ No newline at end of file http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml ---------------------------------------------------------------------- diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml new file mode 100644 index 0000000..8223bea --- /dev/null +++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml @@ -0,0 +1,74 @@ +<gateway> + <provider> + <role>authentication</role> + <name>ShiroProvider</name> + <enabled>true</enabled> + <param> + <!-- + session timeout in minutes, this is really idle timeout, + defaults to 30mins, if the property value is not defined,, + current client authentication would expire if client idles contiuosly for more than this value + --> + <name>sessionTimeout</name> + <value>30</value> + </param> + <param> + <name>main.ldapRealm</name> + <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value> + </param> + <param> + <name>main.ldapContextFactory</name> + <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value> + </param> + <param> + <name>main.ldapRealm.contextFactory</name> + <value>$ldapContextFactory</value> + </param> + <param> + <name>main.ldapRealm.userDnTemplate</name> + <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value> + </param> + <param> + <name>main.ldapRealm.contextFactory.url</name> + <value>ldap://localhost:33389</value> + </param> + <param> + <name>main.ldapRealm.contextFactory.authenticationMechanism</name> + <value>simple</value> + </param> + <param> + <name>urls./**</name> + <value>authcBasic</value> + </param> + </provider> + + <provider> + <role>identity-assertion</role> + <name>Default</name> + <enabled>true</enabled> + </provider> + + <!-- + Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names. + For example, a hadoop service running in AWS may return a response that includes URLs containing the + some AWS internal host name. If the client needs to make a subsequent request to the host identified + in those URLs they need to be mapped to external host names that the client Knox can use to connect. 
+ + If the external hostname and internal host names are same turn of this provider by setting the value of + enabled parameter as false. + + The name parameter specifies the external host names in a comma separated list. + The value parameter specifies corresponding internal host names in a comma separated list. + + Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in out + of box sandbox.xml. This is because Sandbox uses port mapping to allow clients to connect to the + Hadoop services using localhost. In real clusters, external host names would almost never be localhost. + --> + <provider> + <role>hostmap</role> + <name>static</name> + <enabled>true</enabled> + <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param> + </provider> + +</gateway> \ No newline at end of file http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json ---------------------------------------------------------------------- diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json new file mode 100644 index 0000000..45407a7 --- /dev/null +++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json @@ -0,0 +1,18 @@ +{ + "discovery-type":"DUMMY", + "discovery-address":"http://c6401.ambari.apache.org:8080", + "provider-config-ref":"../shared-providers/ambari-cluster-policy.xml", + "cluster":"dummy", + "services":[ + {"name":"NAMENODE"}, + {"name":"JOBTRACKER"}, + {"name":"WEBHDFS"}, + {"name":"WEBHCAT"}, + {"name":"OOZIE"}, + {"name":"WEBHBASE"}, + {"name":"HIVE"}, + {"name":"RESOURCEMANAGER"}, + {"name":"AMBARI", "urls":["http://c6401.ambari.apache.org:8080"]}, + {"name":"AMBARIUI", "urls":["http://c6401.ambari.apache.org:8080"]} + ] +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java ---------------------------------------------------------------------- diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java new file mode 100644 index 0000000..554ddbe --- /dev/null +++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java @@ -0,0 +1,29 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ +package org.apache.hadoop.gateway.topology.discovery; + +import java.lang.annotation.*; + +/** + * This annotation can be used to inject gateway services into a ServiceDiscovery implementation. + */ +@Documented +@Target(ElementType.FIELD) +@Retention(RetentionPolicy.RUNTIME) +public @interface GatewayService { + +} http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java ---------------------------------------------------------------------- diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java new file mode 100644 index 0000000..eefa30b --- /dev/null +++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java @@ -0,0 +1,76 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package org.apache.hadoop.gateway.topology.discovery; + +import java.util.List; +import java.util.Map; + + +/** + * Implementations provide the means by which Hadoop service endpoint URLs are discovered from a source with knowledge + * about the service topology of one or more clusters. + */ +public interface ServiceDiscovery { + + /** + * This is the type specified in a simple descriptor to indicate which ServiceDiscovery implementation to employ. + * + * @return The identifier for the service discovery type. + */ + String getType(); + + + /** + * Discover details of all the clusters known to the target registry. + * + * @param config The configuration for the discovery invocation + * + * @return A Map of the discovered service data, keyed by the cluster name. + */ + Map<String, Cluster> discover(ServiceDiscoveryConfig config); + + + /** + * Discover details for a single cluster. + * + * @param config The configuration for the discovery invocation + * @param clusterName The name of a particular cluster + * + * @return The discovered service data for the specified cluster + */ + Cluster discover(ServiceDiscoveryConfig config, String clusterName); + + + /** + * A handle to the service discovery result. + */ + interface Cluster { + + /** + * @return The name of the cluster + */ + String getName(); + + /** + * @param serviceName The name of the service + * @return The URLs for the specified service in this cluster. 
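+     *
+     *         For example (an illustrative sketch only; assumes a ServiceDiscovery
+     *         instance "sd" and a ServiceDiscoveryConfig instance "config" are in scope):
+     * <pre>
+     *     ServiceDiscovery.Cluster cluster = sd.discover(config, "dummy");
+     *     List&lt;String&gt; urls = cluster.getServiceURLs("WEBHDFS");
+     * </pre>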
+ */ + List<String> getServiceURLs(String serviceName); + } + + +} http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java ---------------------------------------------------------------------- diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java new file mode 100644 index 0000000..6b2e741 --- /dev/null +++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java @@ -0,0 +1,42 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package org.apache.hadoop.gateway.topology.discovery; + +/** + * ServiceDiscovery configuration details. + */ +public interface ServiceDiscoveryConfig { + + /** + * + * @return The address of the discovery source. + */ + String getAddress(); + + /** + * + * @return The username configured for interactions with the discovery source. + */ + String getUser(); + + /** + * + * @return The alias for the password required for interactions with the discovery source. + */ + String getPasswordAlias(); + +} http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java ---------------------------------------------------------------------- diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java new file mode 100644 index 0000000..cddced1 --- /dev/null +++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java @@ -0,0 +1,40 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ +package org.apache.hadoop.gateway.topology.discovery; + +/** + * ServiceDiscovery extensions must implement this interface to add support for a new discovery source. + * + * The ServiceLoader mechanism is used to include ServiceDiscovery extensions, and implementations of this interface + * are the providers. + */ +public interface ServiceDiscoveryType { + + /** + * + * @return The identifier for the discovery type. + */ + String getType(); + + + /** + * + * @return A new instance of the ServiceDiscovery implementation provided by this type. + */ + ServiceDiscovery newInstance(); + +} http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/pom.xml ---------------------------------------------------------------------- diff --git a/pom.xml b/pom.xml index d2f4176..2708f6b 100644 --- a/pom.xml +++ b/pom.xml @@ -45,6 +45,7 @@ <module>gateway-i18n-logging-log4j</module> <module>gateway-i18n-logging-sl4j</module> <module>gateway-spi</module> + <module>gateway-discovery-ambari</module> <module>gateway-server</module> <module>gateway-server-launcher</module> <module>gateway-server-xforwarded-filter</module> @@ -684,6 +685,11 @@ </dependency> <dependency> <groupId>${gateway-group}</groupId> + <artifactId>gateway-discovery-ambari</artifactId> + <version>${gateway-version}</version> + </dependency> + <dependency> + <groupId>${gateway-group}</groupId> <artifactId>gateway-release</artifactId> <version>${gateway-version}</version> </dependency> @@ -702,17 +708,16 @@ <artifactId>gateway-shell-samples</artifactId> <version>${gateway-version}</version> </dependency> - - <dependency> - <groupId>org.picketlink</groupId> - <artifactId>picketlink-federation</artifactId> - <version>2.7.0.CR3</version> - </dependency> - <dependency> - <groupId>org.jboss.logging</groupId> - <artifactId>jboss-logging</artifactId> - <version>3.2.0.Final</version> - </dependency> + <dependency> + <groupId>org.picketlink</groupId> + <artifactId>picketlink-federation</artifactId> + <version>2.7.0.CR3</version> + </dependency> + <dependency> + <groupId>org.jboss.logging</groupId> + <artifactId>jboss-logging</artifactId> + <version>3.2.0.Final</version> + </dependency> <dependency> <groupId>org.glassfish.jersey.containers</groupId> <artifactId>jersey-container-servlet</artifactId>
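
As a point of reference for the ServiceDiscoveryType contract introduced above, the following is a minimal sketch of a discovery extension (all class and package names here are hypothetical, not part of this patch). Such a provider would be registered by listing its fully qualified class name in META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType, exactly as the test resource above registers the Dummy, Sneaky and PropertiesFile types.

    package org.example.discovery; // hypothetical package, for illustration only

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;

    public class ExampleServiceDiscoveryType implements ServiceDiscoveryType {

        public String getType() {
            return "EXAMPLE"; // the discovery-type value a simple descriptor would reference
        }

        public ServiceDiscovery newInstance() {
            return new ExampleServiceDiscovery();
        }

        private static class ExampleServiceDiscovery implements ServiceDiscovery {

            public String getType() {
                return "EXAMPLE";
            }

            public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
                // A real implementation would query the source at config.getAddress();
                // this sketch returns a single hard-coded cluster.
                Map<String, Cluster> clusters = new HashMap<>();
                clusters.put("dummy", new ExampleCluster("dummy"));
                return clusters;
            }

            public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
                return discover(config).get(clusterName);
            }
        }

        private static class ExampleCluster implements ServiceDiscovery.Cluster {
            private final String name;

            ExampleCluster(String name) {
                this.name = name;
            }

            public String getName() {
                return name;
            }

            public List<String> getServiceURLs(String serviceName) {
                // Hard-coded for illustration; a real implementation would resolve the
                // URLs for the named service from the discovery source.
                return "WEBHDFS".equals(serviceName)
                           ? Collections.singletonList("http://localhost:50070/webhdfs")
                           : null;
            }
        }
    }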
