Repository: knox
Updated Branches:
  refs/heads/master e0fea744d -> 2420226cb
KNOX-1269 - Generate services declared in descriptor without URLs or params, but with valid HaProvider ZK config

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/2420226c
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/2420226c
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/2420226c

Branch: refs/heads/master
Commit: 2420226cbde79a8b7649e02122baadbbe4acbf06
Parents: e0fea74
Author: Phil Zampino <[email protected]>
Authored: Mon Apr 23 14:41:28 2018 -0400
Committer: Phil Zampino <[email protected]>
Committed: Mon Apr 23 14:41:28 2018 -0400

----------------------------------------------------------------------
 .../simple/SimpleDescriptorHandler.java         | 25 +++++++++++++++-
 .../simple/SimpleDescriptorHandlerTest.java     | 30 ++++++++++++++------
 2 files changed, 45 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/2420226c/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
index e83f961..28a8094 100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
@@ -104,6 +104,7 @@ public class SimpleDescriptorHandler {
 
     public static Map<String, File> handle(GatewayConfig config, SimpleDescriptor desc, File srcDirectory, File destDirectory, Service...gatewayServices) {
 
+        List<String> declaredServiceNames = new ArrayList<>();
         List<String> validServiceNames = new ArrayList<>();
         Map<String, String> serviceVersions = new HashMap<>();
         Map<String, Map<String, String>> serviceParams = new HashMap<>();
@@ -120,6 +121,7 @@ public class SimpleDescriptorHandler {
 
         for (SimpleDescriptor.Service descService : desc.getServices()) {
             String serviceName = descService.getName();
+            declaredServiceNames.add(serviceName);
 
             String serviceVer = descService.getVersion();
             if (serviceVer != null) {
@@ -173,7 +175,15 @@ public class SimpleDescriptorHandler {
         }
 
         // Generate the topology file
-        return generateTopology(desc, srcDirectory, destDirectory, cluster, validServiceNames, serviceVersions, serviceURLs, serviceParams);
+        return generateTopology(desc,
+                                srcDirectory,
+                                destDirectory,
+                                cluster,
+                                declaredServiceNames,
+                                validServiceNames,
+                                serviceVersions,
+                                serviceURLs,
+                                serviceParams);
     }
 
 
@@ -338,6 +348,7 @@ public class SimpleDescriptorHandler {
                                                          final File srcDirectory,
                                                          final File destDirectory,
                                                          final ServiceDiscovery.Cluster cluster,
+                                                         final List<String> declaredServiceNames,
                                                          final List<String> validServiceNames,
                                                          final Map<String, String> serviceVersions,
                                                          final Map<String, List<String>> serviceURLs,
@@ -425,6 +436,18 @@ public class SimpleDescriptorHandler {
             // Services
             // Sort the service names to write the services alphabetically
             List<String> serviceNames = new ArrayList<>(validServiceNames);
+
+            // Add any declared services, which were not validated, but which have ZK-based HA provider config
+            for (String haServiceName : haServiceParams.keySet()) {
+                // If the service configured for HA was declared in the descriptor, then add it to the services to be
+                // serialized (if it's not already included)
+                if (declaredServiceNames.contains(haServiceName)) {
+                    if (!serviceNames.contains(haServiceName)) {
+                        serviceNames.add(haServiceName);
+                    }
+                }
+            }
+
             Collections.sort(serviceNames);
 
             // Write the service declarations

http://git-wip-us.apache.org/repos/asf/knox/blob/2420226c/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
index aee2908..0063a4e 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
@@ -58,6 +58,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
+import static org.testng.Assert.assertNull;
 
 
 public class SimpleDescriptorHandlerTest {
@@ -123,6 +124,7 @@ public class SimpleDescriptorHandlerTest {
             "            <param><name>HIVE</name><value>enabled=auto;retrySleep=20;maxRetryAttempts=3</value></param>\n" +
             "            <param><name>WEBHDFS</name><value>enabled=true;retrySleep=40;maxRetryAttempts=5</value></param>\n" +
             "            <param><name>WEBHBASE</name><value>enabled=auto;retrySleep=30;maxRetryAttempts=3;maxFailoverAttempts=2;failoverSleep=10</value></param>\n" +
+            "            <param><name>ATLAS</name><value>enabled=auto;retrySleep=30;maxRetryAttempts=3;maxFailoverAttempts=2;failoverSleep=10</value></param>\n" +
             "        </provider>\n" +
             "    </gateway>\n";
 
@@ -535,6 +537,9 @@ public class SimpleDescriptorHandlerTest {
         final String WEBHBASE_HA_ENABLED = "false";
         final String WEBHBASE_HA_ENSEMBLE = "http://zkhost1:1281,http://zkhost2:1281";
 
+        final String ATLAS_HA_ENABLED = "true";
+        final String ATLAS_HA_ENSEMBLE = "http://zkhost5:1281,http://zkhost6:1281,http://zkhost7:1281";
+
         final Properties DISCOVERY_PROPERTIES = new Properties();
         DISCOVERY_PROPERTIES.setProperty(clusterName + ".name", clusterName);
         DISCOVERY_PROPERTIES.setProperty(clusterName + ".NAMENODE.url", "hdfs://namenodehost:8020");
@@ -547,6 +552,8 @@ public class SimpleDescriptorHandlerTest {
         DISCOVERY_PROPERTIES.setProperty(clusterName + ".HIVE.namespace", HIVE_HA_NAMESPACE);
         DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHBASE.haEnabled", WEBHBASE_HA_ENABLED);
         DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHBASE.ensemble", WEBHBASE_HA_ENSEMBLE);
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".ATLAS.haEnabled", ATLAS_HA_ENABLED);
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".ATLAS.ensemble", ATLAS_HA_ENSEMBLE);
 
         try {
             DISCOVERY_PROPERTIES.store(new FileOutputStream(discoveryConfig), null);
@@ -558,6 +565,8 @@
         serviceURLs.put("NAMENODE", null);
         serviceURLs.put("WEBHDFS", null);
         serviceURLs.put("RESOURCEMANAGER", null);
+        serviceURLs.put("ATLAS", null);
+        serviceURLs.put("HIVE", null);
 
         // Write the externalized provider config to a temp file
         File providerConfig = new File(System.getProperty("java.io.tmpdir"), "ambari-cluster-policy.xml");
@@ -626,6 +635,7 @@
             validateGeneratedProviderConfiguration(testProviderConfiguration, gatewayNode);
 
             // Validate the service declarations
+            List<String> generatedServiceDeclarations = new ArrayList<>();
             Map<String, List<String>> topologyServiceURLs = new HashMap<>();
             NodeList serviceNodes =
                         (NodeList) xpath.compile("/topology/service").evaluate(topologyXml, XPathConstants.NODESET);
@@ -636,6 +646,7 @@
                 Node roleNode = (Node) xpath.compile("role/text()").evaluate(serviceNode, XPathConstants.NODE);
                 assertNotNull(roleNode);
                 String role = roleNode.getNodeValue();
+                generatedServiceDeclarations.add(role);
 
                 // Validate the explicit version for the WEBHDFS service
                 if ("WEBHDFS".equals(role)) {
@@ -655,7 +666,7 @@
                     assertEquals(2, urlNodes.getLength());
                 }
 
-                // Validate the HIV service params
+                // Validate the HIVE service params
                 if ("HIVE".equals(role)) {
                     // Expecting HA-related service params
                     NodeList paramNodes = (NodeList) xpath.compile("param").evaluate(serviceNode, XPathConstants.NODESET);
@@ -675,23 +686,24 @@
                     assertEquals(HIVE_HA_NAMESPACE, hiveServiceParams.get("zookeeperNamespace"));
                 }
 
-                // Validate the HIV service params
-                if ("WEBHBASE".equals(role)) {
+                // Validate the ATLAS service params
+                if ("ATLAS".equals(role)) {
                     // Expecting HA-related service params
                     NodeList paramNodes = (NodeList) xpath.compile("param").evaluate(serviceNode, XPathConstants.NODESET);
                     assertNotNull(paramNodes);
-                    Map<String, String> webhbaseServiceParams = new HashMap<>();
+                    Map<String, String> atlasServiceParams = new HashMap<>();
                     for (int paramNodeIndex=0; paramNodeIndex < paramNodes.getLength(); paramNodeIndex++) {
                         Node paramNode = paramNodes.item(paramNodeIndex);
                         Node nameNode = (Node) xpath.compile("name/text()").evaluate(paramNode, XPathConstants.NODE);
                         assertNotNull(nameNode);
                         Node valueNode = (Node) xpath.compile("value/text()").evaluate(paramNode, XPathConstants.NODE);
                         assertNotNull(valueNode);
-                        webhbaseServiceParams.put(nameNode.getNodeValue(), valueNode.getNodeValue());
+                        atlasServiceParams.put(nameNode.getNodeValue(), valueNode.getNodeValue());
                     }
-                    assertEquals("Expected false because enabled=auto and service config indicates HA is NOT enabled",
-                                 WEBHBASE_HA_ENABLED, webhbaseServiceParams.get("haEnabled"));
-                    assertEquals(WEBHBASE_HA_ENSEMBLE, webhbaseServiceParams.get("zookeeperEnsemble"));
+                    assertEquals("Expected true because enabled=auto and service config indicates HA is enabled",
+                                 ATLAS_HA_ENABLED, atlasServiceParams.get("haEnabled"));
+                    assertEquals(ATLAS_HA_ENSEMBLE, atlasServiceParams.get("zookeeperEnsemble"));
+                    assertNull(atlasServiceParams.get("zookeeperNamespace"));
                 }
 
                 // Validate the URLs
@@ -711,7 +723,7 @@
                     }
                 }
             }
-            assertEquals("Unexpected number of service declarations.", serviceURLs.size(), topologyServiceURLs.size());
+            assertEquals("Unexpected number of service declarations.", serviceURLs.size(), generatedServiceDeclarations.size());
 
         } catch (Exception e) {
             e.printStackTrace();
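
For readers following the change, the heart of this commit is the service-name merge added to generateTopology(): a service that was declared in the simple descriptor but could not be validated (no URLs declared or discovered) is still written to the generated topology when the HaProvider carries ZooKeeper-based HA configuration for it. The following is a minimal, self-contained sketch of that merge, not the actual Knox source; the helper name mergeServiceNames and the Map<String, Map<String, String>> shape assumed for haServiceParams are illustrative only.

----------------------------------------------------------------------
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Illustrative sketch only -- not Knox source. Mirrors the merge logic added
// to SimpleDescriptorHandler.generateTopology() by this commit.
public class DeclaredHaServiceMergeSketch {

    // declaredServiceNames: every service named in the simple descriptor
    // validServiceNames:    the subset with declared or discovered URLs
    // haServiceParams:      per-service HaProvider params (shape assumed here)
    static List<String> mergeServiceNames(List<String> declaredServiceNames,
                                          List<String> validServiceNames,
                                          Map<String, Map<String, String>> haServiceParams) {
        List<String> serviceNames = new ArrayList<>(validServiceNames);

        // Add any declared services which were not validated, but which have
        // ZK-based HA provider config (if not already included).
        for (String haServiceName : haServiceParams.keySet()) {
            if (declaredServiceNames.contains(haServiceName) && !serviceNames.contains(haServiceName)) {
                serviceNames.add(haServiceName);
            }
        }

        // Service declarations are serialized alphabetically
        Collections.sort(serviceNames);
        return serviceNames;
    }

    public static void main(String[] args) {
        // ATLAS is declared without URLs, so it is not "valid"; it is still
        // emitted because the HaProvider defines a ZooKeeper ensemble for it.
        List<String> declared = Arrays.asList("WEBHDFS", "HIVE", "ATLAS");
        List<String> valid    = Arrays.asList("WEBHDFS", "HIVE");

        Map<String, String> atlasHa = new HashMap<>();
        atlasHa.put("haEnabled", "true");
        atlasHa.put("zookeeperEnsemble", "http://zkhost5:1281,http://zkhost6:1281,http://zkhost7:1281");

        Map<String, Map<String, String>> haServiceParams = new HashMap<>();
        haServiceParams.put("ATLAS", atlasHa);

        System.out.println(mergeServiceNames(declared, valid, haServiceParams));
        // Prints: [ATLAS, HIVE, WEBHDFS]
    }
}
----------------------------------------------------------------------

Run standalone, the sketch prints [ATLAS, HIVE, WEBHDFS]: ATLAS is carried into the topology purely on the strength of its HaProvider ZooKeeper config, which mirrors what the updated SimpleDescriptorHandlerTest asserts for the generated ATLAS service declaration.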
