This is an automated email from the ASF dual-hosted git repository.

benyoka pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/trunk by this push:
     new b84ad9f  AMBARI-24592 fix handling of stack defaults in blueprints (benyoka) (#2243)
b84ad9f is described below

commit b84ad9f03af89b7adabb22e14b429728246bf544
Author: benyoka <beny...@users.noreply.github.com>
AuthorDate: Tue Sep 4 20:28:00 2018 +0200

    AMBARI-24592 fix handling of stack defaults in blueprints (benyoka) (#2243)
---
 .../internal/BlueprintConfigurationProcessor.java  | 38 ++++++++--
 .../BlueprintConfigurationProcessorTest.java       | 86 +++++++++++++++++++++-
 2 files changed, 115 insertions(+), 9 deletions(-)

diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
index e109559..19c8e9f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
@@ -40,6 +40,9 @@ import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import javax.annotation.Nullable;
+import javax.validation.constraints.NotNull;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.ConfigHelper;
@@ -3377,7 +3380,7 @@ public class BlueprintConfigurationProcessor {
    *          to this).
    * @throws ConfigurationTopologyException
    */
-  private void setStackToolsAndFeatures(Configuration configuration, Set<String> configTypesUpdated)
+  protected void setStackToolsAndFeatures(Configuration configuration, Set<String> configTypesUpdated)
    throws ConfigurationTopologyException {
    ConfigHelper configHelper = clusterTopology.getAmbariContext().getConfigHelper();
     Stack stack = clusterTopology.getBlueprint().getStack();
@@ -3386,8 +3389,10 @@ public class BlueprintConfigurationProcessor {
 
     StackId stackId = new StackId(stackName, stackVersion);
 
-    Set<String> properties = Sets.newHashSet(ConfigHelper.CLUSTER_ENV_STACK_NAME_PROPERTY,
-      ConfigHelper.CLUSTER_ENV_STACK_ROOT_PROPERTY, ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY,
+    Set<String> properties = Sets.newHashSet(
+      ConfigHelper.CLUSTER_ENV_STACK_NAME_PROPERTY,
+      ConfigHelper.CLUSTER_ENV_STACK_ROOT_PROPERTY,
+      ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY,
       ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY,
       ConfigHelper.CLUSTER_ENV_STACK_PACKAGES_PROPERTY);
 
@@ -3397,11 +3402,14 @@ public class BlueprintConfigurationProcessor {
 
       for( String property : properties ){
         if (clusterEnvDefaultProperties.containsKey(property)) {
-          configuration.setProperty(CLUSTER_ENV_CONFIG_TYPE_NAME, property,
-            clusterEnvDefaultProperties.get(property));
-
-          // make sure to include the configuration type as being updated
-          configTypesUpdated.add(CLUSTER_ENV_CONFIG_TYPE_NAME);
+          String newValue = clusterEnvDefaultProperties.get(property);
+          String previous = configuration.setProperty(CLUSTER_ENV_CONFIG_TYPE_NAME, property, newValue);
+          if (!Objects.equals(
+            trimValue(previous, stack, CLUSTER_ENV_CONFIG_TYPE_NAME, property),
+            trimValue(newValue, stack, CLUSTER_ENV_CONFIG_TYPE_NAME, property))) {
+            // in case a property is updated make sure to include cluster-env as being updated
+            configTypesUpdated.add(CLUSTER_ENV_CONFIG_TYPE_NAME);
+          }
         }
       }
     } catch( AmbariException ambariException ){
@@ -3410,6 +3418,20 @@ public class BlueprintConfigurationProcessor {
     }
   }
 
+  private @Nullable String trimValue(@Nullable String value,
+                                     @NotNull Stack stack,
+                                     @NotNull String configType,
+                                     @NotNull String propertyName) {
+    if (null == value) {
+      return null;
+    }
+    else {
+      TrimmingStrategy trimmingStrategy =
+        PropertyValueTrimmingStrategyDefiner.defineTrimmingStrategy(stack, propertyName, configType);
+      return trimmingStrategy.trim(value);
+    }
+  }
+
   /**
    * Ensure that the specified property exists.
    * If not, set a default value.
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
index c37e659..996c09d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
@@ -23,6 +23,11 @@ import static java.util.Collections.emptyList;
 import static java.util.Collections.emptyMap;
 import static java.util.stream.Collectors.toList;
 import static java.util.stream.Collectors.toSet;
+import static org.apache.ambari.server.state.ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY;
+import static org.apache.ambari.server.state.ConfigHelper.CLUSTER_ENV_STACK_NAME_PROPERTY;
+import static org.apache.ambari.server.state.ConfigHelper.CLUSTER_ENV_STACK_PACKAGES_PROPERTY;
+import static org.apache.ambari.server.state.ConfigHelper.CLUSTER_ENV_STACK_ROOT_PROPERTY;
+import static org.apache.ambari.server.state.ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY;
 import static org.easymock.EasyMock.anyObject;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.replay;
@@ -106,12 +111,15 @@ public class BlueprintConfigurationProcessorTest extends EasyMockSupport {
 
   private static final Configuration EMPTY_CONFIG = new Configuration(emptyMap(), emptyMap());
   private final Map<String, Collection<String>> serviceComponents = new HashMap<>();
-  private final Map<String, Map<String, String>> stackProperties = new HashMap<>();
   private final Map<String, String> defaultClusterEnvProperties = new HashMap<>();
 
   private final String STACK_NAME = "testStack";
   private final String STACK_VERSION = "1";
 
+  private final String CLUSTER_ENV_PROP = "cluster-env";
+
+  private final Map<String, Map<String, String>> stackProperties = new HashMap<>();
+
   @Rule
   public EasyMockRule mocks = new EasyMockRule(this);
 
@@ -6520,6 +6528,82 @@ public class BlueprintConfigurationProcessorTest extends EasyMockSupport {
       expectedHostNameTwo, clusterEnv.get("dfs_ha_initial_namenode_standby"));
   }
 
+  private Map<String, String> defaultStackProps() {
+    return Maps.newHashMap(ImmutableMap.of(
+      CLUSTER_ENV_STACK_NAME_PROPERTY, STACK_NAME,
+      CLUSTER_ENV_STACK_ROOT_PROPERTY, "/usr/" + STACK_NAME,
+      CLUSTER_ENV_STACK_TOOLS_PROPERTY, "{ some tools... }",
+      CLUSTER_ENV_STACK_FEATURES_PROPERTY, "{ some features... }",
+      CLUSTER_ENV_STACK_PACKAGES_PROPERTY, "{ some packages... }"
+    ));
+  }
+
+  @Test
+  public void testSetStackToolsAndFeatures_ClusterEnvDidNotChange() throws Exception {
+    defaultClusterEnvProperties.putAll(defaultStackProps());
+    Map<String, Map<String, String>> blueprintProps = Maps.newHashMap(ImmutableMap.of(
+      "cluster-env", defaultStackProps()
+    ));
+    Configuration clusterConfig = new Configuration(blueprintProps, emptyMap());
+
+    TestHostGroup group = new TestHostGroup("groups1", Sets.newHashSet("NAMENODE"), ImmutableSet.of("host1"));
+    ClusterTopology topology = createClusterTopology(bp, clusterConfig, ImmutableSet.of(group));
+
+    BlueprintConfigurationProcessor updater = new BlueprintConfigurationProcessor(topology);
+
+    Set<String> configTypesUpdated = Sets.newHashSet();
+    updater.setStackToolsAndFeatures(clusterConfig, configTypesUpdated);
+    assertEquals("cluster-env should NOT have been updated", ImmutableSet.of(), configTypesUpdated);
+  }
+
+
+  @Test
+  public void testSetStackToolsAndFeatures_ClusterEnvChanged() throws Exception {
+    defaultClusterEnvProperties.putAll(defaultStackProps());
+    Map<String, String> blueprintClusterEnv = defaultStackProps();
+    // change something to trigger cluster-env being added to the changed configs
+    blueprintClusterEnv.put(CLUSTER_ENV_STACK_ROOT_PROPERTY, "/opt/" + STACK_NAME);
+
+    Map<String, Map<String, String>> blueprintProps = Maps.newHashMap(ImmutableMap.of(
+      "cluster-env", blueprintClusterEnv
+    ));
+    Configuration clusterConfig = new Configuration(blueprintProps, emptyMap());
+
+    TestHostGroup group = new TestHostGroup("groups1", Sets.newHashSet("NAMENODE"), ImmutableSet.of("host1"));
+    ClusterTopology topology = createClusterTopology(bp, clusterConfig, ImmutableSet.of(group));
+
+    BlueprintConfigurationProcessor updater = new BlueprintConfigurationProcessor(topology);
+
+    Set<String> configTypesUpdated = Sets.newHashSet();
+    updater.setStackToolsAndFeatures(clusterConfig, configTypesUpdated);
+    assertEquals("cluster-env should have been updated", ImmutableSet.of("cluster-env"), configTypesUpdated);
+  }
+
+  @Test
+  public void testSetStackToolsAndFeatures_ClusterEnvChanged_TrimmedValuesEqual() throws Exception {
+    defaultClusterEnvProperties.putAll(defaultStackProps());
+    Map<String, String> blueprintClusterEnv = defaultStackProps();
+    // This change should not be considered as an update to cluster-env as trimmed values are still equal
+    blueprintClusterEnv.put(
+      CLUSTER_ENV_STACK_ROOT_PROPERTY,
+      blueprintClusterEnv.get(CLUSTER_ENV_STACK_ROOT_PROPERTY) + "       \n");
+
+    Map<String, Map<String, String>> blueprintProps = Maps.newHashMap(ImmutableMap.of(
+      "cluster-env", blueprintClusterEnv
+    ));
+    Configuration clusterConfig = new Configuration(blueprintProps, emptyMap());
+
+    TestHostGroup group = new TestHostGroup("groups1", Sets.newHashSet("NAMENODE"), ImmutableSet.of("host1"));
+    ClusterTopology topology = createClusterTopology(bp, clusterConfig, ImmutableSet.of(group));
+
+    BlueprintConfigurationProcessor updater = new BlueprintConfigurationProcessor(topology);
+
+    Set<String> configTypesUpdated = Sets.newHashSet();
+    updater.setStackToolsAndFeatures(clusterConfig, configTypesUpdated);
+    assertEquals("cluster-env should NOT have been updated", ImmutableSet.of(), configTypesUpdated);
+  }
+
+
   @Test
   public void testParseNameServices() throws Exception {
     Map<String, String> hdfsSiteConfigMap =
