Repository: ambari
Updated Branches:
  refs/heads/trunk 904cb434f -> fad971d39


AMBARI-9702. Zookeeper start failed after upgrading secured cluster (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fad971d3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fad971d3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fad971d3

Branch: refs/heads/trunk
Commit: fad971d392ff8b1e856e42a7fa16989ce2d21029
Parents: 904cb43
Author: Robert Levas <[email protected]>
Authored: Sat Feb 21 12:13:15 2015 -0500
Committer: Robert Levas <[email protected]>
Committed: Sat Feb 21 12:13:15 2015 -0500

----------------------------------------------------------------------
 .../server/upgrade/AbstractUpgradeCatalog.java  |   9 +-
 .../server/upgrade/UpgradeCatalog200.java       |  88 ++++++++++++++-
 .../server/upgrade/UpgradeCatalog200Test.java   | 108 +++++++++++++++++++
 3 files changed, 203 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fad971d3/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
index 1479840..1f3c09a 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
@@ -61,6 +61,13 @@ public abstract class AbstractUpgradeCatalog implements 
UpgradeCatalog {
   protected StackUpgradeUtil stackUpgradeUtil;
 
   protected Injector injector;
+
+  /**
+   * The user name to use as the authenticated user when performing authenticated tasks or operations
+   * that require the name of the authenticated user
+   */
+  protected static final String AUTHENTICATED_USER_NAME = "ambari-upgrade";
+
   private static final Logger LOG = LoggerFactory.getLogger
     (AbstractUpgradeCatalog.class);
   private static final Map<String, UpgradeCatalog> upgradeCatalogMap =
@@ -297,7 +304,7 @@ public abstract class AbstractUpgradeCatalog implements 
UpgradeCatalog {
 
           Config baseConfig = cluster.getConfig(cr.getType(), 
cr.getVersionTag());
           if (baseConfig != null) {
-            String authName = "ambari-upgrade";
+            String authName = AUTHENTICATED_USER_NAME;
 
             if (cluster.addDesiredConfig(authName, 
Collections.singleton(baseConfig)) != null) {
               String oldConfigString = (oldConfig != null) ? " from='" + 
oldConfig.getTag() + "'" : "";

http://git-wip-us.apache.org/repos/asf/ambari/blob/fad971d3/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog200.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog200.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog200.java
index 69e35df..8d8f6c4 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog200.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog200.java
@@ -25,7 +25,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Collections;
+import java.util.Set;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
@@ -49,6 +49,8 @@ import 
org.apache.ambari.server.orm.entities.ServiceDesiredStateEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.SecurityState;
 import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.state.UpgradeState;
@@ -314,6 +316,7 @@ public class UpgradeCatalog200 extends 
AbstractUpgradeCatalog {
     updateTezConfiguration();
     addMissingConfigs();
     persistHDPRepo();
+    updateClusterEnvConfiguration();
   }
 
   protected void persistHDPRepo() throws AmbariException{
@@ -334,6 +337,7 @@ public class UpgradeCatalog200 extends 
AbstractUpgradeCatalog {
                 stackRepoId, baseUrl);
       }
     }
+   
   }
 
   protected void updateTezConfiguration() throws AmbariException {
@@ -491,4 +495,86 @@ public class UpgradeCatalog200 extends 
AbstractUpgradeCatalog {
   protected void addMissingConfigs() throws AmbariException {
     updateConfigurationProperties("hive-site", 
Collections.singletonMap("hive.server2.transport.mode", "binary"), false, 
false);
   }
+
+  /**
+   * Update the cluster-env configuration (in all clusters) to add missing 
properties and remove
+   * obsolete properties.
+   *
+   * @throws org.apache.ambari.server.AmbariException if an error occurs while updating the cluster configurations
+   */
+  protected void updateClusterEnvConfiguration() throws AmbariException {
+    AmbariManagementController ambariManagementController = 
injector.getInstance(AmbariManagementController.class);
+    ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
+
+    Clusters clusters = ambariManagementController.getClusters();
+
+    if (clusters != null) {
+      Map<String, Cluster> clusterMap = clusters.getClusters();
+
+      if (clusterMap != null) {
+        for (final Cluster cluster : clusterMap.values()) {
+          Config configClusterEnv = 
cluster.getDesiredConfigByType("cluster-env");
+
+          if (configClusterEnv != null) {
+            Map<String, String> properties = configClusterEnv.getProperties();
+
+            if (properties != null) {
+              // -----------------------------------------
+              // Add missing properties
+
+              if (!properties.containsKey("smokeuser_principal_name")) {
+                // Add smokeuser_principal_name, from cluster-env/smokeuser
+                // Ideally a realm should be added, but for now we can assume 
the default realm and
+                // leave it off
+                String smokeUser = properties.get("smokeuser");
+
+                if ((smokeUser == null) || smokeUser.isEmpty()) {
+                  // If the smokeuser property is not set in the current 
configuration set, grab
+                  // it from the stack defaults:
+                  Set<PropertyInfo> stackProperties = 
configHelper.getStackProperties(cluster);
+
+                  if (stackProperties != null) {
+                    for (PropertyInfo propertyInfo : stackProperties) {
+                      String filename = propertyInfo.getFilename();
+
+                      if ((filename != null) && 
"cluster-env".equals(ConfigHelper.fileNameToConfigType(filename))) {
+                        smokeUser = propertyInfo.getValue();
+                        break;
+                      }
+                    }
+                  }
+
+                  // If a default value for smokeuser was not found, force it 
to be "ambari-qa"
+                  if ((smokeUser == null) || smokeUser.isEmpty()) {
+                    smokeUser = "ambari-qa";
+                  }
+                }
+
+                properties.put("smokeuser_principal_name", smokeUser);
+              }
+
+              // Add missing properties (end)
+              // -----------------------------------------
+
+              // -----------------------------------------
+              // Remove obsolete properties
+
+              // Remove obsolete properties (end)
+              // -----------------------------------------
+
+              // -----------------------------------------
+              // Set the updated configuration
+
+              configHelper.createConfigType(cluster, 
ambariManagementController, "cluster-env", properties,
+                  AUTHENTICATED_USER_NAME, "Upgrading to Ambari 2.0");
+
+              // Set configuration (end)
+              // -----------------------------------------
+
+            }
+          }
+        }
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/fad971d3/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog200Test.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog200Test.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog200Test.java
index 5e709fd..03df4d7 100644
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog200Test.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog200Test.java
@@ -40,12 +40,15 @@ import java.lang.reflect.Method;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
 import javax.persistence.EntityManager;
 
+import com.google.inject.AbstractModule;
 import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
@@ -63,9 +66,15 @@ import 
org.apache.ambari.server.orm.entities.HostComponentStateEntityPK;
 import org.apache.ambari.server.orm.entities.HostEntity;
 import 
org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
 import 
org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.SecurityState;
 import org.apache.ambari.server.state.SecurityType;
 import org.easymock.Capture;
+import org.easymock.EasyMockSupport;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -329,6 +338,7 @@ public class UpgradeCatalog200Test {
         ("addNewConfigurationsFromXml");
     Method setSecurityType = 
UpgradeCatalog200.class.getDeclaredMethod("setSecurityType");
     Method updateTezConfiguration = 
UpgradeCatalog200.class.getDeclaredMethod("updateTezConfiguration");
+    Method updateClusterEnvConfiguration = 
UpgradeCatalog200.class.getDeclaredMethod("updateClusterEnvConfiguration");
     Method updateConfigurationProperties = 
AbstractUpgradeCatalog.class.getDeclaredMethod
             ("updateConfigurationProperties", String.class, Map.class, 
boolean.class, boolean.class);
     Method persistHDPRepo = 
UpgradeCatalog200.class.getDeclaredMethod("persistHDPRepo");
@@ -340,6 +350,7 @@ public class UpgradeCatalog200Test {
         .addMockedMethod(setSecurityType)
         .addMockedMethod(updateTezConfiguration)
         .addMockedMethod(updateConfigurationProperties)
+        .addMockedMethod(updateClusterEnvConfiguration)
         .addMockedMethod(persistHDPRepo)
         .createMock();
 
@@ -363,6 +374,9 @@ public class UpgradeCatalog200Test {
     upgradeCatalog.persistHDPRepo();
     expectLastCall().once();
 
+    upgradeCatalog.updateClusterEnvConfiguration();
+    expectLastCall();
+
     replay(upgradeCatalog);
 
     upgradeCatalog.executeDMLUpdates();
@@ -370,6 +384,100 @@ public class UpgradeCatalog200Test {
     verify(upgradeCatalog);
   }
 
+  @Test
+  public void testUpdateClusterEnvConfiguration() throws Exception {
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+    final AmbariManagementController  mockAmbariManagementController = 
easyMockSupport.createStrictMock(AmbariManagementController.class);
+    final ConfigHelper mockConfigHelper = 
easyMockSupport.createMock(ConfigHelper.class);
+
+    final Clusters mockClusters = 
easyMockSupport.createStrictMock(Clusters.class);
+    final Cluster mockClusterExpected = 
easyMockSupport.createStrictMock(Cluster.class);
+    final Cluster mockClusterMissingSmokeUser = 
easyMockSupport.createStrictMock(Cluster.class);
+    final Cluster mockClusterMissingConfig = 
easyMockSupport.createStrictMock(Cluster.class);
+
+    final Config mockClusterEnvExpected = 
easyMockSupport.createStrictMock(Config.class);
+    final Config mockClusterEnvMissingSmokeUser = 
easyMockSupport.createStrictMock(Config.class);
+
+    final Map<String, String> propertiesExpectedT0 = new HashMap<String, 
String>();
+    propertiesExpectedT0.put("kerberos_domain", "EXAMPLE.COM");
+    propertiesExpectedT0.put("user_group", "hadoop");
+    propertiesExpectedT0.put("kinit_path_local", "/usr/bin");
+    propertiesExpectedT0.put("security_enabled", "true");
+    propertiesExpectedT0.put("hive_tar_destination_folder", 
"hdfs,///hdp/apps/{{ hdp_stack_version }}/hive/");
+    propertiesExpectedT0.put("sqoop_tar_source", 
"/usr/hdp/current/sqoop-client/sqoop.tar.gz");
+    propertiesExpectedT0.put("hadoop-streaming_tar_destination_folder", 
"hdfs,///hdp/apps/{{ hdp_stack_version }}/mapreduce/");
+    propertiesExpectedT0.put("pig_tar_source", 
"/usr/hdp/current/pig-client/pig.tar.gz");
+    propertiesExpectedT0.put("mapreduce_tar_destination_folder", 
"hdfs,///hdp/apps/{{ hdp_stack_version }}/mapreduce/");
+    propertiesExpectedT0.put("hive_tar_source", 
"/usr/hdp/current/hive-client/hive.tar.gz");
+    propertiesExpectedT0.put("mapreduce_tar_source", 
"/usr/hdp/current/hadoop-client/mapreduce.tar.gz");
+    propertiesExpectedT0.put("smokeuser", "ambari-qa");
+    propertiesExpectedT0.put("pig_tar_destination_folder", 
"hdfs,///hdp/apps/{{ hdp_stack_version }}/pig/");
+    propertiesExpectedT0.put("hadoop-streaming_tar_source", 
"/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar");
+    propertiesExpectedT0.put("tez_tar_destination_folder", 
"hdfs,///hdp/apps/{{ hdp_stack_version }}/tez/");
+    propertiesExpectedT0.put("smokeuser_keytab", 
"/etc/security/keytabs/smokeuser.headless.keytab");
+    propertiesExpectedT0.put("sqoop_tar_destination_folder", 
"hdfs,///hdp/apps/{{ hdp_stack_version }}/sqoop/");
+    propertiesExpectedT0.put("tez_tar_source", 
"/usr/hdp/current/tez-client/lib/tez.tar.gz");
+    propertiesExpectedT0.put("ignore_groupsusers_create", "false");
+
+    final Map<String, String> propertiesExpectedT1 = new HashMap<String, 
String>(propertiesExpectedT0);
+    propertiesExpectedT1.put("smokeuser_principal_name", "ambari-qa");
+
+    final Map<String, String> propertiesMissingSmokeUserT0 = new 
HashMap<String, String>(propertiesExpectedT0);
+    propertiesMissingSmokeUserT0.remove("smokeuser");
+
+    final Map<String, String> propertiesMissingSmokeUserT1 = new 
HashMap<String, String>(propertiesMissingSmokeUserT0);
+    propertiesMissingSmokeUserT1.put("smokeuser_principal_name", "ambari-qa");
+
+    final PropertyInfo mockSmokeUserPropertyInfo = 
easyMockSupport.createStrictMock(PropertyInfo.class);
+
+    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        
bind(AmbariManagementController.class).toInstance(mockAmbariManagementController);
+        bind(ConfigHelper.class).toInstance(mockConfigHelper);
+        bind(Clusters.class).toInstance(mockClusters);
+
+        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+      }
+    });
+
+    
expect(mockAmbariManagementController.getClusters()).andReturn(mockClusters).once();
+    expect(mockClusters.getClusters()).andReturn(new HashMap<String, 
Cluster>() {{
+      put("normal", mockClusterExpected);
+      put("missing_smokeuser", mockClusterMissingSmokeUser);
+      put("missing_cluster-env", mockClusterMissingConfig);
+
+    }}).once();
+
+      // Expected operation
+    
expect(mockClusterExpected.getDesiredConfigByType("cluster-env")).andReturn(mockClusterEnvExpected).once();
+    
expect(mockClusterEnvExpected.getProperties()).andReturn(propertiesExpectedT0).once();
+
+    mockConfigHelper.createConfigType(mockClusterExpected, 
mockAmbariManagementController,
+        "cluster-env", propertiesExpectedT1, 
UpgradeCatalog200.AUTHENTICATED_USER_NAME, "Upgrading to Ambari 2.0");
+    expectLastCall().once();
+
+    // Missing smokeuser
+    
expect(mockClusterMissingSmokeUser.getDesiredConfigByType("cluster-env")).andReturn(mockClusterEnvMissingSmokeUser).once();
+    
expect(mockClusterEnvMissingSmokeUser.getProperties()).andReturn(propertiesMissingSmokeUserT0).once();
+
+    
expect(mockConfigHelper.getStackProperties(mockClusterMissingSmokeUser)).andReturn(Collections.singleton(mockSmokeUserPropertyInfo)).once();
+
+    
expect(mockSmokeUserPropertyInfo.getFilename()).andReturn("cluster-env.xml").once();
+    expect(mockSmokeUserPropertyInfo.getValue()).andReturn("ambari-qa").once();
+
+    mockConfigHelper.createConfigType(mockClusterMissingSmokeUser, 
mockAmbariManagementController,
+        "cluster-env", propertiesMissingSmokeUserT1, 
UpgradeCatalog200.AUTHENTICATED_USER_NAME, "Upgrading to Ambari 2.0");
+    expectLastCall().once();
+
+    // Missing cluster-env config
+    
expect(mockClusterMissingConfig.getDesiredConfigByType("cluster-env")).andReturn(null).once();
+
+    easyMockSupport.replayAll();
+    
mockInjector.getInstance(UpgradeCatalog200.class).updateClusterEnvConfiguration();
+    easyMockSupport.verifyAll();
+  }
+
   /**
    * Tests that Nagios is correctly removed.
    *

Reply via email to