Repository: ambari
Updated Branches:
  refs/heads/branch-2.5 50d391da0 -> 9388184eb
  refs/heads/trunk 384a7f036 -> fdbe0939e


AMBARI-19464. Stack Downgrading Potentially Corrupts Kerberos Descriptor 
(echekanskiy via dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/98f03b5f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/98f03b5f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/98f03b5f

Branch: refs/heads/branch-2.5
Commit: 98f03b5f89f9e4a3467bc7f7cd53f29c4fb5c378
Parents: 50d391d
Author: Lisnichenko Dmitro <[email protected]>
Authored: Thu Jan 12 16:22:18 2017 +0200
Committer: Lisnichenko Dmitro <[email protected]>
Committed: Thu Jan 12 16:22:18 2017 +0200

----------------------------------------------------------------------
 .../server/orm/entities/ArtifactEntity.java     |   4 +-
 .../upgrades/UpgradeUserKerberosDescriptor.java | 130 +++++++++---
 .../UpgradeUserKerberosDescriptorTest.java      | 201 +++++++++++++++++++
 3 files changed, 302 insertions(+), 33 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/98f03b5f/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ArtifactEntity.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ArtifactEntity.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ArtifactEntity.java
index 8972e6d..eeda7d4 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ArtifactEntity.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ArtifactEntity.java
@@ -112,7 +112,7 @@ public class ArtifactEntity {
    *
    * @param foreignKeys  ordered map of foreign key property names to values
    */
-  public void setForeignKeys(TreeMap<String, String> foreignKeys) {
+  public void setForeignKeys(Map<String, String> foreignKeys) {
     this.foreignKeys = serializeForeignKeys(foreignKeys);
   }
 
@@ -134,7 +134,7 @@ public class ArtifactEntity {
    *
    * @return string representation of the foreign keys map
    */
-  public static String serializeForeignKeys(TreeMap<String, String> 
foreignKeys) {
+  public static String serializeForeignKeys(Map<String, String> foreignKeys) {
     return jsonSerializer.toJson(foreignKeys);
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/98f03b5f/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptor.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptor.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptor.java
index f1eab38..60d02a3 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptor.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptor.java
@@ -75,6 +75,9 @@ public class UpgradeUserKerberosDescriptor extends 
AbstractServerAction {
    */
   private static final String TARGET_STACK_KEY = "target_stack";
 
+  private final static String KERBEROS_DESCRIPTOR_NAME = "kerberos_descriptor";
+  private final static String KERBEROS_DESCRIPTOR_BACKUP_NAME = 
"kerberos_descriptor_backup";
+
   @Inject
   private ArtifactDAO artifactDAO;
 
@@ -116,49 +119,52 @@ public class UpgradeUserKerberosDescriptor extends 
AbstractServerAction {
       if (userDescriptor != null) {
         StackId originalStackId = 
getStackIdFromCommandParams(ORIGINAL_STACK_KEY);
         StackId targetStackId = getStackIdFromCommandParams(TARGET_STACK_KEY);
-        boolean isDowngrade = isDowngrade();
-
-        StackId newVersion = (isDowngrade) ? originalStackId : targetStackId;
-        StackId previousVersion = (isDowngrade) ? targetStackId : 
originalStackId;
-        KerberosDescriptor newDescriptor = null;
-        KerberosDescriptor previousDescriptor = null;
 
-        if (newVersion == null) {
-          logErrorMessage(messages, errorMessages, "The new stack version 
information was not found.");
+        if (isDowngrade()) {
+          restoreDescriptor(foreignKeys, messages, errorMessages);
         } else {
-          logMessage(messages, String.format("Obtaining new stack Kerberos 
descriptor for %s.", newVersion.toString()));
-          newDescriptor = 
ambariMetaInfo.getKerberosDescriptor(newVersion.getStackName(), 
newVersion.getStackVersion());
+          backupDescriptor(foreignKeys, messages, errorMessages);
+
+          KerberosDescriptor newDescriptor = null;
+          KerberosDescriptor previousDescriptor = null;
+
+          if (targetStackId == null) {
+            logErrorMessage(messages, errorMessages, "The new stack version 
information was not found.");
+          } else {
+            logMessage(messages, String.format("Obtaining new stack Kerberos 
descriptor for %s.", targetStackId.toString()));
+            newDescriptor = 
ambariMetaInfo.getKerberosDescriptor(targetStackId.getStackName(), 
targetStackId.getStackVersion());
 
-          if (newDescriptor == null) {
-            logErrorMessage(messages, errorMessages, String.format("The 
Kerberos descriptor for the new stack version, %s, was not found.", 
newVersion.toString()));
+            if (newDescriptor == null) {
+              logErrorMessage(messages, errorMessages, String.format("The 
Kerberos descriptor for the new stack version, %s, was not found.", 
targetStackId.toString()));
+            }
           }
-        }
 
-        if (previousVersion == null) {
-          logErrorMessage(messages, errorMessages, "The previous stack version 
information was not found.");
-        } else {
-          logMessage(messages, String.format("Obtaining previous stack 
Kerberos descriptor for %s.", previousVersion.toString()));
-          previousDescriptor = 
ambariMetaInfo.getKerberosDescriptor(previousVersion.getStackName(), 
previousVersion.getStackVersion());
+          if (originalStackId == null) {
+            logErrorMessage(messages, errorMessages, "The previous stack 
version information was not found.");
+          } else {
+            logMessage(messages, String.format("Obtaining previous stack 
Kerberos descriptor for %s.", originalStackId.toString()));
+            previousDescriptor = 
ambariMetaInfo.getKerberosDescriptor(originalStackId.getStackName(), 
originalStackId.getStackVersion());
 
-          if (newDescriptor == null) {
-            logErrorMessage(messages, errorMessages, String.format("The 
Kerberos descriptor for the previous stack version, %s, was not found.", 
previousVersion.toString()));
+            if (newDescriptor == null) {
+              logErrorMessage(messages, errorMessages, String.format("The 
Kerberos descriptor for the previous stack version, %s, was not found.", 
originalStackId.toString()));
+            }
           }
-        }
 
-        if (errorMessages.isEmpty()) {
-          logMessage(messages, "Updating the user-specified Kerberos 
descriptor.");
+          if (errorMessages.isEmpty()) {
+            logMessage(messages, "Updating the user-specified Kerberos 
descriptor.");
 
-          KerberosDescriptor updatedDescriptor = 
KerberosDescriptorUpdateHelper.updateUserKerberosDescriptor(
-              previousDescriptor,
-              newDescriptor,
-              userDescriptor);
+            KerberosDescriptor updatedDescriptor = 
KerberosDescriptorUpdateHelper.updateUserKerberosDescriptor(
+                previousDescriptor,
+                newDescriptor,
+                userDescriptor);
 
-          logMessage(messages, "Storing updated user-specified Kerberos 
descriptor.");
+            logMessage(messages, "Storing updated user-specified Kerberos 
descriptor.");
 
-          entity.setArtifactData(updatedDescriptor.toMap());
-          artifactDAO.merge(entity);
+            entity.setArtifactData(updatedDescriptor.toMap());
+            artifactDAO.merge(entity);
 
-          logMessage(messages, "Successfully updated the user-specified 
Kerberos descriptor.");
+            logMessage(messages, "Successfully updated the user-specified 
Kerberos descriptor.");
+          }
         }
       } else {
         logMessage(messages, "A user-specified Kerberos descriptor was not 
found. No updates are necessary.");
@@ -202,4 +208,66 @@ public class UpgradeUserKerberosDescriptor extends 
AbstractServerAction {
     messages.add(message);
     errorMessages.add(message);
   }
+
+  /**
+   * Creates a copy of the user-defined kerberos descriptor and stores it with 
the name {@code kerberos_descriptor_backup}.
+   *
+   * @param foreignKeys   keys specific for cluster
+   * @param messages      list of log messages
+   * @param errorMessages list of error log messages
+   */
+  private void backupDescriptor(TreeMap<String, String> foreignKeys, 
List<String> messages, List<String> errorMessages) {
+    ArtifactEntity backupEntity = 
artifactDAO.findByNameAndForeignKeys(KERBEROS_DESCRIPTOR_BACKUP_NAME, 
foreignKeys);
+    if (backupEntity != null) {
+      artifactDAO.remove(backupEntity);
+    }
+
+    ArtifactEntity entity = 
artifactDAO.findByNameAndForeignKeys(KERBEROS_DESCRIPTOR_NAME, foreignKeys);
+    if (entity != null) {
+      backupEntity = new ArtifactEntity();
+      backupEntity.setArtifactName(KERBEROS_DESCRIPTOR_BACKUP_NAME);
+      backupEntity.setForeignKeys(entity.getForeignKeys());
+      backupEntity.setArtifactData(entity.getArtifactData());
+
+      artifactDAO.create(backupEntity);
+      logMessage(messages, "Created backup of kerberos descriptor");
+    } else {
+      logErrorMessage(
+          messages,
+          errorMessages,
+          "No backup of kerberos descriptor created due to missing original 
kerberos descriptor"
+      );
+    }
+
+
+  }
+
+  /**
+   * Restores the user-defined kerberos descriptor from the artifact named 
{@code kerberos_descriptor_backup}.
+   *
+   * @param foreignKeys   keys specific for cluster
+   * @param messages      list of log messages
+   * @param errorMessages list of error log messages
+   */
+  private void restoreDescriptor(TreeMap<String, String> foreignKeys, 
List<String> messages, List<String> errorMessages) {
+    ArtifactEntity backupEntity = 
artifactDAO.findByNameAndForeignKeys(KERBEROS_DESCRIPTOR_BACKUP_NAME, 
foreignKeys);
+
+    if (backupEntity != null) {
+      ArtifactEntity entity = 
artifactDAO.findByNameAndForeignKeys(KERBEROS_DESCRIPTOR_NAME, foreignKeys);
+      if (entity != null) {
+        artifactDAO.remove(entity);
+      }
+
+      entity = new ArtifactEntity();
+      entity.setArtifactName(KERBEROS_DESCRIPTOR_NAME);
+      entity.setForeignKeys(backupEntity.getForeignKeys());
+      entity.setArtifactData(backupEntity.getArtifactData());
+
+      artifactDAO.create(entity);
+      logMessage(messages, "Restored kerberos descriptor from backup");
+    } else {
+      logErrorMessage(messages, errorMessages, "No backup of kerberos 
descriptor found");
+    }
+  }
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/98f03b5f/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptorTest.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptorTest.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptorTest.java
new file mode 100644
index 0000000..9e2080b
--- /dev/null
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptorTest.java
@@ -0,0 +1,201 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.serveraction.upgrades;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.assertEquals;
+
+import java.lang.reflect.Field;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.ambari.server.agent.ExecutionCommand;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.orm.dao.ArtifactDAO;
+import org.apache.ambari.server.orm.entities.ArtifactEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorUpdateHelper;
+import org.easymock.Capture;
+import org.easymock.EasyMock;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.junit.runner.RunWith;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+/**
+ * Tests UpgradeUserKerberosDescriptor logic
+ */
+@RunWith(PowerMockRunner.class)
+@PrepareForTest(KerberosDescriptorUpdateHelper.class)
+public class UpgradeUserKerberosDescriptorTest {
+  private Clusters clusters;
+  private Cluster cluster;
+  private AmbariMetaInfo ambariMetaInfo;
+  private KerberosDescriptorFactory kerberosDescriptorFactory;
+  private ArtifactDAO artifactDAO;
+
+  private TreeMap<String, Field> fields = new TreeMap<>();
+
+  @Before
+  public void setup() throws Exception {
+    clusters = EasyMock.createMock(Clusters.class);
+    cluster = EasyMock.createMock(Cluster.class);
+    kerberosDescriptorFactory = 
EasyMock.createNiceMock(KerberosDescriptorFactory.class);
+    ambariMetaInfo = EasyMock.createMock(AmbariMetaInfo.class);
+    artifactDAO = EasyMock.createNiceMock(ArtifactDAO.class);
+
+    expect(clusters.getCluster((String) 
anyObject())).andReturn(cluster).anyTimes();
+    expect(cluster.getClusterId()).andReturn(1l).atLeastOnce();
+    replay(clusters, cluster);
+
+    prepareFields();
+
+  }
+
+  @Test
+  public void testUpgrade() throws Exception {
+
+    Map<String, String> commandParams = new HashMap<String, String>();
+    commandParams.put("clusterName", "c1");
+    commandParams.put("upgrade_direction", "UPGRADE");
+    commandParams.put("original_stack", "HDP-2.4");
+    commandParams.put("target_stack", "HDP-2.5");
+
+    ExecutionCommand executionCommand = new ExecutionCommand();
+    executionCommand.setCommandParams(commandParams);
+    executionCommand.setClusterName("c1");
+
+    HostRoleCommand hrc = EasyMock.createMock(HostRoleCommand.class);
+    expect(hrc.getRequestId()).andReturn(1L).anyTimes();
+    expect(hrc.getStageId()).andReturn(2L).anyTimes();
+    expect(hrc.getExecutionCommandWrapper()).andReturn(new 
ExecutionCommandWrapper(executionCommand)).anyTimes();
+    replay(hrc);
+
+    UpgradeUserKerberosDescriptor action = new UpgradeUserKerberosDescriptor();
+    injectFields(action);
+
+    action.setExecutionCommand(executionCommand);
+    action.setHostRoleCommand(hrc);
+
+    ArtifactEntity entity = EasyMock.createNiceMock(ArtifactEntity.class);
+    expect(entity.getArtifactData()).andReturn(null).anyTimes();
+    expect(entity.getForeignKeys()).andReturn(null).anyTimes();
+    expect(artifactDAO.findByNameAndForeignKeys(anyString(), (TreeMap<String, 
String>) anyObject())).andReturn(entity).atLeastOnce();
+
+    KerberosDescriptor userDescriptor = 
EasyMock.createMock(KerberosDescriptor.class);
+    KerberosDescriptor newDescriptor = 
EasyMock.createMock(KerberosDescriptor.class);
+    KerberosDescriptor previousDescriptor = 
EasyMock.createMock(KerberosDescriptor.class);
+    KerberosDescriptor updatedKerberosDescriptor = 
EasyMock.createMock(KerberosDescriptor.class);
+
+    PowerMockito.mockStatic(KerberosDescriptorUpdateHelper.class);
+    
PowerMockito.when(KerberosDescriptorUpdateHelper.updateUserKerberosDescriptor(previousDescriptor,
 newDescriptor, userDescriptor)).thenReturn(updatedKerberosDescriptor);
+    
expect(kerberosDescriptorFactory.createInstance((Map)null)).andReturn(userDescriptor).atLeastOnce();
+    
expect(ambariMetaInfo.getKerberosDescriptor("HDP","2.5")).andReturn(newDescriptor).atLeastOnce();
+    
expect(ambariMetaInfo.getKerberosDescriptor("HDP","2.4")).andReturn(previousDescriptor).atLeastOnce();
+
+
+    expect(updatedKerberosDescriptor.toMap()).andReturn(null).once();
+
+
+    expect(artifactDAO.merge(entity)).andReturn(entity).once();
+    Capture<ArtifactEntity> createCapture = Capture.newInstance();
+    artifactDAO.create(capture(createCapture));
+    EasyMock.expectLastCall().once();
+
+    replay(artifactDAO, entity, ambariMetaInfo, kerberosDescriptorFactory, 
updatedKerberosDescriptor);
+
+    action.execute(null);
+
+    verify(artifactDAO, updatedKerberosDescriptor);
+    assertEquals(createCapture.getValue().getArtifactName(), 
"kerberos_descriptor_backup");
+  }
+
+  @Test
+  public void testDowngrade() throws Exception {
+
+    Map<String, String> commandParams = new HashMap<String, String>();
+    commandParams.put("clusterName", "c1");
+    commandParams.put("upgrade_direction", "DOWNGRADE");
+    commandParams.put("original_stack", "HDP-2.4");
+    commandParams.put("target_stack", "HDP-2.5");
+
+    ExecutionCommand executionCommand = new ExecutionCommand();
+    executionCommand.setCommandParams(commandParams);
+    executionCommand.setClusterName("c1");
+
+    HostRoleCommand hrc = EasyMock.createMock(HostRoleCommand.class);
+    expect(hrc.getRequestId()).andReturn(1L).anyTimes();
+    expect(hrc.getStageId()).andReturn(2L).anyTimes();
+    expect(hrc.getExecutionCommandWrapper()).andReturn(new 
ExecutionCommandWrapper(executionCommand)).anyTimes();
+    replay(hrc);
+
+    UpgradeUserKerberosDescriptor action = new UpgradeUserKerberosDescriptor();
+    injectFields(action);
+
+    action.setExecutionCommand(executionCommand);
+    action.setHostRoleCommand(hrc);
+
+    ArtifactEntity entity = EasyMock.createNiceMock(ArtifactEntity.class);
+    expect(entity.getArtifactData()).andReturn(null).anyTimes();
+    expect(entity.getForeignKeys()).andReturn(null).anyTimes();
+    expect(artifactDAO.findByNameAndForeignKeys(anyString(), (TreeMap<String, 
String>) anyObject())).andReturn(entity).atLeastOnce();
+
+    KerberosDescriptor userDescriptor = 
EasyMock.createMock(KerberosDescriptor.class);
+
+    
expect(kerberosDescriptorFactory.createInstance((Map)null)).andReturn(userDescriptor).atLeastOnce();
+
+    Capture<ArtifactEntity> createCapture = Capture.newInstance();
+    artifactDAO.create(capture(createCapture));
+    EasyMock.expectLastCall().once();
+
+    artifactDAO.remove(entity);
+    EasyMock.expectLastCall().atLeastOnce();
+
+    replay(artifactDAO, entity, ambariMetaInfo, kerberosDescriptorFactory);
+
+    action.execute(null);
+
+    verify(artifactDAO);
+    assertEquals(createCapture.getValue().getArtifactName(), 
"kerberos_descriptor");
+  }
+
+  private void prepareFields() throws NoSuchFieldException {
+    String[] fieldsNames = 
{"artifactDAO","clusters","ambariMetaInfo","kerberosDescriptorFactory"};
+    for(String fieldName : fieldsNames)
+    {
+      Field clustersField = 
UpgradeUserKerberosDescriptor.class.getDeclaredField(fieldName);
+      clustersField.setAccessible(true);
+      fields.put(fieldName, clustersField);
+    }
+  }
+  private void injectFields(UpgradeUserKerberosDescriptor action) throws 
IllegalAccessException {
+    fields.get("artifactDAO").set(action, artifactDAO);
+    fields.get("clusters").set(action, clusters);
+    fields.get("ambariMetaInfo").set(action, ambariMetaInfo);
+    fields.get("kerberosDescriptorFactory").set(action, 
kerberosDescriptorFactory);
+  }
+}

Reply via email to