AMBARI-21451 - Expected Values Like original_stack Are Missing On Downgrades 
(jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f27f3aff
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f27f3aff
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f27f3aff

Branch: refs/heads/branch-feature-AMBARI-14714
Commit: f27f3affbb4c7f49944dcefc7581ac228b103e3f
Parents: eb3d3ea
Author: Jonathan Hurley <jhur...@hortonworks.com>
Authored: Wed Jul 12 13:30:16 2017 -0400
Committer: Jonathan Hurley <jhur...@hortonworks.com>
Committed: Wed Jul 12 19:26:37 2017 -0400

----------------------------------------------------------------------
 .../libraries/functions/stack_features.py       |  28 ++--
 .../controller/ActionExecutionContext.java      |  30 ++--
 .../controller/AmbariActionExecutionHelper.java |  15 +-
 .../ClusterStackVersionResourceProvider.java    |   2 +-
 .../upgrades/UpgradeUserKerberosDescriptor.java | 142 +++++++------------
 .../ambari/server/state/UpgradeContext.java     |  16 ++-
 .../SPARK/1.2.1/package/scripts/params.py       |  11 +-
 .../SPARK/1.2.1/package/scripts/setup_spark.py  |   6 +-
 .../1.2.1/package/scripts/spark_service.py      |   6 +-
 .../UpgradeUserKerberosDescriptorTest.java      |  59 ++++++--
 .../src/test/python/TestStackFeature.py         |  44 ++++--
 .../test/python/stacks/2.0.6/configs/nn_eu.json |   2 +-
 .../stacks/2.0.6/configs/nn_eu_standby.json     |   2 +-
 .../2.1/configs/hive-metastore-upgrade.json     |   2 +-
 .../python/stacks/2.2/configs/knox_upgrade.json |   2 +-
 15 files changed, 199 insertions(+), 168 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f27f3aff/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py b/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
index 576c138..24201dd 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
@@ -105,7 +105,10 @@ def get_stack_feature_version(config):
 
   # something like 2.4.0.0-1234; represents the version for the command
   # (or None if this is a cluster install and it hasn't been calculated yet)
-  version = default("/commandParams/version", None)
+  # this is always guaranteed to be the correct version for the command, even in
+  # upgrade and downgrade scenarios
+  command_version = default("/commandParams/version", None)
+  command_stack = default("/commandParams/target_stack", None)
 
   # something like 2.4.0.0-1234
   # (or None if this is a cluster install and it hasn't been calculated yet)
@@ -115,13 +118,13 @@ def get_stack_feature_version(config):
   upgrade_direction = default("/commandParams/upgrade_direction", None)
 
   # start out with the value that's right 99% of the time
-  version_for_stack_feature_checks = version if version is not None else stack_version
+  version_for_stack_feature_checks = command_version if command_version is not None else stack_version
 
   # if this is not an upgrade, then we take the simple path
   if upgrade_direction is None:
     Logger.info(
-      "Stack Feature Version Info: stack_version={0}, version={1}, current_cluster_version={2} -> {3}".format(
-        stack_version, version, current_cluster_version, version_for_stack_feature_checks))
+      "Stack Feature Version Info: Cluster Stack={0}, Cluster Current Version={1}, Command Stack={2}, Command Version={3}-> {4}".format(
+        stack_version, current_cluster_version, command_stack, command_version, version_for_stack_feature_checks))
 
     return version_for_stack_feature_checks
 
@@ -130,15 +133,12 @@ def get_stack_feature_version(config):
   is_stop_command = _is_stop_command(config)
   if not is_stop_command:
     Logger.info(
-      "Stack Feature Version Info: stack_version={0}, version={1}, 
current_cluster_version={2}, upgrade_direction={3} -> {4}".format(
-        stack_version, version, current_cluster_version, upgrade_direction,
+      "Stack Feature Version Info: Cluster Stack={0}, Cluster Current 
Version={1}, Command Stack={2}, Command Version={3}, Upgrade Direction={4} -> 
{5}".format(
+        stack_version, current_cluster_version, command_stack, 
command_version, upgrade_direction,
         version_for_stack_feature_checks))
 
     return version_for_stack_feature_checks
 
-  original_stack = default("/commandParams/original_stack", None)
-  target_stack = default("/commandParams/target_stack", None)
-
   # something like 2.5.0.0-5678 (or None)
  downgrade_from_version = default("/commandParams/downgrade_from_version", None)
 
@@ -154,15 +154,13 @@ def get_stack_feature_version(config):
     # UPGRADE
     if current_cluster_version is not None:
       version_for_stack_feature_checks = current_cluster_version
-    elif original_stack is not None:
-      version_for_stack_feature_checks = format_stack_version(original_stack)
     else:
-      version_for_stack_feature_checks = version if version is not None else stack_version
+      version_for_stack_feature_checks = command_version if command_version is not None else stack_version
 
   Logger.info(
-    "Stack Feature Version Info: stack_version={0}, version={1}, current_cluster_version={2}, upgrade_direction={3}, original_stack={4}, target_stack={5}, downgrade_from_version={6}, stop_command={7} -> {8}".format(
-      stack_version, version, current_cluster_version, upgrade_direction, original_stack,
-      target_stack, downgrade_from_version, is_stop_command, version_for_stack_feature_checks))
+    "Stack Feature Version Info: Cluster Stack={0}, Cluster Current Version={1}, Command Stack={2}, Command Version={3}, Upgrade Direction={4}, stop_command={5} -> {6}".format(
+      stack_version, current_cluster_version, command_stack, command_version, upgrade_direction,
+      is_stop_command, version_for_stack_feature_checks))
 
   return version_for_stack_feature_checks
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f27f3aff/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java
index 34d6db9..5d71869 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java
@@ -27,7 +27,7 @@ import org.apache.ambari.server.actionmanager.TargetHostType;
 import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.controller.internal.RequestOperationLevel;
 import org.apache.ambari.server.controller.internal.RequestResourceFilter;
-import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
 
 /**
  * The context required to create tasks and stages for a custom action
@@ -44,7 +44,7 @@ public class ActionExecutionContext {
   private String expectedComponentName;
   private boolean hostsInMaintenanceModeExcluded = true;
   private boolean allowRetry = false;
-  private StackId stackId;
+  private RepositoryVersionEntity repositoryVersion;
 
   private List<ExecutionCommandVisitor> m_visitors = new ArrayList<>();
 
@@ -175,27 +175,29 @@ public class ActionExecutionContext {
   }
 
   /**
-   * Gets the stack to use for generating stack-associated values for a 
command.
-   * In some cases the cluster's stack is not the correct one to use, such as
-   * when distributing a repository.
+   * Gets the stack/version to use for generating stack-associated values for a
+   * command. In some cases the cluster's stack is not the correct one to use,
+   * such as when distributing a repository.
    *
-   * @return the stackId the stack to use when generating stack-specific 
content
-   *         for the command.
+   * @return the repository for the stack/version to use when generating
+   *         stack-specific content for the command.
+   *
+   * @return
    */
-  public StackId getStackId() {
-    return stackId;
+  public RepositoryVersionEntity getRepositoryVersion() {
+    return repositoryVersion;
   }
 
   /**
-   * Sets the stack to use for generating stack-associated values for a 
command.
-   * In some cases the cluster's stack is not the correct one to use, such as
-   * when distributing a repository.
+   * Sets the stack/version to use for generating stack-associated values for a
+   * command. In some cases the cluster's stack is not the correct one to use,
+   * such as when distributing a repository.
    *
    * @param stackId
    *          the stackId to use for stack-based properties on the command.
    */
-  public void setStackId(StackId stackId) {
-    this.stackId = stackId;
+  public void setRepositoryVersion(RepositoryVersionEntity repositoryVersion) {
+    this.repositoryVersion = repositoryVersion;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/f27f3aff/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
index 391daa9..55356c7 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
@@ -539,13 +539,18 @@ public class AmbariActionExecutionHelper {
     // if the repo is null, see if any values from the context should go on the
     // host params and then return
     if (null == repositoryVersion) {
-      if (null != actionContext.getStackId()) {
-        StackId stackId = actionContext.getStackId();
+      // see if the action context has a repository set to use for the command
+      if (null != actionContext.getRepositoryVersion()) {
+        StackId stackId = actionContext.getRepositoryVersion().getStackId();
         hostLevelParams.put(STACK_NAME, stackId.getStackName());
         hostLevelParams.put(STACK_VERSION, stackId.getStackVersion());
       }
 
       return;
+    } else {
+      StackId stackId = repositoryVersion.getStackId();
+      hostLevelParams.put(STACK_NAME, stackId.getStackName());
+      hostLevelParams.put(STACK_VERSION, stackId.getStackVersion());
     }
 
     JsonObject rootJsonObject = new JsonObject();
@@ -569,11 +574,5 @@ public class AmbariActionExecutionHelper {
     }
 
     hostLevelParams.put(REPO_INFO, rootJsonObject.toString());
-
-    // set the host level params if not already set by whoever is creating 
this command
-    if (!hostLevelParams.containsKey(STACK_NAME) || 
!hostLevelParams.containsKey(STACK_VERSION)) {
-      hostLevelParams.put(STACK_NAME, repositoryVersion.getStackName());
-      hostLevelParams.put(STACK_VERSION, repositoryVersion.getStackVersion());
-    }
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/f27f3aff/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
index c4fce8a..9ecea95 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
@@ -613,7 +613,7 @@ public class ClusterStackVersionResourceProvider extends 
AbstractControllerResou
     ActionExecutionContext actionContext = new 
ActionExecutionContext(cluster.getClusterName(),
         INSTALL_PACKAGES_ACTION, Collections.singletonList(filter), 
roleParams);
 
-    actionContext.setStackId(stackId);
+    actionContext.setRepositoryVersion(repoVersion);
     
actionContext.setTimeout(Short.valueOf(configuration.getDefaultAgentTaskTimeout(true)));
 
     repoVersionHelper.addCommandRepository(actionContext, osFamily, 
repoVersion, repoInfo);

http://git-wip-us.apache.org/repos/asf/ambari/blob/f27f3aff/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptor.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptor.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptor.java
index 59690a3..78aaa77 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptor.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptor.java
@@ -22,6 +22,8 @@ import java.util.List;
 import java.util.TreeMap;
 import java.util.concurrent.ConcurrentMap;
 
+import org.apache.ambari.annotations.Experimental;
+import org.apache.ambari.annotations.ExperimentalFeature;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.HostRoleCommand;
 import org.apache.ambari.server.actionmanager.HostRoleStatus;
@@ -29,10 +31,10 @@ import org.apache.ambari.server.agent.CommandReport;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.orm.dao.ArtifactDAO;
 import org.apache.ambari.server.orm.entities.ArtifactEntity;
-import org.apache.ambari.server.serveraction.AbstractServerAction;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.UpgradeContext;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptorUpdateHelper;
@@ -48,34 +50,9 @@ import com.google.inject.Inject;
  *
  * @see org.apache.ambari.server.state.kerberos.KerberosDescriptorUpdateHelper
  */
-public class UpgradeUserKerberosDescriptor extends AbstractServerAction {
+public class UpgradeUserKerberosDescriptor extends AbstractUpgradeServerAction 
{
   private static final Logger LOG = 
LoggerFactory.getLogger(UpgradeUserKerberosDescriptor.class);
 
-  /**
-   * The upgrade direction.
-   *
-   * @see Direction
-   */
-  private static final String UPGRADE_DIRECTION_KEY = "upgrade_direction";
-
-  /**
-   * The original "current" stack of the cluster before the upgrade started.
-   * This is the same regardless of whether the current direction is
-   * {@link Direction#UPGRADE} or {@link Direction#DOWNGRADE}.
-   *
-   * @see Direction
-   */
-  private static final String ORIGINAL_STACK_KEY = "original_stack";
-
-  /**
-   * The target upgrade stack before the upgrade started. This is the same
-   * regardless of whether the current direction is {@link Direction#UPGRADE} 
or
-   * {@link Direction#DOWNGRADE}.
-   *
-   * @see Direction
-   */
-  private static final String TARGET_STACK_KEY = "target_stack";
-
   private final static String KERBEROS_DESCRIPTOR_NAME = "kerberos_descriptor";
   private final static String KERBEROS_DESCRIPTOR_BACKUP_NAME = 
"kerberos_descriptor_backup";
 
@@ -108,70 +85,73 @@ public class UpgradeUserKerberosDescriptor extends 
AbstractServerAction {
     List<String> messages = new ArrayList<>();
     List<String> errorMessages = new ArrayList<>();
 
-    if (cluster != null) {
-      logMessage(messages, "Obtaining the user-defined Kerberos descriptor");
+    UpgradeContext upgradeContext = getUpgradeContext(cluster);
 
-      TreeMap<String, String> foreignKeys = new TreeMap<>();
-      foreignKeys.put("cluster", String.valueOf(cluster.getClusterId()));
+    logMessage(messages, "Obtaining the user-defined Kerberos descriptor");
 
-      ArtifactEntity entity = 
artifactDAO.findByNameAndForeignKeys("kerberos_descriptor", foreignKeys);
-      KerberosDescriptor userDescriptor = (entity == null) ? null : 
kerberosDescriptorFactory.createInstance(entity.getArtifactData());
+    TreeMap<String, String> foreignKeys = new TreeMap<>();
+    foreignKeys.put("cluster", String.valueOf(cluster.getClusterId()));
 
-      if (userDescriptor != null) {
-        StackId originalStackId = 
getStackIdFromCommandParams(ORIGINAL_STACK_KEY);
-        StackId targetStackId = getStackIdFromCommandParams(TARGET_STACK_KEY);
+    ArtifactEntity entity = 
artifactDAO.findByNameAndForeignKeys("kerberos_descriptor", foreignKeys);
+    KerberosDescriptor userDescriptor = (entity == null) ? null : 
kerberosDescriptorFactory.createInstance(entity.getArtifactData());
 
-        if (isDowngrade()) {
-          restoreDescriptor(foreignKeys, messages, errorMessages);
-        } else {
-          backupDescriptor(foreignKeys, messages, errorMessages);
+    if (userDescriptor != null) {
+
+      @Experimental(
+          feature = ExperimentalFeature.PATCH_UPGRADES,
+          comment = "This needs to be correctly done per-service")
+
+      StackId originalStackId = cluster.getCurrentStackVersion();
+      StackId targetStackId = 
upgradeContext.getRepositoryVersion().getStackId();
+
+      if (upgradeContext.getDirection() == Direction.DOWNGRADE) {
+        restoreDescriptor(foreignKeys, messages, errorMessages);
+      } else {
+        backupDescriptor(foreignKeys, messages, errorMessages);
 
-          KerberosDescriptor newDescriptor = null;
-          KerberosDescriptor previousDescriptor = null;
+        KerberosDescriptor newDescriptor = null;
+        KerberosDescriptor previousDescriptor = null;
 
-          if (targetStackId == null) {
-            logErrorMessage(messages, errorMessages, "The new stack version 
information was not found.");
-          } else {
-            logMessage(messages, String.format("Obtaining new stack Kerberos 
descriptor for %s.", targetStackId.toString()));
-            newDescriptor = 
ambariMetaInfo.getKerberosDescriptor(targetStackId.getStackName(), 
targetStackId.getStackVersion());
+        if (targetStackId == null) {
+          logErrorMessage(messages, errorMessages, "The new stack version 
information was not found.");
+        } else {
+          logMessage(messages, String.format("Obtaining new stack Kerberos 
descriptor for %s.", targetStackId.toString()));
+          newDescriptor = 
ambariMetaInfo.getKerberosDescriptor(targetStackId.getStackName(), 
targetStackId.getStackVersion());
 
-            if (newDescriptor == null) {
-              logErrorMessage(messages, errorMessages, String.format("The 
Kerberos descriptor for the new stack version, %s, was not found.", 
targetStackId.toString()));
-            }
+          if (newDescriptor == null) {
+            logErrorMessage(messages, errorMessages, String.format("The 
Kerberos descriptor for the new stack version, %s, was not found.", 
targetStackId.toString()));
           }
+        }
 
-          if (originalStackId == null) {
-            logErrorMessage(messages, errorMessages, "The previous stack 
version information was not found.");
-          } else {
-            logMessage(messages, String.format("Obtaining previous stack 
Kerberos descriptor for %s.", originalStackId.toString()));
-            previousDescriptor = 
ambariMetaInfo.getKerberosDescriptor(originalStackId.getStackName(), 
originalStackId.getStackVersion());
+        if (originalStackId == null) {
+          logErrorMessage(messages, errorMessages, "The previous stack version 
information was not found.");
+        } else {
+          logMessage(messages, String.format("Obtaining previous stack 
Kerberos descriptor for %s.", originalStackId.toString()));
+          previousDescriptor = 
ambariMetaInfo.getKerberosDescriptor(originalStackId.getStackName(), 
originalStackId.getStackVersion());
 
-            if (newDescriptor == null) {
-              logErrorMessage(messages, errorMessages, String.format("The 
Kerberos descriptor for the previous stack version, %s, was not found.", 
originalStackId.toString()));
-            }
+          if (newDescriptor == null) {
+            logErrorMessage(messages, errorMessages, String.format("The 
Kerberos descriptor for the previous stack version, %s, was not found.", 
originalStackId.toString()));
           }
+        }
 
-          if (errorMessages.isEmpty()) {
-            logMessage(messages, "Updating the user-specified Kerberos 
descriptor.");
+        if (errorMessages.isEmpty()) {
+          logMessage(messages, "Updating the user-specified Kerberos 
descriptor.");
 
-            KerberosDescriptor updatedDescriptor = 
KerberosDescriptorUpdateHelper.updateUserKerberosDescriptor(
-                previousDescriptor,
-                newDescriptor,
-                userDescriptor);
+          KerberosDescriptor updatedDescriptor = 
KerberosDescriptorUpdateHelper.updateUserKerberosDescriptor(
+              previousDescriptor,
+              newDescriptor,
+              userDescriptor);
 
-            logMessage(messages, "Storing updated user-specified Kerberos 
descriptor.");
+          logMessage(messages, "Storing updated user-specified Kerberos 
descriptor.");
 
-            entity.setArtifactData(updatedDescriptor.toMap());
-            artifactDAO.merge(entity);
+          entity.setArtifactData(updatedDescriptor.toMap());
+          artifactDAO.merge(entity);
 
-            logMessage(messages, "Successfully updated the user-specified 
Kerberos descriptor.");
-          }
+          logMessage(messages, "Successfully updated the user-specified 
Kerberos descriptor.");
         }
-      } else {
-        logMessage(messages, "A user-specified Kerberos descriptor was not 
found. No updates are necessary.");
       }
     } else {
-      logErrorMessage(messages, errorMessages, String.format("The cluster 
named %s was not found.", clusterName));
+      logMessage(messages, "A user-specified Kerberos descriptor was not 
found. No updates are necessary.");
     }
 
     if (!errorMessages.isEmpty()) {
@@ -181,24 +161,6 @@ public class UpgradeUserKerberosDescriptor extends 
AbstractServerAction {
     return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", 
StringUtils.join(messages, "\n"), StringUtils.join(errorMessages, "\n"));
   }
 
-  /**
-   * Determines if upgrade direction is {@link Direction#UPGRADE} or {@link 
Direction#DOWNGRADE}.
-   *
-   * @return {@code true} if {@link Direction#DOWNGRADE}; {@code false} if 
{@link Direction#UPGRADE}
-   */
-  private boolean isDowngrade() {
-    return 
Direction.DOWNGRADE.name().equalsIgnoreCase(getCommandParameterValue(UPGRADE_DIRECTION_KEY));
-  }
-
-  private StackId getStackIdFromCommandParams(String commandParamKey) {
-    String stackId = getCommandParameterValue(commandParamKey);
-    if (stackId == null) {
-      return null;
-    } else {
-      return new StackId(stackId);
-    }
-  }
-
   private void logMessage(List<String> messages, String message) {
     LOG.info(message);
     messages.add(message);

http://git-wip-us.apache.org/repos/asf/ambari/blob/f27f3aff/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
index 3ecf64d..1695bd3 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
@@ -99,7 +99,13 @@ public class UpgradeContext {
   public static final String COMMAND_PARAM_TASKS = "tasks";
   public static final String COMMAND_PARAM_STRUCT_OUT = "structured_out";
 
-  /**
+  @Deprecated
+  @Experimental(
+      feature = ExperimentalFeature.PATCH_UPGRADES,
+      comment = "This isn't needed anymore, but many python classes still use 
it")
+  public static final String COMMAND_PARAM_DOWNGRADE_FROM_VERSION = 
"downgrade_from_version";
+
+  /*
    * The cluster that the upgrade is for.
    */
   final private Cluster m_cluster;
@@ -744,6 +750,7 @@ public class UpgradeContext {
    * <ul>
    * <li>{@link #COMMAND_PARAM_CLUSTER_NAME}
    * <li>{@link #COMMAND_PARAM_DIRECTION}
+   * <li>{@link #COMMAND_PARAM_DOWNGRADE_FROM_VERSION}
    * <li>{@link #COMMAND_PARAM_UPGRADE_TYPE}
    * <li>{@link KeyNames#REFRESH_CONFIG_TAGS_BEFORE_EXECUTION} - necessary in
    * order to have the commands contain the correct configurations. Otherwise,
@@ -758,8 +765,13 @@ public class UpgradeContext {
   public Map<String, String> getInitializedCommandParameters() {
     Map<String, String> parameters = new HashMap<>();
 
+    Direction direction = getDirection();
     parameters.put(COMMAND_PARAM_CLUSTER_NAME, m_cluster.getClusterName());
-    parameters.put(COMMAND_PARAM_DIRECTION, 
getDirection().name().toLowerCase());
+    parameters.put(COMMAND_PARAM_DIRECTION, direction.name().toLowerCase());
+
+    if (direction == Direction.DOWNGRADE) {
+      parameters.put(COMMAND_PARAM_DOWNGRADE_FROM_VERSION, 
m_repositoryVersion.getVersion());
+    }
 
     if (null != getType()) {
       // use the serialized attributes of the enum to convert it to a string,

http://git-wip-us.apache.org/repos/asf/ambari/blob/f27f3aff/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
index 74fd76a..93b4944 100644
--- 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
@@ -23,6 +23,7 @@ import status_params
 
 from setup_spark import *
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
+from resource_management.libraries.functions.stack_features import 
get_stack_feature_version
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions import conf_select, stack_select
 from resource_management.libraries.functions.get_stack_version import 
get_stack_version
@@ -56,10 +57,8 @@ upgrade_direction = 
default("/commandParams/upgrade_direction", None)
 java_home = config['hostLevelParams']['java_home']
 stack_name = status_params.stack_name
 stack_root = Script.get_stack_root()
-stack_version_unformatted = config['hostLevelParams']['stack_version']
-if upgrade_direction == Direction.DOWNGRADE:
-  stack_version_unformatted = 
config['commandParams']['original_stack'].split("-")[1]
-stack_version_formatted = format_stack_version(stack_version_unformatted)
+
+version_for_stack_feature_checks = get_stack_feature_version(config)
 
 sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
 
@@ -70,7 +69,7 @@ spark_conf = '/etc/spark/conf'
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
 
-if stack_version_formatted and 
check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted):
+if check_stack_feature(StackFeature.ROLLING_UPGRADE, 
version_for_stack_feature_checks):
   hadoop_home = stack_select.get_hadoop_dir("home")
   spark_conf = format("{stack_root}/current/{component_directory}/conf")
   spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
@@ -211,7 +210,7 @@ dfs_type = default("/commandParams/dfs_type", "")
 # livy is only supported from HDP 2.5
 has_livyserver = False
 
-if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY, 
stack_version_formatted) and "livy-env" in config['configurations']:
+if check_stack_feature(StackFeature.SPARK_LIVY, 
version_for_stack_feature_checks) and "livy-env" in config['configurations']:
   livy_component_directory = 
Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "LIVY_SERVER")
   livy_conf = format("{stack_root}/current/{livy_component_directory}/conf")
   livy_log_dir = config['configurations']['livy-env']['livy_log_dir']

http://git-wip-us.apache.org/repos/asf/ambari/blob/f27f3aff/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py
 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py
index 50c1555..53c8f9e 100644
--- 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py
+++ 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/setup_spark.py
@@ -118,11 +118,11 @@ def setup_spark(env, type, upgrade_type=None, 
action=None, config_dir=None):
       mode=0644
     )
 
-  effective_version = params.version if upgrade_type is not None else 
params.stack_version_formatted
+  effective_version = params.version if upgrade_type is not None else 
params.version_for_stack_feature_checks
   if effective_version:
     effective_version = format_stack_version(effective_version)
 
-  if effective_version and 
check_stack_feature(StackFeature.SPARK_JAVA_OPTS_SUPPORT, effective_version):
+  if check_stack_feature(StackFeature.SPARK_JAVA_OPTS_SUPPORT, 
effective_version):
     File(os.path.join(params.spark_conf, 'java-opts'),
       owner=params.spark_user,
       group=params.spark_group,
@@ -134,7 +134,7 @@ def setup_spark(env, type, upgrade_type=None, action=None, 
config_dir=None):
       action="delete"
     )
 
-  if params.spark_thrift_fairscheduler_content and effective_version and 
check_stack_feature(StackFeature.SPARK_16PLUS, effective_version):
+  if params.spark_thrift_fairscheduler_content and 
check_stack_feature(StackFeature.SPARK_16PLUS, effective_version):
     # create spark-thrift-fairscheduler.xml
     File(os.path.join(config_dir,"spark-thrift-fairscheduler.xml"),
       owner=params.spark_user,

http://git-wip-us.apache.org/repos/asf/ambari/blob/f27f3aff/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_service.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_service.py
 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_service.py
index 31a296a..2838186 100644
--- 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_service.py
+++ 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_service.py
@@ -34,11 +34,11 @@ def spark_service(name, upgrade_type=None, action=None):
 
   if action == 'start':
 
-    effective_version = params.version if upgrade_type is not None else 
params.stack_version_formatted
+    effective_version = params.version if upgrade_type is not None else 
params.version_for_stack_feature_checks
     if effective_version:
       effective_version = format_stack_version(effective_version)
 
-    if name == 'jobhistoryserver' and effective_version and 
check_stack_feature(StackFeature.SPARK_16PLUS, effective_version):
+    if name == 'jobhistoryserver' and 
check_stack_feature(StackFeature.SPARK_16PLUS, effective_version):
       # copy spark-hdp-assembly.jar to hdfs
       copy_to_hdfs("spark", params.user_group, params.hdfs_user, 
skip=params.sysprep_skip_copy_tarballs_hdfs)
       # create spark history directory
@@ -58,7 +58,7 @@ def spark_service(name, upgrade_type=None, action=None):
 
     # Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not 
have a dependency on Tez, so it does not
     # need to copy the tarball, otherwise, copy it.
-    if params.stack_version_formatted and 
check_stack_feature(StackFeature.TEZ_FOR_SPARK, params.stack_version_formatted):
+    if check_stack_feature(StackFeature.TEZ_FOR_SPARK, 
params.version_for_stack_feature_checks):
       resource_created = copy_to_hdfs("tez", params.user_group, 
params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
       if resource_created:
         params.HdfsResource(None, action="execute")

http://git-wip-us.apache.org/repos/asf/ambari/blob/f27f3aff/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptorTest.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptorTest.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptorTest.java
index ed92955..86f6d3b 100644
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptorTest.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptorTest.java
@@ -36,11 +36,17 @@ import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.orm.dao.ArtifactDAO;
 import org.apache.ambari.server.orm.entities.ArtifactEntity;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.orm.entities.UpgradeEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.UpgradeContext;
+import org.apache.ambari.server.state.UpgradeContextFactory;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptorUpdateHelper;
+import org.apache.ambari.server.state.stack.upgrade.Direction;
 import org.easymock.Capture;
 import org.easymock.EasyMock;
 import org.junit.Before;
@@ -58,23 +64,34 @@ import org.powermock.modules.junit4.PowerMockRunner;
 public class UpgradeUserKerberosDescriptorTest {
   private Clusters clusters;
   private Cluster cluster;
+  private UpgradeEntity upgrade;
+  private UpgradeContext upgradeContext;
   private AmbariMetaInfo ambariMetaInfo;
   private KerberosDescriptorFactory kerberosDescriptorFactory;
   private ArtifactDAO artifactDAO;
+  private UpgradeContextFactory upgradeContextFactory;
 
   private TreeMap<String, Field> fields = new TreeMap<>();
+  private StackId HDP_24 = new StackId("HDP", "2.4");
 
   @Before
   public void setup() throws Exception {
     clusters = EasyMock.createMock(Clusters.class);
     cluster = EasyMock.createMock(Cluster.class);
+    upgrade = EasyMock.createNiceMock(UpgradeEntity.class);
     kerberosDescriptorFactory = 
EasyMock.createNiceMock(KerberosDescriptorFactory.class);
     ambariMetaInfo = EasyMock.createMock(AmbariMetaInfo.class);
     artifactDAO = EasyMock.createNiceMock(ArtifactDAO.class);
+    upgradeContextFactory = 
EasyMock.createNiceMock(UpgradeContextFactory.class);
+    upgradeContext = EasyMock.createNiceMock(UpgradeContext.class);
 
     expect(clusters.getCluster((String) 
anyObject())).andReturn(cluster).anyTimes();
     expect(cluster.getClusterId()).andReturn(1l).atLeastOnce();
-    replay(clusters, cluster);
+    expect(cluster.getCurrentStackVersion()).andReturn(HDP_24).atLeastOnce();
+    expect(cluster.getUpgradeInProgress()).andReturn(upgrade).atLeastOnce();
+    expect(upgradeContextFactory.create(cluster, 
upgrade)).andReturn(upgradeContext).atLeastOnce();
+
+    replay(clusters, cluster, upgradeContextFactory, upgrade);
 
     prepareFields();
 
@@ -82,12 +99,16 @@ public class UpgradeUserKerberosDescriptorTest {
 
   @Test
   public void testUpgrade() throws Exception {
+    StackId stackId = new StackId("HDP", "2.5");
+    RepositoryVersionEntity repositoryVersion = 
EasyMock.createNiceMock(RepositoryVersionEntity.class);
+    expect(repositoryVersion.getStackId()).andReturn(stackId).atLeastOnce();
+
+    
expect(upgradeContext.getDirection()).andReturn(Direction.UPGRADE).atLeastOnce();
+    
expect(upgradeContext.getRepositoryVersion()).andReturn(repositoryVersion).atLeastOnce();
+    replay(repositoryVersion, upgradeContext);
 
     Map<String, String> commandParams = new HashMap<>();
     commandParams.put("clusterName", "c1");
-    commandParams.put("upgrade_direction", "UPGRADE");
-    commandParams.put("original_stack", "HDP-2.4");
-    commandParams.put("target_stack", "HDP-2.5");
 
     ExecutionCommand executionCommand = new ExecutionCommand();
     executionCommand.setCommandParams(commandParams);
@@ -140,12 +161,16 @@ public class UpgradeUserKerberosDescriptorTest {
 
   @Test
   public void testDowngrade() throws Exception {
+    StackId stackId = new StackId("HDP", "2.5");
+    RepositoryVersionEntity repositoryVersion = 
EasyMock.createNiceMock(RepositoryVersionEntity.class);
+    expect(repositoryVersion.getStackId()).andReturn(stackId).atLeastOnce();
+
+    
expect(upgradeContext.getDirection()).andReturn(Direction.DOWNGRADE).atLeastOnce();
+    
expect(upgradeContext.getRepositoryVersion()).andReturn(repositoryVersion).atLeastOnce();
+    replay(repositoryVersion, upgradeContext);
 
     Map<String, String> commandParams = new HashMap<>();
     commandParams.put("clusterName", "c1");
-    commandParams.put("upgrade_direction", "DOWNGRADE");
-    commandParams.put("original_stack", "HDP-2.4");
-    commandParams.put("target_stack", "HDP-2.5");
 
     ExecutionCommand executionCommand = new ExecutionCommand();
     executionCommand.setCommandParams(commandParams);
@@ -188,12 +213,19 @@ public class UpgradeUserKerberosDescriptorTest {
   }
 
   private void prepareFields() throws NoSuchFieldException {
-    String[] fieldsNames = 
{"artifactDAO","clusters","ambariMetaInfo","kerberosDescriptorFactory"};
-    for(String fieldName : fieldsNames)
-    {
-      Field clustersField = 
UpgradeUserKerberosDescriptor.class.getDeclaredField(fieldName);
-      clustersField.setAccessible(true);
-      fields.put(fieldName, clustersField);
+    String[] fieldsNames = { "artifactDAO", "clusters", "ambariMetaInfo",
+        "kerberosDescriptorFactory", "m_upgradeContextFactory" };
+
+    for (String fieldName : fieldsNames) {
+      try {
+        Field clustersField = 
UpgradeUserKerberosDescriptor.class.getDeclaredField(fieldName);
+        clustersField.setAccessible(true);
+        fields.put(fieldName, clustersField);
+      } catch( NoSuchFieldException noSuchFieldException ){
+        Field clustersField = 
UpgradeUserKerberosDescriptor.class.getSuperclass().getDeclaredField(fieldName);
+        clustersField.setAccessible(true);
+        fields.put(fieldName, clustersField);        
+      }
     }
   }
   private void injectFields(UpgradeUserKerberosDescriptor action) throws 
IllegalAccessException {
@@ -201,5 +233,6 @@ public class UpgradeUserKerberosDescriptorTest {
     fields.get("clusters").set(action, clusters);
     fields.get("ambariMetaInfo").set(action, ambariMetaInfo);
     fields.get("kerberosDescriptorFactory").set(action, 
kerberosDescriptorFactory);
+    fields.get("m_upgradeContextFactory").set(action, upgradeContextFactory);
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/f27f3aff/ambari-server/src/test/python/TestStackFeature.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestStackFeature.py 
b/ambari-server/src/test/python/TestStackFeature.py
index 0116a7a..230734c 100644
--- a/ambari-server/src/test/python/TestStackFeature.py
+++ b/ambari-server/src/test/python/TestStackFeature.py
@@ -28,6 +28,32 @@ from unittest import TestCase
 Logger.initialize_logger()
 
 class TestStackFeature(TestCase):
+  """
+  EU Upgrade (HDP 2.5 to HDP 2.6)
+    - STOP
+      hostLevelParams/stack_name = HDP
+      hostLevelParams/stack_version = 2.5
+      hostLevelParams/current_version = 2.5.0.0-1237
+      commandParams/version = 2.5.0.0-1237
+    - START
+      hostLevelParams/stack_name = HDP
+      hostLevelParams/stack_version = 2.6
+      hostLevelParams/current_version = 2.5.0.0-1237
+      commandParams/version = 2.6.0.0-334
+
+  EU Downgrade (HDP 2.6 to HDP 2.5)
+    - STOP
+    hostLevelParams/stack_name = HDP
+    hostLevelParams/stack_version = 2.6
+    hostLevelParams/current_version = 2.5.0.0-1237
+    commandParams/version = 2.6.0.0-334
+    - START
+    hostLevelParams/stack_name = HDP
+    hostLevelParams/stack_version = 2.5
+    hostLevelParams/current_version = 2.5.0.0-1237
+    commandParams/version = 2.5.0.0-1237
+  """
+
   def test_get_stack_feature_version_missing_params(self):
     try:
       stack_feature_version = get_stack_feature_version({})
@@ -122,7 +148,7 @@ class TestStackFeature(TestCase):
         "current_version":  "2.4.0.0-1234"
       },
       "commandParams": {
-        "original_stack": "2.4",
+        "source_stack": "2.4",
         "target_stack": "2.5",
         "upgrade_direction": "upgrade",
         "version": "2.5.9.9-9999"
@@ -143,8 +169,8 @@ class TestStackFeature(TestCase):
         "current_version":"2.4.0.0-1234"
       },
       "commandParams":{
-        "original_stack":"2.4",
-        "target_stack":"2.5",
+        "source_stack":"2.5",
+        "target_stack":"2.4",
         "upgrade_direction":"downgrade",
         "version":"2.4.0.0-1234",
         "downgrade_from_version": "2.5.9.9-9999"
@@ -166,10 +192,10 @@ class TestStackFeature(TestCase):
         "current_version":"2.4.0.0-1234"
       },
       "commandParams":{
-        "original_stack":"2.4",
-        "target_stack":"2.5",
+        "source_stack":"2.5",
+        "target_stack":"2.4",
         "upgrade_direction":"downgrade",
-        "version":"2.4.0.0-1234",
+        "version":"2.5.9.9-9999",
         "downgrade_from_version":"2.5.9.9-9999"
       }
     }
@@ -189,10 +215,10 @@ class TestStackFeature(TestCase):
         "custom_command":"STOP"
       },
       "commandParams":{
-        "original_stack":"2.4",
-        "target_stack":"2.5",
+        "source_stack":"2.5",
+        "target_stack":"2.4",
         "upgrade_direction":"downgrade",
-        "version":"2.4.0.0-1234",
+        "version":"2.5.9.9-9999",
         "downgrade_from_version":"2.5.9.9-9999"
       }
     }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/f27f3aff/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu.json 
b/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu.json
index 7f77d83..3aadf2c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu.json
@@ -25,7 +25,7 @@
         "upgrade_type": "nonrolling_upgrade",
         "version": "2.3.2.0-2844", 
         "forceRefreshConfigTagsBeforeExecution": "*", 
-        "original_stack": "HDP-2.2", 
+        "source_stack": "HDP-2.2",
         "command_timeout": "1200", 
         "target_stack": "HDP-2.3", 
         "desired_namenode_role": "standby", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f27f3aff/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu_standby.json
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu_standby.json 
b/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu_standby.json
index 87b18af..2d48ff6 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu_standby.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu_standby.json
@@ -25,7 +25,7 @@
         "upgrade_type": "nonrolling_upgrade",
         "version": "2.3.2.0-2844", 
         "forceRefreshConfigTagsBeforeExecution": "*", 
-        "original_stack": "HDP-2.2", 
+        "source_stack": "HDP-2.2",
         "command_timeout": "1200", 
         "target_stack": "HDP-2.3", 
         "desired_namenode_role": "standby", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f27f3aff/ambari-server/src/test/python/stacks/2.1/configs/hive-metastore-upgrade.json
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.1/configs/hive-metastore-upgrade.json 
b/ambari-server/src/test/python/stacks/2.1/configs/hive-metastore-upgrade.json
index 99fcba0..021695b 100644
--- 
a/ambari-server/src/test/python/stacks/2.1/configs/hive-metastore-upgrade.json
+++ 
b/ambari-server/src/test/python/stacks/2.1/configs/hive-metastore-upgrade.json
@@ -13,7 +13,7 @@
         "upgrade_type": "nonrolling_upgrade",
         "version": "2.3.2.0-2950", 
         "forceRefreshConfigTagsBeforeExecution": "*", 
-        "original_stack": "HDP-2.3", 
+        "source_stack": "HDP-2.3",
         "command_timeout": "1200", 
         "target_stack": "HDP-2.3", 
         "script_type": "PYTHON"

http://git-wip-us.apache.org/repos/asf/ambari/blob/f27f3aff/ambari-server/src/test/python/stacks/2.2/configs/knox_upgrade.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/knox_upgrade.json 
b/ambari-server/src/test/python/stacks/2.2/configs/knox_upgrade.json
index a9db11c..1805c3b 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/knox_upgrade.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/knox_upgrade.json
@@ -22,7 +22,7 @@
         "upgrade_type": "rolling_upgrade",
         "command_retry_max_attempt_count": "3", 
         "version": "2.3.0.0-2096", 
-        "original_stack": "HDP-2.3", 
+        "source_stack": "HDP-2.3",
         "command_retry_enabled": "false", 
         "command_timeout": "1200", 
         "target_stack": "HDP-2.3", 

Reply via email to