AMBARI-21722 - Begin Using Service Versions In Python stack_feature Code (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/330a61cd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/330a61cd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/330a61cd

Branch: refs/heads/branch-feature-logsearch-ui
Commit: 330a61cdbe34f97559359c67c5039623d9fc6e69
Parents: 12c0588
Author: Jonathan Hurley <jhur...@hortonworks.com>
Authored: Mon Aug 14 16:58:52 2017 -0400
Committer: Jonathan Hurley <jhur...@hortonworks.com>
Committed: Wed Aug 16 13:26:44 2017 -0400

----------------------------------------------------------------------
 .../libraries/functions/component_version.py    |   63 +
 .../libraries/functions/constants.py            |    1 -
 .../libraries/functions/copy_tarball.py         |   38 +-
 .../libraries/functions/stack_features.py       |   33 +-
 .../libraries/functions/upgrade_summary.py      |  130 ++
 .../actionmanager/ExecutionCommandWrapper.java  |   25 +-
 .../ambari/server/agent/ExecutionCommand.java   |   35 +-
 .../AmbariManagementControllerImpl.java         |    1 -
 .../ambari/server/state/UpgradeContext.java     |  100 +-
 .../FALCON/0.5.0.2.1/package/scripts/falcon.py  |    4 +-
 .../0.5.0.2.1/package/scripts/params_linux.py   |   25 +-
 .../package/scripts/hive_metastore.py           |   11 +-
 .../package/scripts/hive_server_upgrade.py      |   16 +-
 .../0.12.0.2.0/package/scripts/hive_service.py  |    2 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |   11 +-
 .../2.1.0.3.0/package/scripts/hive_metastore.py |   11 +-
 .../package/scripts/hive_server_upgrade.py      |   17 +-
 .../2.1.0.3.0/package/scripts/hive_service.py   |    2 +-
 .../2.1.0.3.0/package/scripts/params_linux.py   |   11 +-
 .../KAFKA/0.10.0.3.0/package/scripts/kafka.py   |   29 -
 .../0.10.0.3.0/package/scripts/kafka_broker.py  |   13 +-
 .../KAFKA/0.10.0.3.0/package/scripts/params.py  |    9 +-
 .../KAFKA/0.10.0.3.0/package/scripts/upgrade.py |    3 -
 .../KAFKA/0.8.1/package/scripts/kafka.py        |   29 -
 .../KAFKA/0.8.1/package/scripts/kafka_broker.py |   14 +-
 .../KAFKA/0.8.1/package/scripts/params.py       |    8 -
 .../KAFKA/0.8.1/package/scripts/upgrade.py      |    7 +-
 .../0.5.0.2.2/package/scripts/params_linux.py   |    6 +-
 .../0.5.0.3.0/package/scripts/params_linux.py   |    3 +-
 .../OOZIE/4.0.0.2.0/package/scripts/oozie.py    |    4 +-
 .../RANGER/0.4.0/package/scripts/params.py      |    1 -
 .../RANGER/1.0.0.3.0/package/scripts/params.py  |    1 -
 .../3.4.5/package/scripts/params_linux.py       |    1 -
 .../HDP/2.0.6/properties/stack_features.json    |    6 -
 .../3.0/hooks/after-INSTALL/scripts/params.py   |    3 -
 .../HDP/3.0/properties/stack_features.json      |    6 -
 .../ambari/server/StateRecoveryManagerTest.java |   10 +-
 .../ExecutionCommandWrapperTest.java            |    8 +-
 .../ambari/server/utils/StageUtilsTest.java     |    6 +
 .../test/python/TestComponentVersionMapping.py  |   84 ++
 .../src/test/python/TestStackFeature.py         |   88 +-
 .../src/test/python/TestUpgradeSummary.py       |  137 ++
 .../stacks/2.0.6/HIVE/test_hive_server.py       |   17 +-
 .../2.0.6/HIVE/test_hive_service_check.py       |   12 +-
 .../test/python/stacks/2.0.6/configs/nn_eu.json |   21 +-
 .../stacks/2.0.6/configs/nn_eu_standby.json     |   21 +-
 .../python/stacks/2.0.6/configs/secured.json    | 1210 +++++++++---------
 .../secured_yarn_include_file_dont_manage.json  |    4 +-
 .../secured_yarn_include_file_manage.json       |    4 +-
 .../hooks/after-INSTALL/test_after_install.py   |    2 -
 .../stacks/2.1/FALCON/test_falcon_client.py     |    5 +-
 .../stacks/2.1/FALCON/test_falcon_server.py     |   15 +-
 .../stacks/2.1/FALCON/test_service_check.py     |    5 +-
 .../stacks/2.1/HIVE/test_hive_metastore.py      |   48 +-
 .../2.1/configs/hive-metastore-upgrade.json     |   21 +-
 .../python/stacks/2.2/KNOX/test_knox_gateway.py |   36 +-
 .../python/stacks/2.2/configs/knox_upgrade.json |   21 +-
 .../stacks/2.3/configs/storm_default.json       |    6 +-
 .../2.3/configs/storm_default_secure.json       |    6 +-
 .../2.5/configs/ranger-admin-default.json       |    1 -
 .../2.5/configs/ranger-admin-secured.json       |    1 -
 .../stacks/2.5/configs/ranger-kms-default.json  |    1 -
 .../stacks/2.5/configs/ranger-kms-secured.json  |    1 -
 .../2.6/configs/ranger-admin-default.json       |    1 -
 .../2.6/configs/ranger-admin-secured.json       |    1 -
 .../src/test/python/stacks/utils/RMFTestCase.py |   66 +-
 .../2.0/hooks/after-INSTALL/scripts/params.py   |    3 -
 .../HDF/2.0/properties/stack_features.json      |    6 -
 .../2.0/hooks/after-INSTALL/scripts/params.py   |    3 -
 .../HIVE/package/scripts/hive_metastore.py      |    7 +-
 .../HIVE/package/scripts/hive_server_upgrade.py |   21 +-
 .../HIVE/package/scripts/hive_service.py        |    2 +-
 .../HIVE/package/scripts/params_linux.py        |   11 +-
 73 files changed, 1578 insertions(+), 1013 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
new file mode 100644
index 0000000..a1fd6b2
--- /dev/null
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.libraries.script.script import Script
+
+def get_component_repository_version(service_name, component_name = None):
+  """
+  Gets the version associated with the specified component from the structure 
in the command.
+  Every command should contain a mapping of service/component to the desired 
repository it's set
+  to.
+
+  :param service_name: the name of the service
+  :param component_name: the name of the component
+  """
+  versions = _get_component_repositories()
+  if versions is None:
+    return None
+
+  if service_name not in versions:
+    return None
+
+  component_versions = versions[service_name]
+  if len(component_versions) == 0:
+    return None
+
+  if component_name is None:
+    for component in component_versions:
+      return component_versions[component]
+
+  if not component_name in component_versions:
+    return None
+
+  return component_versions[component_name]
+
+
+def _get_component_repositories():
+  """
+  Gets an initialized dictionary from the value in componentVersionMap. This 
structure is
+  sent on every command by Ambari and should contain each service & 
component's desired repository.
+  :return:
+  """
+  config = Script.get_config()
+  if "componentVersionMap" not in config or config["componentVersionMap"] is 
"":
+    return None
+
+  return config["componentVersionMap"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
index 6ae71ef..f46b7cf 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
@@ -65,7 +65,6 @@ class StackFeature:
   SPARK_LIVY2 = "spark_livy2"
   STORM_KERBEROS = "storm_kerberos"
   STORM_AMS = "storm_ams"
-  CREATE_KAFKA_BROKER_ID = "create_kafka_broker_id"
   KAFKA_LISTENERS = "kafka_listeners"
   KAFKA_KERBEROS = "kafka_kerberos"
   PIG_ON_TEZ = "pig_on_tez"

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
index 82a732b..795160c 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
@@ -21,7 +21,6 @@ limitations under the License.
 __all__ = ["copy_to_hdfs", "get_sysprep_skip_copy_tarballs_hdfs"]
 
 import os
-import uuid
 import tempfile
 import re
 
@@ -30,7 +29,7 @@ from resource_management.libraries.resources.hdfs_resource 
import HdfsResource
 from resource_management.libraries.functions.default import default
 from resource_management.core import shell
 from resource_management.core.logger import Logger
-from resource_management.libraries.functions import stack_tools
+from resource_management.libraries.functions import stack_tools, stack_features, stack_select
 
 STACK_NAME_PATTERN = "{{ stack_name }}"
 STACK_ROOT_PATTERN = "{{ stack_root }}"
@@ -141,32 +140,23 @@ def 
get_current_version(use_upgrading_version_during_upgrade=True):
   :param use_upgrading_version_during_upgrade: True, except when the RU/EU 
hasn't started yet.
   :return: Version, or False if an error occurred.
   """
-  upgrade_direction = default("/commandParams/upgrade_direction", None)
-  is_stack_upgrade = upgrade_direction is not None
-  current_version = default("/hostLevelParams/current_version", None)
-  Logger.info("Default version is {0}".format(current_version))
-  if is_stack_upgrade:
-    if use_upgrading_version_during_upgrade:
-      # This is the version going to. In the case of a downgrade, it is the 
lower version.
-      current_version = default("/commandParams/version", None)
-      Logger.info("Because this is a Stack Upgrade, will use version 
{0}".format(current_version))
-    else:
-      Logger.info("This is a Stack Upgrade, but keep the version unchanged.")
-  else:
-    if current_version is None:
-      # During normal operation, the first installation of services won't yet 
know about the version, so must rely
-      # on <stack-selector> to get it.
-      stack_version = _get_single_version_from_stack_select()
-      if stack_version:
-        Logger.info("Will use stack version {0}".format(stack_version))
-        current_version = stack_version
+  # get the version for this command
+  version = stack_features.get_stack_feature_version(Script.get_config())
 
+  # if there is no upgrade, then use the command's version
+  if not Script.in_stack_upgrade() or use_upgrading_version_during_upgrade:
+    Logger.info("Tarball version was calcuated as {0}. Use Command Version: 
{1}".format(
+      version, use_upgrading_version_during_upgrade))
+
+    return version
+
+  # we're in an upgrade and we need to use an older version
+  current_version = stack_select.get_role_component_current_stack_version()
   if current_version is None:
-    message_suffix = "during stack %s" % str(upgrade_direction) if 
is_stack_upgrade else ""
-    Logger.warning("Cannot copy tarball because unable to determine current 
version {0}.".format(message_suffix))
+    Logger.warning("Unable to determine the current version of the component 
for this command; unable to copy the tarball")
     return False
 
-  return current_version
+  return current_version;
 
 
 def _get_single_version_from_stack_select():
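
A short sketch of how the rewritten version lookup behaves when staging tarballs. Hedged: copy_to_hdfs and its parameters are not shown in this hunk, so only get_current_version is illustrated, and it must run inside an agent command.

  from resource_management.libraries.functions import copy_tarball

  # outside an upgrade this is the command's version; during an upgrade it is the
  # target version unless use_upgrading_version_during_upgrade is False, in which
  # case the component's currently selected stack version is used instead
  version = copy_tarball.get_current_version(use_upgrading_version_during_upgrade = False)
  if version is False:
    # no version could be determined, so the tarball copy is skipped
    pass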

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
index b241288..92823b0 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
@@ -110,10 +110,6 @@ def get_stack_feature_version(config):
   command_version = default("/commandParams/version", None)
   command_stack = default("/commandParams/target_stack", None)
 
-  # something like 2.4.0.0-1234
-  # (or None if this is a cluster install and it hasn't been calculated yet)
-  current_cluster_version = default("/hostLevelParams/current_version", None)
-
   # UPGRADE or DOWNGRADE (or None)
   upgrade_direction = default("/commandParams/upgrade_direction", None)
 
@@ -123,8 +119,8 @@ def get_stack_feature_version(config):
   # if this is not an upgrade, then we take the simple path
   if upgrade_direction is None:
     Logger.info(
-      "Stack Feature Version Info: Cluster Stack={0}, Cluster Current 
Version={1}, Command Stack={2}, Command Version={3} -> {4}".format(
-        stack_version, current_cluster_version, command_stack, 
command_version, version_for_stack_feature_checks))
+      "Stack Feature Version Info: Cluster Stack={0}, Command Stack={1}, 
Command Version={2} -> {3}".format(
+        stack_version, command_stack, command_version, 
version_for_stack_feature_checks))
 
     return version_for_stack_feature_checks
 
@@ -133,33 +129,24 @@ def get_stack_feature_version(config):
   is_stop_command = _is_stop_command(config)
   if not is_stop_command:
     Logger.info(
-      "Stack Feature Version Info: Cluster Stack={0}, Cluster Current 
Version={1}, Command Stack={2}, Command Version={3}, Upgrade Direction={4} -> 
{5}".format(
-        stack_version, current_cluster_version, command_stack, 
command_version, upgrade_direction,
+      "Stack Feature Version Info: Cluster Stack={0}, Command Stack={1}, 
Command Version={2}, Upgrade Direction={3} -> {4}".format(
+        stack_version, command_stack, command_version, upgrade_direction,
         version_for_stack_feature_checks))
 
     return version_for_stack_feature_checks
 
-  # something like 2.5.0.0-5678 (or None)
-  downgrade_from_version = default("/commandParams/downgrade_from_version", 
None)
-
+  is_downgrade = upgrade_direction.lower() == Direction.DOWNGRADE.lower()
   # guaranteed to have a STOP command now during an UPGRADE/DOWNGRADE, check 
direction
-  if upgrade_direction.lower() == Direction.DOWNGRADE.lower():
-    if downgrade_from_version is None:
-      Logger.warning(
-        "Unable to determine the version being downgraded when stopping 
services, using {0}".format(
-          version_for_stack_feature_checks))
-    else:
-      version_for_stack_feature_checks = downgrade_from_version
+  if is_downgrade:
+    from resource_management.libraries.functions import upgrade_summary
+    version_for_stack_feature_checks = upgrade_summary.get_source_version(default_version = version_for_stack_feature_checks)
   else:
     # UPGRADE
-    if current_cluster_version is not None:
-      version_for_stack_feature_checks = current_cluster_version
-    else:
       version_for_stack_feature_checks = command_version if command_version is 
not None else stack_version
 
   Logger.info(
-    "Stack Feature Version Info: Cluster Stack={0}, Cluster Current 
Version={1}, Command Stack={2}, Command Version={3}, Upgrade Direction={4}, 
stop_command={5} -> {6}".format(
-      stack_version, current_cluster_version, command_stack, command_version, 
upgrade_direction,
+    "Stack Feature Version Info: Cluster Stack={0}, Command Stack={1}, Command 
Version={2}, Upgrade Direction={3}, stop_command={4} -> {5}".format(
+      stack_version, command_stack, command_version, upgrade_direction,
       is_stop_command, version_for_stack_feature_checks))
 
   return version_for_stack_feature_checks
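
The service scripts in this commit converge on the following pattern, shown here as a sketch; the imports mirror those used in the params_linux.py files below, and StackFeature.ROLLING_UPGRADE is only an example feature.

  from resource_management.libraries.script.script import Script
  from resource_management.libraries.functions import StackFeature
  from resource_management.libraries.functions.stack_features import check_stack_feature, get_stack_feature_version

  config = Script.get_config()

  # a single version value that is correct for install, start/stop, and
  # upgrade/downgrade commands alike
  version_for_stack_feature_checks = get_stack_feature_version(config)

  if check_stack_feature(StackFeature.ROLLING_UPGRADE, version_for_stack_feature_checks):
    pass  # take the rolling-upgrade-aware code path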

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-common/src/main/python/resource_management/libraries/functions/upgrade_summary.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/upgrade_summary.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/upgrade_summary.py
new file mode 100644
index 0000000..849ac8a
--- /dev/null
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/upgrade_summary.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from collections import namedtuple
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.constants import Direction
+
+UpgradeSummary = namedtuple("UpgradeSummary", "type direction orchestration is_revert services")
+UpgradeServiceSummary = namedtuple("UpgradeServiceSummary", "service_name source_stack source_version target_stack target_version")
+
+def get_source_version(service_name = None, default_version=None):
+  """
+  Gets the source (from) version of a service participating in an upgrade. If 
there is no
+  upgrade or the specific service is not participating, this will return None.
+  :param service_name:  the service name to check for, or None to extract it 
from the command
+  :param default_version: if the version of the service can't be calculated, 
this optional
+  default value is returned
+  :return:  the version that the service is upgrading from or None if there is 
no upgrade or
+  the service is not included in the upgrade.
+  """
+  service_summary = _get_service_summary(service_name)
+  if service_summary is None:
+    return default_version
+
+  return service_summary.source_version
+
+
+def get_target_version(service_name = None, default_version=None):
+  """
+  Gets the target (to) version of a service participating in an upgrade. If 
there is no
+  upgrade or the specific service is not participating, this will return None.
+  :param service_name:  the service name to check for, or None to extract it 
from the command
+  :param default_version: if the version of the service can't be calculated, 
this optional
+  default value is returned
+  :return:  the version that the service is upgrading to or None if there is 
no upgrade or
+  the service is not included in the upgrade.
+  """
+  service_summary = _get_service_summary(service_name)
+  if service_summary is None:
+    return default_version
+
+  return service_summary.target_version
+
+
+
+def get_upgrade_summary():
+  """
+  Gets a summary of an upgrade in progress, including type, direction, 
orchestration and from/to
+  repository versions.
+  """
+  config = Script.get_config()
+  if "upgradeSummary" not in config or not config["upgradeSummary"]:
+    return None
+
+  upgrade_summary = config["upgradeSummary"]
+  service_summary_dict = {}
+
+  service_summary = upgrade_summary["services"]
+  for service_name, service_summary_json in service_summary.iteritems():
+    service_summary =  UpgradeServiceSummary(service_name = service_name,
+      source_stack = service_summary_json["sourceStackId"],
+      source_version = service_summary_json["sourceVersion"],
+      target_stack = service_summary_json["targetStackId"],
+      target_version = service_summary_json["targetVersion"])
+
+    service_summary_dict[service_name] = service_summary
+
+  return UpgradeSummary(type=upgrade_summary["type"], 
direction=upgrade_summary["direction"],
+    orchestration=upgrade_summary["orchestration"], is_revert = 
upgrade_summary["isRevert"],
+    services = service_summary_dict)
+
+
+def get_downgrade_from_version(service_name = None):
+  """
+  Gets the downgrade-from-version for the specified service. If there is no downgrade or
+  the service isn't participating in the downgrade, then this will return None.
+  :param service_name:  the service, or optionally omitted to infer it from the command.
+  :return: the downgrade-from-version or None
+  """
+  upgrade_summary = get_upgrade_summary()
+  if upgrade_summary is None:
+    return None
+
+  if Direction.DOWNGRADE.lower() != upgrade_summary.direction.lower():
+    return None
+
+  service_summary = _get_service_summary(service_name)
+  if service_summary is None:
+    return None
+
+  return service_summary.source_version
+
+
+def _get_service_summary(service_name):
+  """
+  Gets the service summary for the upgrade/downgrade for the given service, or 
None if
+  the service isn't participating.
+  :param service_name:  the service name
+  :return:  the service summary or None
+  """
+  upgrade_summary = get_upgrade_summary()
+  if upgrade_summary is None:
+    return None
+
+  if service_name is None:
+    config = Script.get_config()
+    service_name = config['serviceName']
+
+  service_summary = upgrade_summary.services
+  if service_name not in service_summary:
+    return None
+
+  return service_summary[service_name]
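
A usage sketch of the new module. Hedged: it requires an agent command whose config carries an upgradeSummary block, and "HIVE" is only an illustrative service name.

  from resource_management.libraries.functions import upgrade_summary

  # source/target versions for a service participating in the upgrade; both return
  # the supplied default (None here) when there is no upgrade in progress
  source = upgrade_summary.get_source_version("HIVE")
  target = upgrade_summary.get_target_version("HIVE")

  # populated only when the active direction is a downgrade
  downgrade_from_version = upgrade_summary.get_downgrade_from_version("HIVE")

  summary = upgrade_summary.get_upgrade_summary()
  if summary is not None:
    direction = summary.direction                 # e.g. UPGRADE or DOWNGRADE
    hive_summary = summary.services.get("HIVE")   # UpgradeServiceSummary or None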

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
index 6785df9..5f9300a 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
@@ -26,6 +26,7 @@ import java.util.TreeMap;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.ClusterNotFoundException;
+import org.apache.ambari.server.RoleCommand;
 import org.apache.ambari.server.ServiceNotFoundException;
 import org.apache.ambari.server.agent.AgentCommand.AgentCommandType;
 import org.apache.ambari.server.agent.ExecutionCommand;
@@ -33,6 +34,7 @@ import 
org.apache.ambari.server.agent.ExecutionCommand.KeyNames;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.orm.entities.UpgradeEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ConfigHelper;
@@ -42,6 +44,9 @@ import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
+import org.apache.ambari.server.state.UpgradeContext;
+import org.apache.ambari.server.state.UpgradeContext.UpgradeSummary;
+import org.apache.ambari.server.state.UpgradeContextFactory;
 import org.apache.commons.collections.MapUtils;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
@@ -70,6 +75,9 @@ public class ExecutionCommandWrapper {
   @Inject
   private Gson gson;
 
+  @Inject
+  private UpgradeContextFactory upgradeContextFactory;
+
   /**
    * Used for injecting hooks and common-services into the command.
    */
@@ -223,8 +231,12 @@ public class ExecutionCommandWrapper {
         Map<String, String> commandParams = 
executionCommand.getCommandParams();
 
         if (null != repositoryVersion) {
-          commandParams.put(KeyNames.VERSION, repositoryVersion.getVersion());
-          executionCommand.getHostLevelParams().put(KeyNames.CURRENT_VERSION, 
repositoryVersion.getVersion());
+          // only set the version if it's not set and this is NOT an install
+          // command
+          if (!commandParams.containsKey(KeyNames.VERSION)
+              && executionCommand.getRoleCommand() != RoleCommand.INSTALL) {
+            commandParams.put(KeyNames.VERSION, 
repositoryVersion.getVersion());
+          }
 
           StackId stackId = repositoryVersion.getStackId();
           StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(),
@@ -255,6 +267,15 @@ public class ExecutionCommandWrapper {
       // we are "loading-late": components that have not yet upgraded in an EU 
will have the correct versions.
       executionCommand.setComponentVersions(cluster);
 
+      // provide some basic information about a cluster upgrade if there is one
+      // in progress
+      UpgradeEntity upgrade = cluster.getUpgradeInProgress();
+      if (null != upgrade) {
+        UpgradeContext upgradeContext = upgradeContextFactory.create(cluster, 
upgrade);
+        UpgradeSummary upgradeSummary = upgradeContext.getUpgradeSummary();
+        executionCommand.setUpgradeSummary(upgradeSummary);
+      }
+
     } catch (ClusterNotFoundException cnfe) {
       // it's possible that there are commands without clusters; in such cases,
       // just return the de-serialized command and don't try to read configs

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
index 29d28da..fd27169 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
@@ -31,6 +31,7 @@ import org.apache.ambari.server.RoleCommand;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
+import org.apache.ambari.server.state.UpgradeContext.UpgradeSummary;
 import org.apache.ambari.server.utils.StageUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -156,6 +157,9 @@ public class ExecutionCommand extends AgentCommand {
   @SerializedName("componentVersionMap")
   private Map<String, Map<String, String>> componentVersionMap = new 
HashMap<>();
 
+  @SerializedName("upgradeSummary")
+  private UpgradeSummary upgradeSummary;
+
   public void setConfigurationCredentials(Map<String, Map<String, String>> 
configurationCredentials) {
     this.configurationCredentials = configurationCredentials;
   }
@@ -500,23 +504,13 @@ public class ExecutionCommand extends AgentCommand {
     /**
      * The version of the component to send down with the command. Normally,
      * this is simply the repository version of the component. However, during
-     * ugprades, this value may change depending on the progress of the upgrade
+     * upgrades, this value may change depending on the progress of the upgrade
      * and the type/direction.
      */
     @Experimental(
         feature = ExperimentalFeature.PATCH_UPGRADES,
         comment = "Change this to reflect the component version")
     String VERSION = "version";
-
-    /**
-     * Put on hostLevelParams to indicate the version that the component should
-     * be.
-     */
-    @Deprecated
-    @Experimental(
-        feature = ExperimentalFeature.PATCH_UPGRADES,
-        comment = "This should be replaced by a map of all service component 
versions")
-    String CURRENT_VERSION = "current_version";
   }
 
   /**
@@ -552,4 +546,23 @@ public class ExecutionCommand extends AgentCommand {
 
     this.componentVersionMap = componentVersionMap;
   }
+
+  /**
+   * Sets the upgrade summary if there is an active upgrade in the cluster.
+   *
+   * @param upgradeSummary
+   *          the upgrade or {@code null} for none.
+   */
+  public void setUpgradeSummary(UpgradeSummary upgradeSummary) {
+    this.upgradeSummary = upgradeSummary;
+  }
+
+  /**
+   * Gets the upgrade summary if there is an active upgrade in the cluster.
+   *
+   * @return the upgrade or {@code null} for none.
+   */
+  public UpgradeSummary getUpgradeSummary() {
+    return upgradeSummary;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index cd2f9d9..d3b6d7c 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -2555,7 +2555,6 @@ public class AmbariManagementControllerImpl implements 
AmbariManagementControlle
     execCmd.setCommandParams(commandParams);
 
     
execCmd.setRepositoryFile(customCommandExecutionHelper.getCommandRepository(cluster,
 component, host));
-    hostParams.put(KeyNames.CURRENT_VERSION, repoVersion.getVersion());
 
     if ((execCmd != null) && 
(execCmd.getConfigurationTags().containsKey("cluster-env"))) {
       LOG.debug("AmbariManagementControllerImpl.createHostAction: created 
ExecutionCommand for host {}, role {}, roleCommand {}, and command ID {}, with 
cluster-env tags {}",

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
index a4ed080..4c28be3 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
@@ -84,6 +84,7 @@ import org.slf4j.LoggerFactory;
 import com.google.common.base.Objects;
 import com.google.gson.Gson;
 import com.google.gson.JsonElement;
+import com.google.gson.annotations.SerializedName;
 import com.google.inject.Inject;
 import com.google.inject.assistedinject.Assisted;
 import com.google.inject.assistedinject.AssistedInject;
@@ -106,12 +107,6 @@ public class UpgradeContext {
   public static final String COMMAND_PARAM_TASKS = "tasks";
   public static final String COMMAND_PARAM_STRUCT_OUT = "structured_out";
 
-  @Deprecated
-  @Experimental(
-      feature = ExperimentalFeature.PATCH_UPGRADES,
-      comment = "This isn't needed anymore, but many python classes still use 
it")
-  public static final String COMMAND_PARAM_DOWNGRADE_FROM_VERSION = 
"downgrade_from_version";
-
   /*
    * The cluster that the upgrade is for.
    */
@@ -185,8 +180,9 @@ public class UpgradeContext {
   private boolean m_autoSkipManualVerification = false;
 
   /**
-   * A set of services which are included in this upgrade. If this is empty,
-   * then all cluster services are included.
+   * A set of services which are included in this upgrade. This will never be
+   * empty - if all services of a cluster are included, then the cluster's
+   * current list of services is populated.
    */
   private final Set<String> m_services = new HashSet<>();
 
@@ -797,7 +793,6 @@ public class UpgradeContext {
    * <ul>
    * <li>{@link #COMMAND_PARAM_CLUSTER_NAME}
    * <li>{@link #COMMAND_PARAM_DIRECTION}
-   * <li>{@link #COMMAND_PARAM_DOWNGRADE_FROM_VERSION}
    * <li>{@link #COMMAND_PARAM_UPGRADE_TYPE}
    * <li>{@link KeyNames#REFRESH_CONFIG_TAGS_BEFORE_EXECUTION} - necessary in
    * order to have the commands contain the correct configurations. Otherwise,
@@ -816,10 +811,6 @@ public class UpgradeContext {
     parameters.put(COMMAND_PARAM_CLUSTER_NAME, m_cluster.getClusterName());
     parameters.put(COMMAND_PARAM_DIRECTION, direction.name().toLowerCase());
 
-    if (direction == Direction.DOWNGRADE) {
-      parameters.put(COMMAND_PARAM_DOWNGRADE_FROM_VERSION, 
m_repositoryVersion.getVersion());
-    }
-
     if (null != getType()) {
       // use the serialized attributes of the enum to convert it to a string,
       // but first we must convert it into an element so that we don't get a
@@ -868,6 +859,44 @@ public class UpgradeContext {
   }
 
   /**
+   * Gets a POJO of the upgrade suitable for serialization.
+   *
+   * @return the upgrade summary as a POJO.
+   */
+  public UpgradeSummary getUpgradeSummary() {
+    UpgradeSummary summary = new UpgradeSummary();
+    summary.direction = m_direction;
+    summary.type = m_type;
+    summary.orchestration = m_orchestration;
+    summary.isRevert = m_isRevert;
+
+    summary.services = new HashMap<>();
+
+    for (String serviceName : m_services) {
+      RepositoryVersionEntity sourceRepositoryVersion = 
m_sourceRepositoryMap.get(serviceName);
+      RepositoryVersionEntity targetRepositoryVersion = 
m_targetRepositoryMap.get(serviceName);
+      if (null == sourceRepositoryVersion || null == targetRepositoryVersion) {
+        LOG.warn("Unable to get the source/target repositories for {} for the 
upgrade summary",
+            serviceName);
+        continue;
+      }
+
+      UpgradeServiceSummary serviceSummary = new UpgradeServiceSummary();
+      serviceSummary.sourceRepositoryId = sourceRepositoryVersion.getId();
+      serviceSummary.sourceStackId = 
sourceRepositoryVersion.getStackId().getStackId();
+      serviceSummary.sourceVersion = sourceRepositoryVersion.getVersion();
+
+      serviceSummary.targetRepositoryId = targetRepositoryVersion.getId();
+      serviceSummary.targetStackId = 
targetRepositoryVersion.getStackId().getStackId();
+      serviceSummary.targetVersion = targetRepositoryVersion.getVersion();
+
+      summary.services.put(serviceName, serviceSummary);
+    }
+
+    return summary;
+  }
+
+  /**
    * Gets the set of services which will participate in the upgrade. The
   * services available in the repository are compared against those installed
    * in the cluster to arrive at the final subset.
@@ -1254,4 +1283,49 @@ public class UpgradeContext {
       return hostOrderItems;
     }
   }
+
+  /**
+   * The {@link UpgradeSummary} class is a simple POJO used to serialize the
+   * information about an upgrade.
+   */
+  public static class UpgradeSummary {
+    @SerializedName("direction")
+    public Direction direction;
+
+    @SerializedName("type")
+    public UpgradeType type;
+
+    @SerializedName("orchestration")
+    public RepositoryType orchestration;
+
+    @SerializedName("isRevert")
+    public boolean isRevert = false;
+
+    @SerializedName("services")
+    public Map<String, UpgradeServiceSummary> services;
+  }
+
+  /**
+   * The {@link UpgradeServiceSummary} class is used as a way to encapsulate 
the
+   * service source and target versions during an upgrade.
+   */
+  public static class UpgradeServiceSummary {
+    @SerializedName("sourceRepositoryId")
+    public long sourceRepositoryId;
+
+    @SerializedName("targetRepositoryId")
+    public long targetRepositoryId;
+
+    @SerializedName("sourceStackId")
+    public String sourceStackId;
+
+    @SerializedName("targetStackId")
+    public String targetStackId;
+
+    @SerializedName("sourceVersion")
+    public String sourceVersion;
+
+    @SerializedName("targetVersion")
+    public String targetVersion;
+  }
 }
\ No newline at end of file
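
For orientation, the summary above is serialized into the command roughly as sketched below (expressed here as a Python dict, since that is how the agent-side upgrade_summary module reads it). The key names come from the @SerializedName annotations above; the direction/type/orchestration strings, stacks, ids, and versions are made-up sample values.

  upgrade_summary_example = {
    "direction": "UPGRADE",        # sample value
    "type": "ROLLING",             # sample value
    "orchestration": "STANDARD",   # sample value
    "isRevert": False,
    "services": {
      "HIVE": {
        "sourceRepositoryId": 1,
        "sourceStackId": "HDP-2.5",
        "sourceVersion": "2.5.0.0-1234",
        "targetRepositoryId": 2,
        "targetStackId": "HDP-2.6",
        "targetVersion": "2.6.0.0-5678",
      },
    },
  }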

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
 
b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
index c9c66ac..933515b 100644
--- 
a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
+++ 
b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
@@ -125,7 +125,7 @@ def falcon(type, action = None, upgrade_type=None):
     # Generate atlas-application.properties.xml file
     if params.falcon_atlas_support and params.enable_atlas_hook:
       # If Atlas is added later than Falcon, this package will be absent.
-      if 
check_stack_feature(StackFeature.ATLAS_INSTALL_HOOK_PACKAGE_SUPPORT,params.current_version_formatted):
+      if 
check_stack_feature(StackFeature.ATLAS_INSTALL_HOOK_PACKAGE_SUPPORT,params.version):
         install_atlas_hook_packages(params.atlas_plugin_package, 
params.atlas_ubuntu_plugin_package, params.host_sys_prepped,
                                     
params.agent_stack_retry_on_unavailability, params.agent_stack_retry_count)
 
@@ -134,7 +134,7 @@ def falcon(type, action = None, upgrade_type=None):
 
       # Falcon 0.10 uses FALCON_EXTRA_CLASS_PATH.
       # Setup symlinks for older versions.
-      if params.current_version_formatted and 
check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT_2_3, 
params.current_version_formatted):
+      if check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT_2_3, 
params.version):
         setup_atlas_jar_symlinks("falcon", params.falcon_webinf_lib)
 
   if type == 'server':

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
index 3773918..b8e709a 100644
--- 
a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
@@ -27,10 +27,9 @@ from resource_management.libraries.functions import 
get_kinit_path
 from resource_management.libraries.script.script import Script
 import os
 from resource_management.libraries.functions.expect import expect
+from resource_management.libraries.functions import stack_features
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
-from resource_management.libraries.functions.version import 
format_stack_version
 from resource_management.libraries.functions import StackFeature
-from resource_management.libraries.functions.setup_atlas_hook import 
has_atlas_in_cluster
 
 config = Script.get_config()
 stack_root = status_params.stack_root
@@ -39,17 +38,13 @@ stack_name = status_params.stack_name
 agent_stack_retry_on_unavailability = 
config['hostLevelParams']['agent_stack_retry_on_unavailability']
 agent_stack_retry_count = expect("/hostLevelParams/agent_stack_retry_count", 
int)
 
-# New Cluster Stack Version that is defined during the RESTART of a Rolling 
Upgrade
-version = default("/commandParams/version", None)
+version = stack_features.get_stack_feature_version(config)
 
 stack_version_unformatted = status_params.stack_version_unformatted
 stack_version_formatted = status_params.stack_version_formatted
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 jdk_location = config['hostLevelParams']['jdk_location']
 
-# current host stack version
-current_version = default("/hostLevelParams/current_version", None)
-current_version_formatted = format_stack_version(current_version)
 
 etc_prefix_dir = "/etc/falcon"
 
@@ -57,7 +52,7 @@ etc_prefix_dir = "/etc/falcon"
 hadoop_home_dir = stack_select.get_hadoop_dir("home")
 hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
 
-if stack_version_formatted and 
check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted):
+if check_stack_feature(StackFeature.ROLLING_UPGRADE, version):
   # if this is a server action, then use the server binaries; smoke tests
   # use the client binaries
   server_role_dir_mapping = { 'FALCON_SERVER' : 'falcon-server',
@@ -135,7 +130,7 @@ kinit_path_local = 
get_kinit_path(default('/configurations/kerberos-env/executab
 supports_hive_dr = config['configurations']['falcon-env']['supports_hive_dr']
 # HDP 2.4 still supported the /usr/$STACK/$VERSION/falcon/data-mirroring 
folder, which had to be copied to HDFS
 # In HDP 2.5, an empty data-mirroring folder has to be created, and the 
extensions folder has to be uploaded to HDFS.
-supports_data_mirroring = supports_hive_dr and (stack_version_formatted and 
not check_stack_feature(StackFeature.FALCON_EXTENSIONS, 
stack_version_formatted))
+supports_data_mirroring = supports_hive_dr and not 
check_stack_feature(StackFeature.FALCON_EXTENSIONS, version)
 
 local_data_mirroring_dir = 
format('{stack_root}/current/falcon-server/data-mirroring')
 dfs_data_mirroring_dir = "/apps/data-mirroring"
@@ -156,22 +151,22 @@ falcon_atlas_support = False
 atlas_hook_cp = ""
 if enable_atlas_hook:
 
-  # stack_version doesn't contain a minor number of the stack (only first two 
numbers: 2.3). Get it from current_version_formatted
-  falcon_atlas_support = current_version_formatted and 
check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT_2_3, 
current_version_formatted) \
-      or check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT, 
stack_version_formatted)
+  # stack_version doesn't contain a minor number of the stack (only first two 
numbers: 2.3). Get it from the command version
+  falcon_atlas_support = 
check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT_2_3, version) \
+      or check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT, version)
 
-  if check_stack_feature(StackFeature.ATLAS_CONF_DIR_IN_PATH, 
stack_version_formatted):
+  if check_stack_feature(StackFeature.ATLAS_CONF_DIR_IN_PATH, version):
     atlas_conf_dir = format('{stack_root}/current/atlas-server/conf')
     atlas_home_dir = format('{stack_root}/current/atlas-server')
     atlas_hook_cp = atlas_conf_dir + os.pathsep + os.path.join(atlas_home_dir, 
"hook", "falcon", "*") + os.pathsep
-  elif check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, 
stack_version_formatted):
+  elif check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, version):
     atlas_hook_cp = format('{stack_root}/current/atlas-client/hook/falcon/*') 
+ os.pathsep
 
 atlas_application_class_addition = ""
 if falcon_atlas_support:
   # Some stack versions do not support Atlas Falcon hook. See 
stack_features.json
   # Packaging was different in older versions.
-  if current_version_formatted and 
check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT_2_3, 
current_version_formatted):
+  if check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT_2_3, version):
     atlas_application_class_addition = 
",\\\norg.apache.falcon.atlas.service.AtlasService"
     atlas_plugin_package = "atlas-metadata*-falcon-plugin"
     atlas_ubuntu_plugin_package = "atlas-metadata.*-falcon-plugin"

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
index 9b5cf43..8435aa2 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
@@ -27,6 +27,7 @@ from resource_management.libraries.functions.constants import 
Direction
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.version import 
format_stack_version
 from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions import upgrade_summary
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
 from resource_management.libraries.functions.security_commons import 
build_expectations
 from resource_management.libraries.functions.security_commons import 
cached_kinit_executor
@@ -179,10 +180,12 @@ class HiveMetastoreDefault(HiveMetastore):
     # since the configurations have not been written out yet during an upgrade
     # we need to choose the original legacy location
     schematool_hive_server_conf_dir = params.hive_server_conf_dir
-    if params.current_version is not None:
-      current_version = format_stack_version(params.current_version)
-      if not(check_stack_feature(StackFeature.CONFIG_VERSIONING, 
current_version)):
-        schematool_hive_server_conf_dir = LEGACY_HIVE_SERVER_CONF
+
+    upgrade_from_version = upgrade_summary.get_source_version("HIVE",
+      default_version = params.version_for_stack_feature_checks)
+
+    if not (check_stack_feature(StackFeature.CONFIG_VERSIONING, 
upgrade_from_version)):
+      schematool_hive_server_conf_dir = LEGACY_HIVE_SERVER_CONF
 
     env_dict = {
       'HIVE_CONF_DIR': schematool_hive_server_conf_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
index 17db489..12c9e1c 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
@@ -68,12 +68,8 @@ def deregister():
   # If upgrading, the upgrade-target hive binary should be used to call the 
--deregister command.
   # If downgrading, the downgrade-source hive binary should be used to call 
the --deregister command.
   # By now <stack-selector-tool> has been called to set 'current' to 
target-stack
-  if "downgrade" == params.upgrade_direction:
-    # hive_bin
-    downgrade_version = params.current_version
-    if params.downgrade_from_version:
-      downgrade_version = params.downgrade_from_version
-    hive_execute_path = _get_hive_execute_path(downgrade_version)
+  if params.downgrade_from_version is not None:
+    hive_execute_path = _get_hive_execute_path(params.downgrade_from_version)
 
   command = format('hive --config {hive_server_conf_dir} --service hiveserver2 
--deregister ' + current_hiveserver_version)
   Execute(command, user=params.hive_user, path=hive_execute_path, tries=1 )
@@ -114,12 +110,10 @@ def _get_current_hiveserver_version():
 
   try:
     # When downgrading the source version should be the version we are 
downgrading from
-    if "downgrade" == params.upgrade_direction:
-      if not params.downgrade_from_version:
-        raise Fail('The version from which we are downgrading from should be 
provided in \'downgrade_from_version\'')
+    source_version = params.version_for_stack_feature_checks
+    if params.downgrade_from_version is not None:
       source_version = params.downgrade_from_version
-    else:
-      source_version = params.current_version
+
     hive_execute_path = _get_hive_execute_path(source_version)
     version_hive_bin = params.hive_bin
     formatted_source_version = format_stack_version(source_version)

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
index a7d3915..1f2b644 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
@@ -64,7 +64,7 @@ def hive_service(name, action='start', upgrade_type=None):
     cmd = format("{start_hiveserver2_path} {hive_log_dir}/hive-server2.out 
{hive_log_dir}/hive-server2.err {pid_file} {hive_server_conf_dir} 
{hive_log_dir}")
 
 
-    if params.security_enabled and params.current_version and 
check_stack_feature(StackFeature.HIVE_SERVER2_KERBERIZED_ENV, 
params.current_version):
+    if params.security_enabled and 
check_stack_feature(StackFeature.HIVE_SERVER2_KERBERIZED_ENV, 
params.version_for_stack_feature_checks):
       hive_kinit_cmd = format("{kinit_path_local} -kt {hive_server2_keytab} 
{hive_principal}; ")
       Execute(hive_kinit_cmd, user=params.hive_user)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index c69340e..8b3912c 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -39,6 +39,7 @@ from resource_management.libraries.functions import 
StackFeature
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
 from resource_management.libraries.functions.stack_features import 
get_stack_feature_version
 from resource_management.libraries.functions.get_port_from_url import 
get_port_from_url
+from resource_management.libraries.functions import upgrade_summary
 from resource_management.libraries.functions.expect import expect
 from resource_management.libraries import functions
 from resource_management.libraries.functions.setup_atlas_hook import 
has_atlas_in_cluster
@@ -88,12 +89,9 @@ major_stack_version = 
get_major_version(stack_version_formatted_major)
 # It cannot be used during the initial Cluser Install because the version is 
not yet known.
 version = default("/commandParams/version", None)
 
-# current host stack version
-current_version = default("/hostLevelParams/current_version", None)
-
-# When downgrading the 'version' and 'current_version' are both pointing to 
the downgrade-target version
+# When downgrading the 'version' is pointing to the downgrade-target version
 # downgrade_from_version provides the source-version the downgrade is 
happening from
-downgrade_from_version = default("/commandParams/downgrade_from_version", None)
+downgrade_from_version = upgrade_summary.get_downgrade_from_version("HIVE")
 
 # get the correct version to use for checking stack features
 version_for_stack_feature_checks = get_stack_feature_version(config)
@@ -317,7 +315,8 @@ driver_curl_source = 
format("{jdk_location}/{jdbc_jar_name}")
 # normally, the JDBC driver would be referenced by 
<stack-root>/current/.../foo.jar
 # but in RU if <stack-selector-tool> is called and the restart fails, then 
this means that current pointer
 # is now pointing to the upgraded version location; that's bad for the cp 
command
-source_jdbc_file = 
format("{stack_root}/{current_version}/hive/lib/{jdbc_jar_name}")
+version_for_source_jdbc_file = 
upgrade_summary.get_source_version(default_version = 
version_for_stack_feature_checks)
+source_jdbc_file = 
format("{stack_root}/{version_for_source_jdbc_file}/hive/lib/{jdbc_jar_name}")
 
 check_db_connection_jar_name = "DBConnectionVerification.jar"
 check_db_connection_jar = 
format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_metastore.py
 
b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_metastore.py
index 9b5cf43..3404044 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_metastore.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_metastore.py
@@ -27,6 +27,7 @@ from resource_management.libraries.functions.constants import 
Direction
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.version import 
format_stack_version
 from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions import upgrade_summary
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
 from resource_management.libraries.functions.security_commons import 
build_expectations
 from resource_management.libraries.functions.security_commons import 
cached_kinit_executor
@@ -179,10 +180,12 @@ class HiveMetastoreDefault(HiveMetastore):
     # since the configurations have not been written out yet during an upgrade
     # we need to choose the original legacy location
     schematool_hive_server_conf_dir = params.hive_server_conf_dir
-    if params.current_version is not None:
-      current_version = format_stack_version(params.current_version)
-      if not(check_stack_feature(StackFeature.CONFIG_VERSIONING, 
current_version)):
-        schematool_hive_server_conf_dir = LEGACY_HIVE_SERVER_CONF
+
+    upgrade_from_version = upgrade_summary.get_source_version("HIVE",
+      default_version = params.version_for_stack_feature_checks)
+
+    if not(check_stack_feature(StackFeature.CONFIG_VERSIONING, 
upgrade_from_version)):
+      schematool_hive_server_conf_dir = LEGACY_HIVE_SERVER_CONF
 
     env_dict = {
       'HIVE_CONF_DIR': schematool_hive_server_conf_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server_upgrade.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server_upgrade.py
 
b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server_upgrade.py
index 17db489..85ae8b0 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server_upgrade.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server_upgrade.py
@@ -26,6 +26,7 @@ from resource_management.core import shell
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions import upgrade_summary
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.version import format_stack_version
 
@@ -68,12 +69,8 @@ def deregister():
   # If upgrading, the upgrade-target hive binary should be used to call the --deregister command.
   # If downgrading, the downgrade-source hive binary should be used to call the --deregister command.
   # By now <stack-selector-tool> has been called to set 'current' to target-stack
-  if "downgrade" == params.upgrade_direction:
-    # hive_bin
-    downgrade_version = params.current_version
-    if params.downgrade_from_version:
-      downgrade_version = params.downgrade_from_version
-    hive_execute_path = _get_hive_execute_path(downgrade_version)
+  if params.downgrade_from_version is not None:
+    hive_execute_path = _get_hive_execute_path(params.downgrade_from_version)
 
   command = format('hive --config {hive_server_conf_dir} --service hiveserver2 --deregister ' + current_hiveserver_version)
   Execute(command, user=params.hive_user, path=hive_execute_path, tries=1 )
@@ -114,12 +111,10 @@ def _get_current_hiveserver_version():
 
   try:
     # When downgrading the source version should be the version we are downgrading from
-    if "downgrade" == params.upgrade_direction:
-      if not params.downgrade_from_version:
-        raise Fail('The version from which we are downgrading from should be provided in \'downgrade_from_version\'')
+    source_version = params.version_for_stack_feature_checks
+    if params.downgrade_from_version is not None:
       source_version = params.downgrade_from_version
-    else:
-      source_version = params.current_version
+
     hive_execute_path = _get_hive_execute_path(source_version)
     version_hive_bin = params.hive_bin
     formatted_source_version = format_stack_version(source_version)
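The version selection above reduces to a simple preference: use the explicit downgrade source when one exists, otherwise the version already computed for stack-feature checks. A tiny sketch with illustrative version strings:

def pick_source_version(version_for_stack_feature_checks, downgrade_from_version):
  # Prefer the recorded downgrade source; otherwise the feature-check version.
  if downgrade_from_version is not None:
    return downgrade_from_version
  return version_for_stack_feature_checks

# During an upgrade (or outside of one) there is no downgrade source.
assert pick_source_version("2.6.0.0-1234", None) == "2.6.0.0-1234"
# During a downgrade the original source version wins.
assert pick_source_version("2.5.0.0-258", "2.6.0.0-1234") == "2.6.0.0-1234"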

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service.py
 
b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service.py
index a7d3915..1f2b644 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service.py
@@ -64,7 +64,7 @@ def hive_service(name, action='start', upgrade_type=None):
     cmd = format("{start_hiveserver2_path} {hive_log_dir}/hive-server2.out 
{hive_log_dir}/hive-server2.err {pid_file} {hive_server_conf_dir} 
{hive_log_dir}")
 
 
-    if params.security_enabled and params.current_version and check_stack_feature(StackFeature.HIVE_SERVER2_KERBERIZED_ENV, params.current_version):
+    if params.security_enabled and check_stack_feature(StackFeature.HIVE_SERVER2_KERBERIZED_ENV, params.version_for_stack_feature_checks):
       hive_kinit_cmd = format("{kinit_path_local} -kt {hive_server2_keytab} {hive_principal}; ")
       Execute(hive_kinit_cmd, user=params.hive_user)
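For reference, the branch above only changes which version the kerberized-HiveServer2 feature check runs against; the kinit command it guards is unchanged. A small sketch of that command with illustrative paths and principal:

security_enabled = True
supports_kerberized_hs2 = True  # stands in for the check_stack_feature(...) result
kinit_path_local = "/usr/bin/kinit"                                # illustrative
hive_server2_keytab = "/etc/security/keytabs/hive.service.keytab"  # illustrative
hive_principal = "hive/_HOST@EXAMPLE.COM"                          # illustrative

if security_enabled and supports_kerberized_hs2:
  hive_kinit_cmd = "%s -kt %s %s; " % (kinit_path_local, hive_server2_keytab, hive_principal)
  print(hive_kinit_cmd)  # the real script runs this via Execute(...) as the hive user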
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
index 77206e7..90d9067 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
@@ -38,6 +38,7 @@ from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.stack_features import get_stack_feature_version
+from resource_management.libraries.functions import upgrade_summary
 from resource_management.libraries.functions.get_port_from_url import get_port_from_url
 from resource_management.libraries.functions.expect import expect
 from resource_management.libraries import functions
@@ -86,12 +87,9 @@ stack_version_formatted = functions.get_stack_version('hive-server2')
 # It cannot be used during the initial Cluser Install because the version is not yet known.
 version = default("/commandParams/version", None)
 
-# current host stack version
-current_version = default("/hostLevelParams/current_version", None)
-
-# When downgrading the 'version' and 'current_version' are both pointing to the downgrade-target version
+# When downgrading the 'version' is pointing to the downgrade-target version
 # downgrade_from_version provides the source-version the downgrade is happening from
-downgrade_from_version = default("/commandParams/downgrade_from_version", None)
+downgrade_from_version = upgrade_summary.get_downgrade_from_version("HIVE")
 
 # get the correct version to use for checking stack features
 version_for_stack_feature_checks = get_stack_feature_version(config)
@@ -315,7 +313,8 @@ driver_curl_source = format("{jdk_location}/{jdbc_jar_name}")
 # normally, the JDBC driver would be referenced by <stack-root>/current/.../foo.jar
 # but in RU if <stack-selector-tool> is called and the restart fails, then this means that current pointer
 # is now pointing to the upgraded version location; that's bad for the cp command
-source_jdbc_file = format("{stack_root}/{current_version}/hive/lib/{jdbc_jar_name}")
+version_for_source_jdbc_file = upgrade_summary.get_source_version(default_version = version_for_stack_feature_checks)
+source_jdbc_file = format("{stack_root}/{version_for_source_jdbc_file}/hive/lib/{jdbc_jar_name}")
 
 check_db_connection_jar_name = "DBConnectionVerification.jar"
 check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/kafka.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/kafka.py
 
b/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/kafka.py
index 62a9003..3f9d109 100644
--- 
a/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/kafka.py
+++ 
b/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/kafka.py
@@ -45,35 +45,6 @@ def kafka(upgrade_type=None):
     effective_version = params.stack_version_formatted if upgrade_type is None else format_stack_version(params.version)
     Logger.info(format("Effective stack version: {effective_version}"))
 
-    # In HDP-2.2 (Apache Kafka 0.8.1.1) we used to generate broker.ids based on hosts and add them to
-    # kafka's server.properties. In future version brokers can generate their own ids based on zookeeper seq
-    # We need to preserve the broker.id when user is upgrading from HDP-2.2 to any higher version.
-    # Once its preserved it will be written to kafka.log.dirs/meta.properties and it will be used from there on
-    # similarly we need preserve port as well during the upgrade
-
-    if upgrade_type is not None and params.upgrade_direction == Direction.UPGRADE and \
-      check_stack_feature(StackFeature.CREATE_KAFKA_BROKER_ID, params.current_version) and \
-      check_stack_feature(StackFeature.KAFKA_LISTENERS, params.version):
-      if len(params.kafka_hosts) > 0 and params.hostname in params.kafka_hosts:
-        brokerid = str(sorted(params.kafka_hosts).index(params.hostname))
-        kafka_server_config['broker.id'] = brokerid
-        Logger.info(format("Calculating broker.id as {brokerid}"))
-      if 'port' in kafka_server_config:
-        port = kafka_server_config['port']
-        Logger.info(format("Port config from previous verson: {port}"))
-        listeners = kafka_server_config['listeners']
-        kafka_server_config['listeners'] = listeners.replace("6667", port)
-        Logger.info(format("Kafka listeners after the port update: 
{listeners}"))
-        del kafka_server_config['port']
-
-
-    if effective_version is not None and effective_version != "" and \
-      check_stack_feature(StackFeature.CREATE_KAFKA_BROKER_ID, effective_version):
-      if len(params.kafka_hosts) > 0 and params.hostname in params.kafka_hosts:
-        brokerid = str(sorted(params.kafka_hosts).index(params.hostname))
-        kafka_server_config['broker.id'] = brokerid
-        Logger.info(format("Calculating broker.id as {brokerid}"))
-
     # listeners and advertised.listeners are only added in 2.3.0.0 onwards.
     if effective_version is not None and effective_version != "" and \
        check_stack_feature(StackFeature.KAFKA_LISTENERS, effective_version):

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/kafka_broker.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/kafka_broker.py
 
b/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/kafka_broker.py
index 7ddd1ab..468768f 100644
--- 
a/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/kafka_broker.py
+++ 
b/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/kafka_broker.py
@@ -25,6 +25,7 @@ from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions import upgrade_summary
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.show_logs import show_logs
 from kafka import ensure_base_directories
@@ -50,16 +51,16 @@ class KafkaBroker(Script):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       stack_select.select_packages(params.version)
 
-    # This is extremely important since it should only be called if crossing the HDP 2.3.4.0 boundary. 
-    if params.current_version and params.version and params.upgrade_direction:
+    # This is extremely important since it should only be called if crossing the HDP 2.3.4.0 boundary.
+    if params.version and params.upgrade_direction:
+    if params.version and params.upgrade_direction:
       src_version = dst_version = None
       if params.upgrade_direction == Direction.UPGRADE:
-        src_version = format_stack_version(params.current_version)
-        dst_version = format_stack_version(params.version)
+        src_version = upgrade_summary.get_source_version("KAFKA", default_version = params.version)
+        dst_version = upgrade_summary.get_target_version("KAFKA", default_version = params.version)
       else:
         # These represent the original values during the UPGRADE direction
-        src_version = format_stack_version(params.version)
-        dst_version = format_stack_version(params.downgrade_from_version)
+        src_version = upgrade_summary.get_target_version("KAFKA", default_version = params.version)
+        dst_version = upgrade_summary.get_source_version("KAFKA", default_version = params.version)
 
       if not check_stack_feature(StackFeature.KAFKA_ACL_MIGRATION_SUPPORT, src_version) and check_stack_feature(StackFeature.KAFKA_ACL_MIGRATION_SUPPORT, dst_version):
         # Calling the acl migration script requires the configs to be present.
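The feature check above is the whole point of the rewrite: run the ACL migration only when the source side lacks KAFKA_ACL_MIGRATION_SUPPORT and the destination side has it. A toy sketch of that boundary test (the 2.3.4.0 threshold mirrors the comment in the script; the helper is an illustrative stand-in for check_stack_feature):

def supports_acl_migration(version):
  parts = tuple(int(piece) for piece in version.split("-")[0].split("."))
  return parts >= (2, 3, 4, 0)

def should_migrate_acls(src_version, dst_version):
  # Fire only when the upgrade/downgrade actually crosses the feature boundary.
  return (not supports_acl_migration(src_version)) and supports_acl_migration(dst_version)

print(should_migrate_acls("2.3.0.0-2557", "2.4.2.0-258"))   # True: crosses the boundary
print(should_migrate_acls("2.4.0.0-169", "2.6.0.0-1234"))   # False: both sides support it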

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/params.py
index 4d0448f..869c329 100644
--- 
a/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/params.py
@@ -32,6 +32,7 @@ import status_params
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions import upgrade_summary
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
 from resource_management.libraries.functions.setup_ranger_plugin_xml import get_audit_configs, generate_ranger_service_config
@@ -46,10 +47,6 @@ retryAble = default("/commandParams/command_retry_enabled", False)
 # Version being upgraded/downgraded to
 version = default("/commandParams/version", None)
 
-# Version that is CURRENT.
-current_version = default("/hostLevelParams/current_version", None)
-
-
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
@@ -61,9 +58,9 @@ stack_supports_ranger_kerberos = 
check_stack_feature(StackFeature.RANGER_KERBERO
 stack_supports_ranger_audit_db = check_stack_feature(StackFeature.RANGER_AUDIT_DB_SUPPORT, version_for_stack_feature_checks)
 stack_supports_core_site_for_ranger_plugin = check_stack_feature(StackFeature.CORE_SITE_FOR_RANGER_PLUGINS_SUPPORT, version_for_stack_feature_checks)
 
-# When downgrading the 'version' and 'current_version' are both pointing to the downgrade-target version
+# When downgrading the 'version' is pointing to the downgrade-target version
 # downgrade_from_version provides the source-version the downgrade is happening from
-downgrade_from_version = default("/commandParams/downgrade_from_version", None)
+downgrade_from_version = upgrade_summary.get_downgrade_from_version("KAFKA")
 
 hostname = config['hostname']
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/upgrade.py
 
b/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/upgrade.py
index b6e4046..fe8389d 100644
--- 
a/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/upgrade.py
+++ 
b/ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/upgrade.py
@@ -43,9 +43,6 @@ def run_migration(env, upgrade_type):
   if params.upgrade_direction is None:
     raise Fail('Parameter "upgrade_direction" is missing.')
 
-  if params.upgrade_direction == Direction.DOWNGRADE and params.downgrade_from_version is None:
-    raise Fail('Parameter "downgrade_from_version" is missing.')
-
   if not params.security_enabled:
     Logger.info("Skip running the Kafka ACL migration script since cluster 
security is not enabled.")
     return

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka.py
 
b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka.py
index 3fe1e2d..3aa3473 100644
--- 
a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka.py
+++ 
b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka.py
@@ -45,35 +45,6 @@ def kafka(upgrade_type=None):
     effective_version = params.stack_version_formatted if upgrade_type is None else format_stack_version(params.version)
     Logger.info(format("Effective stack version: {effective_version}"))
 
-    # In HDP-2.2 (Apache Kafka 0.8.1.1) we used to generate broker.ids based on hosts and add them to
-    # kafka's server.properties. In future version brokers can generate their own ids based on zookeeper seq
-    # We need to preserve the broker.id when user is upgrading from HDP-2.2 to any higher version.
-    # Once its preserved it will be written to kafka.log.dirs/meta.properties and it will be used from there on
-    # similarly we need preserve port as well during the upgrade
-
-    if upgrade_type is not None and params.upgrade_direction == Direction.UPGRADE and \
-      check_stack_feature(StackFeature.CREATE_KAFKA_BROKER_ID, params.current_version) and \
-      check_stack_feature(StackFeature.KAFKA_LISTENERS, params.version):
-      if len(params.kafka_hosts) > 0 and params.hostname in params.kafka_hosts:
-        brokerid = str(sorted(params.kafka_hosts).index(params.hostname))
-        kafka_server_config['broker.id'] = brokerid
-        Logger.info(format("Calculating broker.id as {brokerid}"))
-      if 'port' in kafka_server_config:
-        port = kafka_server_config['port']
-        Logger.info(format("Port config from previous verson: {port}"))
-        listeners = kafka_server_config['listeners']
-        kafka_server_config['listeners'] = listeners.replace("6667", port)
-        Logger.info(format("Kafka listeners after the port update: 
{listeners}"))
-        del kafka_server_config['port']
-
-
-    if effective_version is not None and effective_version != "" and \
-      check_stack_feature(StackFeature.CREATE_KAFKA_BROKER_ID, effective_version):
-      if len(params.kafka_hosts) > 0 and params.hostname in params.kafka_hosts:
-        brokerid = str(sorted(params.kafka_hosts).index(params.hostname))
-        kafka_server_config['broker.id'] = brokerid
-        Logger.info(format("Calculating broker.id as {brokerid}"))
-
     # listeners and advertised.listeners are only added in 2.3.0.0 onwards.
     if effective_version is not None and effective_version != "" and \
        check_stack_feature(StackFeature.KAFKA_LISTENERS, effective_version):

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
 
b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
index 0910156..2094a6f 100644
--- 
a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
+++ 
b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
@@ -25,6 +25,7 @@ from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions import upgrade_summary
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.show_logs import show_logs
 from kafka import ensure_base_directories
@@ -47,19 +48,22 @@ class KafkaBroker(Script):
     import params
     env.set_params(params)
 
+    # grab the current version of the component
+    pre_upgrade_version = stack_select.get_role_component_current_stack_version()
+
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       stack_select.select_packages(params.version)
 
     # This is extremely important since it should only be called if crossing the HDP 2.3.4.0 boundary.
-    if params.current_version and params.version and params.upgrade_direction:
+    if params.version and params.upgrade_direction:
       src_version = dst_version = None
       if params.upgrade_direction == Direction.UPGRADE:
-        src_version = format_stack_version(params.current_version)
-        dst_version = format_stack_version(params.version)
+        src_version = upgrade_summary.get_source_version("KAFKA", default_version = params.version)
+        dst_version = upgrade_summary.get_target_version("KAFKA", default_version = params.version)
       else:
         # These represent the original values during the UPGRADE direction
-        src_version = format_stack_version(params.version)
-        dst_version = format_stack_version(params.downgrade_from_version)
+        src_version = upgrade_summary.get_target_version("KAFKA", default_version = params.version)
+        dst_version = upgrade_summary.get_source_version("KAFKA", default_version = params.version)
 
       if not check_stack_feature(StackFeature.KAFKA_ACL_MIGRATION_SUPPORT, src_version) and check_stack_feature(StackFeature.KAFKA_ACL_MIGRATION_SUPPORT, dst_version):
         # Calling the acl migration script requires the configs to be present.

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
index c7e84fc..a62265b 100644
--- 
a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
@@ -46,10 +46,6 @@ retryAble = default("/commandParams/command_retry_enabled", False)
 # Version being upgraded/downgraded to
 version = default("/commandParams/version", None)
 
-# Version that is CURRENT.
-current_version = default("/hostLevelParams/current_version", None)
-
-
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
@@ -61,10 +57,6 @@ stack_supports_ranger_kerberos = 
check_stack_feature(StackFeature.RANGER_KERBERO
 stack_supports_ranger_audit_db = check_stack_feature(StackFeature.RANGER_AUDIT_DB_SUPPORT, version_for_stack_feature_checks)
 stack_supports_core_site_for_ranger_plugin = check_stack_feature(StackFeature.CORE_SITE_FOR_RANGER_PLUGINS_SUPPORT, version_for_stack_feature_checks)
 
-# When downgrading the 'version' and 'current_version' are both pointing to the downgrade-target version
-# downgrade_from_version provides the source-version the downgrade is happening from
-downgrade_from_version = default("/commandParams/downgrade_from_version", None)
-
 hostname = config['hostname']
 
 # default kafka parameters

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/upgrade.py
 
b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/upgrade.py
index b6e4046..e79a8ad 100644
--- 
a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/upgrade.py
+++ 
b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/upgrade.py
@@ -43,9 +43,6 @@ def run_migration(env, upgrade_type):
   if params.upgrade_direction is None:
     raise Fail('Parameter "upgrade_direction" is missing.')
 
-  if params.upgrade_direction == Direction.DOWNGRADE and params.downgrade_from_version is None:
-    raise Fail('Parameter "downgrade_from_version" is missing.')
-
   if not params.security_enabled:
     Logger.info("Skip running the Kafka ACL migration script since cluster 
security is not enabled.")
     return
@@ -53,13 +50,11 @@ def run_migration(env, upgrade_type):
   Logger.info("Upgrade type: {0}, direction: {1}".format(str(upgrade_type), 
params.upgrade_direction))
 
   # If the schema upgrade script exists in the version upgrading to, then 
attempt to upgrade/downgrade it while still using the present bits.
-  kafka_acls_script = None
+  kafka_acls_script = 
format("{stack_root}/{version_for_stack_feature_checks}/kafka/bin/kafka-acls.sh")
   command_suffix = ""
   if params.upgrade_direction == Direction.UPGRADE:
-    kafka_acls_script = format("{stack_root}/{version}/kafka/bin/kafka-acls.sh")
     command_suffix = "--upgradeAcls"
   elif params.upgrade_direction == Direction.DOWNGRADE:
-    kafka_acls_script = format("{stack_root}/{downgrade_from_version}/kafka/bin/kafka-acls.sh")
     command_suffix = "--downgradeAcls"
 
   if kafka_acls_script is not None:
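With the script path now resolved once from version_for_stack_feature_checks, only the suffix differs per direction. A short sketch of the command the block above ends up assembling (paths and direction are illustrative; the real script hands the command to Execute() as the Kafka service user):

stack_root = "/usr/hdp"                            # illustrative
version_for_stack_feature_checks = "2.6.0.0-1234"  # illustrative
kafka_acls_script = "%s/%s/kafka/bin/kafka-acls.sh" % (stack_root, version_for_stack_feature_checks)

upgrade_direction = "upgrade"  # or "downgrade"
command_suffix = "--upgradeAcls" if upgrade_direction == "upgrade" else "--downgradeAcls"

print("%s %s" % (kafka_acls_script, command_suffix))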

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
index 4558069..7ddf0c8 100644
--- 
a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
@@ -32,7 +32,9 @@ from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
 from status_params import *
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions import stack_select, conf_select
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions import upgrade_summary
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.stack_features import get_stack_feature_version
@@ -64,7 +66,7 @@ stack_supports_core_site_for_ranger_plugin = 
check_stack_feature(StackFeature.CO
 
 # This is the version whose state is CURRENT. During an RU, this is the source version.
 # DO NOT format it since we need the build number too.
-upgrade_from_version = default("/hostLevelParams/current_version", None)
+upgrade_from_version = upgrade_summary.get_source_version()
 
 # server configurations
 # Default value used in HDP 2.3.0.0 and earlier.

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/KNOX/0.5.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.3.0/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.3.0/package/scripts/params_linux.py
index 4558069..b39123d 100644
--- 
a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.3.0/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.3.0/package/scripts/params_linux.py
@@ -36,6 +36,7 @@ from resource_management.libraries.functions import stack_select, conf_select
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.stack_features import get_stack_feature_version
+from resource_management.libraries.functions import upgrade_summary
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions import is_empty
 from resource_management.libraries.functions.setup_ranger_plugin_xml import get_audit_configs, generate_ranger_service_config
@@ -64,7 +65,7 @@ stack_supports_core_site_for_ranger_plugin = 
check_stack_feature(StackFeature.CO
 
 # This is the version whose state is CURRENT. During an RU, this is the source version.
 # DO NOT format it since we need the build number too.
-upgrade_from_version = default("/hostLevelParams/current_version", None)
+upgrade_from_version = upgrade_summary.get_source_version()
 
 # server configurations
 # Default value used in HDP 2.3.0.0 and earlier.

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
 
b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
index 695395a..0796ad1 100644
--- 
a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
+++ 
b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
@@ -424,8 +424,8 @@ def 
copy_atlas_hive_hook_to_dfs_share_lib(upgrade_type=None, upgrade_direction=N
                  "and performing a Downgrade.")
     return
 
-  current_version = get_current_version()
-  atlas_hive_hook_dir = format("{stack_root}/{current_version}/atlas/hook/hive/")
+  effective_version = get_current_version()
+  atlas_hive_hook_dir = format("{stack_root}/{effective_version}/atlas/hook/hive/")
   if not os.path.exists(atlas_hive_hook_dir):
     Logger.error(format("ERROR. Atlas is installed in cluster but this Oozie 
server doesn't "
                         "contain directory {atlas_hive_hook_dir}"))

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
index 21bfd0f..c31181d 100644
--- 
a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
@@ -76,7 +76,6 @@ stack_supports_ranger_tagsync_ssl_xml_support = 
check_stack_feature(StackFeature
 stack_supports_ranger_solr_configs = check_stack_feature(StackFeature.RANGER_SOLR_CONFIG_SUPPORT, version_for_stack_feature_checks)
 stack_supports_secure_ssl_password = check_stack_feature(StackFeature.SECURE_RANGER_SSL_PASSWORD, version_for_stack_feature_checks)
 
-downgrade_from_version = default("/commandParams/downgrade_from_version", None)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 
 ranger_conf    = '/etc/ranger/admin/conf'

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/package/scripts/params.py
index e121ccb..24f459c 100644
--- 
a/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/package/scripts/params.py
@@ -75,7 +75,6 @@ stack_supports_ranger_tagsync_ssl_xml_support = 
check_stack_feature(StackFeature
 stack_supports_ranger_solr_configs = check_stack_feature(StackFeature.RANGER_SOLR_CONFIG_SUPPORT, version_for_stack_feature_checks)
 stack_supports_secure_ssl_password = check_stack_feature(StackFeature.SECURE_RANGER_SSL_PASSWORD, version_for_stack_feature_checks)
 
-downgrade_from_version = default("/commandParams/downgrade_from_version", None)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 
 ranger_conf    = '/etc/ranger/admin/conf'
