http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_server_upgrade.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_server_upgrade.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_server_upgrade.py
index 9c862a5..71263d4 100755
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_server_upgrade.py
+++ 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_server_upgrade.py
@@ -64,11 +64,7 @@ def post_upgrade_deregister():
   # If downgrading, the downgrade-source hive binary should be used to call 
the --deregister command.
   # By now hdp-select has been called to set 'current' to target-stack
   if "downgrade" == params.upgrade_direction:
-    # hive_bin
-    downgrade_version = params.current_version
-    if params.downgrade_from_version:
-      downgrade_version = params.downgrade_from_version
-    hive_execute_path = _get_hive_execute_path(downgrade_version)
+    hive_execute_path = _get_hive_execute_path(params.version_for_stack_feature_checks)
 
   command = format('hive --config {hive_server_conf_dir} --service hiveserver2 
--deregister ' + current_hiveserver_version)
   Execute(command, user=params.hive_user, path=hive_execute_path, tries=1 )
@@ -107,15 +103,9 @@ def _get_current_hiveserver_version():
 
   try:
     # When downgrading the source version should be the version we are 
downgrading from
-    if "downgrade" == params.upgrade_direction:
-      if not params.downgrade_from_version:
-        raise Fail('The version from which we are downgrading from should be 
provided in \'downgrade_from_version\'')
-      source_version = params.downgrade_from_version
-    else:
-      source_version = params.current_version
-    hive_execute_path = _get_hive_execute_path(source_version)
+    hive_execute_path = _get_hive_execute_path(params.version_for_stack_feature_checks)
     version_hive_bin = params.hive_bin
-    formatted_source_version = format_stack_version(source_version)
+    formatted_source_version = format_stack_version(params.version_for_stack_feature_checks)
     if formatted_source_version and compare_versions(formatted_source_version, 
"4.1") >= 0:
       version_hive_bin = format('/usr/iop/{source_version}/hive/bin')
     command = format('{version_hive_bin}/hive --version')

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/params_linux.py
index caba6ee..770d3c5 100755
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/params_linux.py
@@ -36,6 +36,7 @@ from resource_management.libraries.functions import 
get_kinit_path
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions import upgrade_summary
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
 from resource_management.libraries.functions.stack_features import 
get_stack_feature_version
 from resource_management.libraries.functions.get_port_from_url import 
get_port_from_url
@@ -63,16 +64,9 @@ iop_stack_version = 
functions.get_stack_version('hive-server2')
 # It cannot be used during the initial Cluser Install because the version is 
not yet known.
 version = default("/commandParams/version", None)
 
-# current host stack version
-current_version = default("/hostLevelParams/current_version", None)
-
 # get the correct version to use for checking stack features
 version_for_stack_feature_checks = get_stack_feature_version(config)
 
-# When downgrading the 'version' and 'current_version' are both pointing to 
the downgrade-target version
-# downgrade_from_version provides the source-version the downgrade is 
happening from 
-downgrade_from_version = default("/commandParams/downgrade_from_version", None)
-
 # Upgrade direction
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 stack_supports_ranger_audit_db = 
check_stack_feature(StackFeature.RANGER_AUDIT_DB_SUPPORT, 
version_for_stack_feature_checks)
@@ -314,7 +308,8 @@ if upgrade_direction:
 # normally, the JDBC driver would be referenced by /usr/hdp/current/.../foo.jar
 # but in RU if hdp-select is called and the restart fails, then this means 
that current pointer
 # is now pointing to the upgraded version location; that's bad for the cp 
command
-source_jdbc_file = format("/usr/iop/{current_version}/hive/lib/{jdbc_jar_name}")
+version_for_source_jdbc_file = upgrade_summary.get_source_version(default_version = version_for_stack_feature_checks)
+source_jdbc_file = format("/usr/iop/{version_for_source_jdbc_file}/hive/lib/{jdbc_jar_name}")
 
 jdk_location = config['hostLevelParams']['jdk_location']
 driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py
index 772ecad..16e7039 100755
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py
+++ 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py
@@ -44,19 +44,16 @@ class KafkaBroker(Script):
     import params
     env.set_params(params)
 
+    # grab the current version of the component
+    pre_upgrade_version = stack_select.get_role_component_current_stack_version()
+
     if params.version and 
compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
       stack_select.select_packages(params.version)
 
     # This is extremely important since it should only be called if crossing 
the IOP 4.2 boundary.
-    if params.current_version and params.version and params.upgrade_direction:
-      src_version = dst_version = None
-      if params.upgrade_direction == Direction.UPGRADE:
-        src_version = format_stack_version(params.current_version)
-        dst_version = format_stack_version(params.version)
-      else:
-        # These represent the original values during the UPGRADE direction
-        src_version = format_stack_version(params.version)
-        dst_version = format_stack_version(params.downgrade_from_version)
+    if pre_upgrade_version and params.version_for_stack_feature_checks and params.upgrade_direction:
+      src_version = format_stack_version(pre_upgrade_version)
+      dst_version = format_stack_version(params.version_for_stack_feature_checks)
 
       if compare_versions(src_version, '4.2.0.0') < 0 and 
compare_versions(dst_version, '4.2.0.0') >= 0:
         # Upgrade from IOP 4.1 to 4.2, Calling the acl migration script 
requires the configs to be present.

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/params.py
index 2ae0e42..07e7ad1 100755
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/params.py
@@ -22,7 +22,7 @@ from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.version import 
format_stack_version, compare_versions
 from resource_management.libraries.functions.default import default
 from utils import get_bare_principal
-from resource_management.libraries.functions.get_stack_version import 
get_stack_version
+from resource_management.libraries.functions.stack_features import 
get_stack_feature_version
 from resource_management.libraries.functions.is_empty import is_empty
 import status_params
 from resource_management.core.logger import Logger
@@ -40,8 +40,6 @@ retryAble = default("/commandParams/command_retry_enabled", 
False)
 
 # Version being upgraded/downgraded to
 version = default("/commandParams/version", None)
-# Version that is CURRENT.
-current_version = default("/hostLevelParams/current_version", None)
 
 host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
@@ -49,9 +47,8 @@ stack_version_unformatted = 
str(config['hostLevelParams']['stack_version'])
 iop_stack_version = format_stack_version(stack_version_unformatted)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 
-# When downgrading the 'version' and 'current_version' are both pointing to 
the downgrade-target version
-# downgrade_from_version provides the source-version the downgrade is 
happening from
-downgrade_from_version = default("/commandParams/downgrade_from_version", None)
+# get the correct version to use for checking stack features
+version_for_stack_feature_checks = get_stack_feature_version(config)
 
 hostname = config['hostname']
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/upgrade.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/upgrade.py
index f0d278b..14a7f83 100755
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/upgrade.py
+++ 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/upgrade.py
@@ -42,9 +42,6 @@ def run_migration(env, upgrade_type):
   if params.upgrade_direction is None:
     raise Fail('Parameter "upgrade_direction" is missing.')
 
-  if params.upgrade_direction == Direction.DOWNGRADE and 
params.downgrade_from_version is None:
-    raise Fail('Parameter "downgrade_from_version" is missing.')
-
   if not params.security_enabled:
     Logger.info("Skip running the Kafka ACL migration script since cluster 
security is not enabled.")
     return
@@ -57,9 +54,6 @@ def run_migration(env, upgrade_type):
   if params.upgrade_direction == Direction.UPGRADE:
     kafka_acls_script = format("/usr/iop/{version}/kafka/bin/kafka-acls.sh")
     command_suffix = "--upgradeAcls"
-  # elif params.upgrade_direction == Direction.DOWNGRADE:
-  #   kafka_acls_script = 
format("/usr/iop/{downgrade_from_version}/kafka/bin/kafka-acls.sh")
-  #   command_suffix = "--downgradeAcls"
 
   if kafka_acls_script is not None:
     if os.path.exists(kafka_acls_script):

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KNOX/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KNOX/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KNOX/package/scripts/params.py
index 89f801a..48f33bb 100755
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KNOX/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KNOX/package/scripts/params.py
@@ -23,6 +23,7 @@ from resource_management.libraries.functions.version import 
format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import upgrade_summary
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
 from resource_management.libraries.functions.stack_features import 
get_stack_feature_version
 from resource_management.libraries.functions.constants import StackFeature
@@ -46,7 +47,7 @@ version = default("/commandParams/version", None)
 
 # This is the version whose state is CURRENT. During an RU, this is the source 
version.
 # DO NOT format it since we need the build number too.
-upgrade_from_version = default("/hostLevelParams/current_version", None)
+upgrade_from_version = upgrade_summary.get_source_version()
 
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 stack_version = format_stack_version(stack_version_unformatted)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/params.py
index 30935f7..dc7ed3b 100755
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/params.py
@@ -62,7 +62,6 @@ stack_supports_ranger_kerberos = 
check_stack_feature(StackFeature.RANGER_KERBERO
 stack_supports_config_versioning = 
check_stack_feature(StackFeature.CONFIG_VERSIONING, 
version_for_stack_feature_checks)
 
 
-downgrade_from_version = default("/commandParams/downgrade_from_version", None)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 ranger_home    = '/usr/iop/current/ranger-admin'
 # ranger_conf    = '/etc/ranger/admin/conf'  # commented as we have below 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/params.py
index 8e901ff..a46f06b 100755
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/params.py
@@ -43,7 +43,6 @@ java64_home = config['hostLevelParams']['java_home']
 version = default("/commandParams/version", None)
 
 # current host stack version
-current_version = default("/hostLevelParams/current_version", None)
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 iop_stack_version = format_stack_version(stack_version_unformatted)
 
@@ -98,7 +97,7 @@ for host in config['clusterHostInfo']['zookeeper_hosts']:
   if index < len(config['clusterHostInfo']['zookeeper_hosts']):
     zookeeper_quorum += ","
 
-if (current_version is not None and 
compare_versions(format_stack_version(current_version), '4.2.0.0') >=0 ) or  
compare_versions(iop_stack_version, '4.2.0.0')>= 0:
+if (version is not None and compare_versions(format_stack_version(version), '4.2.0.0') >=0 ) or  compare_versions(iop_stack_version, '4.2.0.0')>= 0:
   if upgrade_direction is not None and upgrade_direction == 
Direction.DOWNGRADE and version is not None and 
compare_versions(format_stack_version(version), '4.2.0.0') < 0:
     solr_data_dir=default("/configurations/solr-env/solr_lib_dir", None)
   else:

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_upgrade.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_upgrade.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_upgrade.py
index 55a61b6..395c0e8 100755
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_upgrade.py
+++ 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_upgrade.py
@@ -70,13 +70,13 @@ class SolrServerUpgrade(Script):
     import params
     env.set_params(params)
 
-    if compare_versions(format_stack_version(params.current_version), '4.2.0.0') >= 0:
+    if compare_versions(format_stack_version(params.version), '4.2.0.0') >= 0:
       solr_home_dir=params.solr_data_dir
     else: #4.1.0.0
       solr_home_dir=params.old_lib_dir + "/data"
 
     unique = get_unique_id_and_date()
-    backup_solr_dir="/tmp/upgrades/{0}/solr_{1}".format(params.current_version, unique)
+    backup_solr_dir="/tmp/upgrades/{0}/solr_{1}".format(params.version, unique)
     backup_solr_cores="/tmp/solr/cores"
 
     if os.path.isdir(solr_home_dir) and not os.path.isdir(backup_solr_dir):

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/params.py
index 8ec5af8..5c39914 100755
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/params.py
@@ -41,12 +41,10 @@ component_directory = 
Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP,
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-current_version = default("/hostLevelParams/current_version", None)
-iop_full_version = format_stack_version(current_version)
-
 # New Cluster Stack Version that is defined during the RESTART of a Rolling 
Upgrade
 version = default("/commandParams/version", None)
 stack_name = default("/hostLevelParams/stack_name", None)
+iop_full_version = format_stack_version(version)
 
 hadoop_home = "/usr/iop/current/hadoop-client"
 spark_conf = format("/usr/iop/current/{component_directory}/conf")

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/params.py
index e9b8144..6af29dd 100755
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/params.py
@@ -38,7 +38,6 @@ stack_name = default("/hostLevelParams/stack_name", None)
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling 
Upgrade
 version = default("/commandParams/version", None)
-current_version = default("/hostLevelParams/current_version", None)
 
 #hadoop params
 role_root = "zookeeper-client"

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper.py
index 9253f43..bc311c7 100755
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper.py
+++ 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper.py
@@ -72,8 +72,8 @@ def zookeeper(type = None, upgrade_type=None):
          content = myid
     )
     # This path may be missing after Ambari upgrade. We need to create it.
-    if (upgrade_type == "rolling") and (not 
os.path.exists("/usr/iop/current/zookeeper-server")) and params.current_version:
-      conf_select(params.stack_name, "zookeeper", params.current_version)
+    if (upgrade_type == "rolling") and (not 
os.path.exists("/usr/iop/current/zookeeper-server")) and params.version:
+      conf_select(params.stack_name, "zookeeper", params.version)
       stack_select.select_packages(params.version)
       #Execute(format("iop-select set zookeeper-server {version}"))
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_service.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_service.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_service.py
index 1dc24cd..0727970 100755
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_service.py
+++ 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_service.py
@@ -27,9 +27,9 @@ def zookeeper_service(action='start', upgrade_type=None):
   import params
 
   # This path may be missing after Ambari upgrade. We need to create it.
-  if upgrade_type is None and not 
os.path.exists("/usr/iop/current/zookeeper-server") and params.current_version \
+  if upgrade_type is None and not 
os.path.exists("/usr/iop/current/zookeeper-server") and params.version \
     and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
-    conf_select.select(params.stack_name, "zookeeper", params.current_version)
+    conf_select.select(params.stack_name, "zookeeper", params.version)
     stack_select.select("zookeeper-server", params.version)
 
   cmd = format("env ZOOCFGDIR={config_dir} ZOOCFG=zoo.cfg 
{zk_bin}/zkServer.sh")

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/java/org/apache/ambari/server/StateRecoveryManagerTest.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/StateRecoveryManagerTest.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/StateRecoveryManagerTest.java
index b048d04..8b94338 100644
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/StateRecoveryManagerTest.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/StateRecoveryManagerTest.java
@@ -89,8 +89,8 @@ public class StateRecoveryManagerTest {
       getHostVersionMock("install_failed_version", 
RepositoryVersionState.INSTALL_FAILED, installFailedHostVersionCapture),
       getHostVersionMock("installing_version", 
RepositoryVersionState.INSTALLING, installingHostVersionCapture),
       getHostVersionMock("installed_version", 
RepositoryVersionState.INSTALLED, installedHostVersionCapture),
-      getHostVersionMock("out_of_sync_version", 
RepositoryVersionState.OUT_OF_SYNC, outOfSyncHostVersionCapture),
-      getHostVersionMock("current_version", RepositoryVersionState.CURRENT, 
currentHostVersionCapture)));
+        getHostVersionMock("out_of_sync_version", 
RepositoryVersionState.OUT_OF_SYNC,
+            outOfSyncHostVersionCapture)));
 
     // Adding all possible cluster version states
 
@@ -101,14 +101,13 @@ public class StateRecoveryManagerTest {
     final Capture<RepositoryVersionState> upgradeFailedClusterVersionCapture = 
EasyMock.newCapture();
     final Capture<RepositoryVersionState> upgradingClusterVersionCapture = 
EasyMock.newCapture();
     final Capture<RepositoryVersionState> upgradedClusterVersionCapture = 
EasyMock.newCapture();
-    final Capture<RepositoryVersionState> currentClusterVersionCapture = 
EasyMock.newCapture();
 
     
expect(serviceComponentDesiredStateDAOMock.findAll()).andReturn(Lists.newArrayList(
       getDesiredStateEntityMock("install_failed_version", 
RepositoryVersionState.INSTALL_FAILED, installFailedClusterVersionCapture),
       getDesiredStateEntityMock("installing_version", 
RepositoryVersionState.INSTALLING, installingClusterVersionCapture),
       getDesiredStateEntityMock("installed_version", 
RepositoryVersionState.INSTALLED, installedClusterVersionCapture),
-      getDesiredStateEntityMock("out_of_sync_version", 
RepositoryVersionState.OUT_OF_SYNC, outOfSyncClusterVersionCapture),
-      getDesiredStateEntityMock("current_version", 
RepositoryVersionState.CURRENT, currentClusterVersionCapture)));
+        getDesiredStateEntityMock("out_of_sync_version", 
RepositoryVersionState.OUT_OF_SYNC,
+            outOfSyncClusterVersionCapture)));
 
     replay(hostVersionDAOMock, serviceComponentDesiredStateDAOMock);
 
@@ -132,7 +131,6 @@ public class StateRecoveryManagerTest {
     assertFalse(upgradeFailedClusterVersionCapture.hasCaptured());
     assertFalse(upgradingClusterVersionCapture.hasCaptured());
     assertFalse(upgradedClusterVersionCapture.hasCaptured());
-    assertFalse(currentClusterVersionCapture.hasCaptured());
   }
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
index c068c5b..0763ee9 100644
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
@@ -56,12 +56,14 @@ import 
org.apache.ambari.server.actionmanager.StageFactoryImpl;
 import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.dao.HostDAO;
 import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.encryption.CredentialStoreService;
 import org.apache.ambari.server.stack.StackManagerFactory;
+import org.apache.ambari.server.stageplanner.RoleGraphFactory;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
@@ -73,6 +75,7 @@ import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentHost;
 import org.apache.ambari.server.state.ServiceComponentHostFactory;
+import org.apache.ambari.server.state.UpgradeContextFactory;
 import org.apache.ambari.server.state.cluster.ClusterFactory;
 import org.apache.ambari.server.state.host.HostFactory;
 import org.apache.ambari.server.state.stack.OsFamily;
@@ -126,10 +129,13 @@ public class StageUtilsTest extends EasyMockSupport {
         bind(HostDAO.class).toInstance(createNiceMock(HostDAO.class));
         
bind(PersistedState.class).toInstance(createNiceMock(PersistedState.class));
         
bind(HostRoleCommandDAO.class).toInstance(createNiceMock(HostRoleCommandDAO.class));
+        
bind(AmbariManagementController.class).toInstance(createNiceMock(AmbariManagementController.class));
 
         install(new 
FactoryModuleBuilder().build(ExecutionCommandWrapperFactory.class));
         install(new FactoryModuleBuilder().implement(Config.class, 
ConfigImpl.class).build(ConfigFactory.class));
         install(new 
FactoryModuleBuilder().build(ConfigureClusterTaskFactory.class));
+        install(new FactoryModuleBuilder().build(UpgradeContextFactory.class));
+        install(new FactoryModuleBuilder().build(RoleGraphFactory.class));
       }
     });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/TestComponentVersionMapping.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestComponentVersionMapping.py 
b/ambari-server/src/test/python/TestComponentVersionMapping.py
new file mode 100644
index 0000000..76fd8ed
--- /dev/null
+++ b/ambari-server/src/test/python/TestComponentVersionMapping.py
@@ -0,0 +1,84 @@
+# !/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+
+from resource_management.core.logger import Logger
+from resource_management.libraries.functions import component_version
+from resource_management.libraries.script import Script
+from unittest import TestCase
+
+Logger.initialize_logger()
+
+class TestComponentVersionMapping(TestCase):
+
+  def test_get_component_versions(self):
+    """
+    Tests that the component version map can be parsed
+    :return:
+    """
+    command_json = 
TestComponentVersionMapping._get_component_version_mappings()
+    Script.config = command_json
+
+    version = 
component_version.get_component_repository_version(service_name="HDFS",
+      component_name="DATANODE")
+
+    self.assertEqual(version, "2.5.0.0-1234")
+
+    version = component_version.get_component_repository_version(service_name 
= "ZOOKEEPER",
+      component_name = "ZOOKEEPER_SERVER")
+
+    self.assertEqual(version, "2.6.0.0-9999")
+
+
+  def test_get_component_version_by_service_name(self):
+    """
+    Tests that the component version map can be parsed using only the service 
name
+    :return:
+    """
+    command_json = 
TestComponentVersionMapping._get_component_version_mappings()
+    Script.config = command_json
+
+    version = 
component_version.get_component_repository_version(service_name="HDFS")
+    self.assertEqual(version, "2.5.0.0-1234")
+
+    version = component_version.get_component_repository_version(service_name 
= "ZOOKEEPER")
+    self.assertEqual(version, "2.6.0.0-9999")
+
+
+  @staticmethod
+  def _get_component_version_mappings():
+    """
+    A typical component version mapping structure
+    :return:
+    """
+    return {
+      "componentVersionMap": {
+        "HDFS": {
+          "NAMENODE": "2.5.0.0-1234",
+          "SECONDARY_NAMENODE": "2.5.0.0-1234",
+          "DATANODE": "2.5.0.0-1234",
+          "HDFS_CLIENT": "2.5.0.0-1234"
+        },
+        "ZOOKEEPER": {
+          "ZOOKEEPER_SERVER": "2.6.0.0-9999",
+          "ZOOKEEPER_CLIENT": "2.6.0.0-9999"
+        }
+      },
+    }

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/TestStackFeature.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestStackFeature.py 
b/ambari-server/src/test/python/TestStackFeature.py
index 6e8bcec..ddd3f72 100644
--- a/ambari-server/src/test/python/TestStackFeature.py
+++ b/ambari-server/src/test/python/TestStackFeature.py
@@ -36,24 +36,20 @@ class TestStackFeature(TestCase):
     - STOP
       hostLevelParams/stack_name = HDP
       hostLevelParams/stack_version = 2.5
-      hostLevelParams/current_version = 2.5.0.0-1237
       commandParams/version = 2.5.0.0-1237
     - START
       hostLevelParams/stack_name = HDP
       hostLevelParams/stack_version = 2.6
-      hostLevelParams/current_version = 2.5.0.0-1237
       commandParams/version = 2.6.0.0-334
 
   EU Downgrade (HDP 2.6 to HDP 2.5)
     - STOP
     hostLevelParams/stack_name = HDP
     hostLevelParams/stack_version = 2.6
-    hostLevelParams/current_version = 2.5.0.0-1237
     commandParams/version = 2.6.0.0-334
     - START
     hostLevelParams/stack_name = HDP
     hostLevelParams/stack_version = 2.5
-    hostLevelParams/current_version = 2.5.0.0-1237
     commandParams/version = 2.5.0.0-1237
   """
 
@@ -153,6 +149,7 @@ class TestStackFeature(TestCase):
     :return:
     """
     return {
+      "serviceName":"HDFS",
       "roleCommand": "ACTIONEXECUTE",
       "hostLevelParams": {
         "stack_name": "HDP",
@@ -172,17 +169,33 @@ class TestStackFeature(TestCase):
     :return:
     """
     return {
+      "serviceName":"HDFS",
       "roleCommand":"ACTIONEXECUTE",
       "hostLevelParams": {
         "stack_name": "HDP",
         "stack_version": "2.4",
-        "current_version":  "2.4.0.0-1234"
       },
       "commandParams": {
         "source_stack": "2.4",
         "target_stack": "2.5",
         "upgrade_direction": "upgrade",
         "version": "2.5.9.9-9999"
+      },
+      "upgradeSummary": {
+        "services":{
+          "HDFS":{
+            "sourceRepositoryId":1,
+            "sourceStackId":"HDP-2.4",
+            "sourceVersion":"2.4.0.0-1234",
+            "targetRepositoryId":2,
+            "targetStackId":"HDP-2.5",
+            "targetVersion":"2.5.9.9-9999"
+          }
+        },
+        "direction":"UPGRADE",
+        "type":"rolling_upgrade",
+        "isRevert":False,
+        "orchestration":"STANDARD"
       }
     }
 
@@ -193,18 +206,33 @@ class TestStackFeature(TestCase):
     :return:
     """
     return {
+      "serviceName":"HDFS",
       "roleCommand":"ACTIONEXECUTE",
       "hostLevelParams":{
         "stack_name":"HDP",
-        "stack_version":"2.4",
-        "current_version":"2.4.0.0-1234"
+        "stack_version":"2.4"
       },
       "commandParams":{
         "source_stack":"2.5",
         "target_stack":"2.4",
         "upgrade_direction":"downgrade",
-        "version":"2.4.0.0-1234",
-        "downgrade_from_version": "2.5.9.9-9999"
+        "version":"2.4.0.0-1234"
+      },
+      "upgradeSummary":{
+        "services":{
+          "HDFS":{
+            "sourceRepositoryId":2,
+            "sourceStackId":"HDP-2.5",
+            "sourceVersion":"2.5.9.9-9999",
+            "targetRepositoryId":1,
+            "targetStackId":"HDP-2.4",
+            "targetVersion":"2.4.0.0-1234"
+          }
+        },
+        "direction":"DOWNGRADE",
+        "type":"rolling_upgrade",
+        "isRevert":False,
+        "orchestration":"STANDARD"
       }
     }
 
@@ -216,18 +244,33 @@ class TestStackFeature(TestCase):
     :return:
     """
     return {
+      "serviceName":"HDFS",
       "roleCommand":"STOP",
       "hostLevelParams":{
         "stack_name":"HDP",
         "stack_version":"2.5",
-        "current_version":"2.4.0.0-1234"
       },
       "commandParams":{
         "source_stack":"2.5",
         "target_stack":"2.4",
         "upgrade_direction":"downgrade",
-        "version":"2.5.9.9-9999",
-        "downgrade_from_version":"2.5.9.9-9999"
+        "version":"2.5.9.9-9999"
+      },
+      "upgradeSummary":{
+        "services":{
+          "HDFS":{
+            "sourceRepositoryId":2,
+            "sourceStackId":"HDP-2.5",
+            "sourceVersion":"2.5.9.9-9999",
+            "targetRepositoryId":1,
+            "targetStackId":"HDP-2.4",
+            "targetVersion":"2.4.0.0-1234"
+          }
+        },
+        "direction":"DOWNGRADE",
+        "type":"rolling_upgrade",
+        "isRevert":False,
+        "orchestration":"STANDARD"
       }
     }
 
@@ -238,19 +281,34 @@ class TestStackFeature(TestCase):
     :return:
     """
     return {
+      "serviceName":"HDFS",
       "roleCommand":"CUSTOM_COMMAND",
       "hostLevelParams":{
         "stack_name":"HDP",
         "stack_version":"2.5",
-        "current_version":"2.4.0.0-1234",
         "custom_command":"STOP"
       },
       "commandParams":{
         "source_stack":"2.5",
         "target_stack":"2.4",
         "upgrade_direction":"downgrade",
-        "version":"2.5.9.9-9999",
-        "downgrade_from_version":"2.5.9.9-9999"
+        "version":"2.5.9.9-9999"
+      },
+      "upgradeSummary":{
+        "services":{
+          "HDFS":{
+            "sourceRepositoryId":2,
+            "sourceStackId":"HDP-2.5",
+            "sourceVersion":"2.5.9.9-9999",
+            "targetRepositoryId":1,
+            "targetStackId":"HDP-2.4",
+            "targetVersion":"2.4.0.0-1234"
+          }
+        },
+        "direction":"DOWNGRADE",
+        "type":"rolling_upgrade",
+        "isRevert":False,
+        "orchestration":"STANDARD"
       }
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/TestUpgradeSummary.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestUpgradeSummary.py 
b/ambari-server/src/test/python/TestUpgradeSummary.py
new file mode 100644
index 0000000..614dcd2
--- /dev/null
+++ b/ambari-server/src/test/python/TestUpgradeSummary.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+
+from resource_management.core.logger import Logger
+from resource_management.libraries.functions import upgrade_summary
+from resource_management.libraries.script import Script
+from unittest import TestCase
+
+Logger.initialize_logger()
+
+class TestUpgradeSummary(TestCase):
+
+  def test_get_stack_feature_version_missing_params(self):
+    """
+    Tests that simple upgrade information can be extracted from JSON
+    :return:
+    """
+    command_json = TestUpgradeSummary._get_cluster_simple_upgrade_json()
+    Script.config = command_json
+
+    summary = upgrade_summary.get_upgrade_summary()
+    self.assertEqual(False, summary.is_revert)
+    self.assertEqual("UPGRADE", summary.direction)
+    self.assertEqual("STANDARD", summary.orchestration)
+    self.assertEqual("rolling_upgrade", summary.type)
+
+    services = summary.services
+    self.assertEqual("2.4.0.0-1234", services["HDFS"].source_version)
+    self.assertEqual("2.5.9.9-9999", services["HDFS"].target_version)
+
+    self.assertEqual("2.4.0.0-1234", 
upgrade_summary.get_source_version("HDFS"))
+    self.assertEqual("2.5.9.9-9999", 
upgrade_summary.get_target_version("HDFS"))
+
+
+  @staticmethod
+  def _get_cluster_simple_upgrade_json():
+    """
+    A restart command during an upgrade.
+    :return:
+    """
+    return {
+      "roleCommand":"ACTIONEXECUTE",
+      "hostLevelParams": {
+        "stack_name": "HDP",
+        "stack_version": "2.4",
+      },
+      "commandParams": {
+        "source_stack": "2.4",
+        "target_stack": "2.5",
+        "upgrade_direction": "upgrade",
+        "version": "2.5.9.9-9999"
+      },
+      "upgradeSummary": {
+        "services":{
+          "HDFS":{
+            "sourceRepositoryId":1,
+            "sourceStackId":"HDP-2.4",
+            "sourceVersion":"2.4.0.0-1234",
+            "targetRepositoryId":2,
+            "targetStackId":"HDP-2.5",
+            "targetVersion":"2.5.9.9-9999"
+          }
+        },
+        "direction":"UPGRADE",
+        "type":"rolling_upgrade",
+        "isRevert":False,
+        "orchestration":"STANDARD"
+      }
+    }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py 
b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 6caadd3..30e76ef 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -255,22 +255,24 @@ class TestHiveServer(RMFTestCase):
   @patch("hive_service.check_fs_root")
   @patch("socket.socket")
   def test_start_secured(self, socket_mock, check_fs_root_mock, 
copy_to_hfds_mock):
+    config_file = 
self.get_src_folder()+"/test/python/stacks/2.0.6/configs/secured.json"
+    with open(config_file, "r") as f:
+      json_content = json.load(f)
+
+    json_content['commandParams']['version'] = '2.3.0.0-1234'
+
     s = socket_mock.return_value
     copy_to_hfds_mock.return_value = None
 
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/hive_server.py",
                        classname = "HiveServer",
                        command = "start",
-                       config_file="secured.json",
+                       config_dict = json_content,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
 
     self.assert_configure_secured()
-    self.assertResourceCalled('Execute',
-                              '/usr/bin/kinit -kt 
/etc/security/keytabs/hive.service.keytab 
hive/[email protected]; ',
-                              user = 'hive',
-                              )
     self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script 
/var/log/hive/hive-server2.out /var/log/hive/hive-server2.err 
/var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
         environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
            'HIVE_BIN': 'hive',
@@ -299,10 +301,6 @@ class TestHiveServer(RMFTestCase):
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('Execute',
-                              '/usr/bin/kinit -kt 
/etc/security/keytabs/hive.service.keytab 
hive/[email protected]; ',
-                              user = 'hive',
-                              )
 
     self.assertResourceCalled('Execute', "ambari-sudo.sh kill 123",
         not_if = "! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 
123 >/dev/null 2>&1)",
@@ -850,7 +848,6 @@ From source with checksum 
150f554beae04f76f814f59549dead8b"""
       ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', 
'2.2.1.0-2065'),
       sudo=True)
 
-    self.assertNoMoreResources()
 
   @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
   def test_pre_upgrade_restart(self, copy_to_hdfs_mock):

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py 
b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
index 6877519..ded4d45 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
@@ -34,11 +34,15 @@ class TestServiceCheck(RMFTestCase):
 
 
   def test_service_check_default(self, socket_mock):
+    config_file = "default.json"
+
+    base_path, configs_path = 
self._get_test_paths(RMFTestCase.TARGET_COMMON_SERVICES, self.STACK_VERSION)
+    json_content = self.get_config_file(configs_path, config_file)
 
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/service_check.py",
                         classname="HiveServiceCheck",
                         command="service_check",
-                        config_file="default.json",
+                        config_dict = json_content,
                         stack_version = self.STACK_VERSION,
                         target = RMFTestCase.TARGET_COMMON_SERVICES
     )
@@ -146,11 +150,15 @@ class TestServiceCheck(RMFTestCase):
 
 
   def test_service_check_secured(self, socket_mock):
+    config_file = "secured.json"
+    base_path, configs_path = 
self._get_test_paths(RMFTestCase.TARGET_COMMON_SERVICES, self.STACK_VERSION)
+    json_content = self.get_config_file(configs_path, config_file)
+    del json_content["commandParams"]["version"]
 
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/service_check.py",
                         classname="HiveServiceCheck",
                         command="service_check",
-                        config_file="secured.json",
+                        config_dict = json_content,
                         stack_version = self.STACK_VERSION,
                         target = RMFTestCase.TARGET_COMMON_SERVICES
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu.json 
b/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu.json
index 3aadf2c..3440085 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu.json
@@ -42,8 +42,7 @@
         "stack_name": "HDP", 
         "group_list": "[\"hadoop\",\"users\"]", 
         "host_sys_prepped": "false", 
-        "ambari_db_rca_username": "mapred", 
-        "current_version": "2.2.7.0-2816", 
+        "ambari_db_rca_username": "mapred",
         "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
         "mysql_jdbc_url": 
"http://10.0.0.28:8080/resources//mysql-connector-java.jar";, 
         "repo_info": 
"[{\"baseUrl\":\"http://repos.ambari.apache.org/hdp/HDP-2.2.7.0-2816\",\"osType\":\"redhat6\",\"repoId\":\"HDP-2.2\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.2.6.0\",\"latestBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.2.8.0\",\"baseSaved\":true},{\"baseUrl\":\"http://repos.ambari.apache.org/hdp/HDP-UTILS-1.1.0.20\",\"osType\":\"redhat6\",\"repoId\":\"HDP-UTILS-1.1.0.20\",\"repoName\":\"HDP-UTILS\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"latestBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"baseSaved\":true}]";,
 
@@ -70,7 +69,23 @@
     "role": "NAMENODE", 
     "requestId": 22, 
     "taskId": 147, 
-    "public_hostname": "c6402.ambari.apache.org", 
+    "public_hostname": "c6402.ambari.apache.org",
+    "upgradeSummary": {
+      "services": {
+        "HDFS": {
+          "sourceRepositoryId": 1,
+          "sourceStackId": "HDP-2.2",
+          "sourceVersion": "2.2.7.0-2816",
+          "targetRepositoryId": 2,
+          "targetStackId": "HDP-2.3",
+          "targetVersion": "2.3.2.0-2844"
+        }
+      },
+      "direction": "UPGRADE",
+      "type": "nonrolling_upgrade",
+      "isRevert": false,
+      "orchestration": "STANDARD"
+    },
     "configurations": {
         "hdfs-site": {
             "dfs.namenode.http-address.nn1.nn1": 
"c6401.ambari.apache.org:50070", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu_standby.json
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu_standby.json 
b/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu_standby.json
index 2d48ff6..90b2493 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu_standby.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu_standby.json
@@ -42,8 +42,7 @@
         "stack_name": "HDP", 
         "group_list": "[\"hadoop\",\"users\"]", 
         "host_sys_prepped": "false", 
-        "ambari_db_rca_username": "mapred", 
-        "current_version": "2.2.7.0-2816", 
+        "ambari_db_rca_username": "mapred",
         "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
         "mysql_jdbc_url": 
"http://10.0.0.28:8080/resources//mysql-connector-java.jar";, 
         "repo_info": 
"[{\"baseUrl\":\"http://repos.ambari.apache.org/hdp/HDP-2.2.7.0-2816\",\"osType\":\"redhat6\",\"repoId\":\"HDP-2.2\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.2.6.0\",\"latestBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.2.8.0\",\"baseSaved\":true},{\"baseUrl\":\"http://repos.ambari.apache.org/hdp/HDP-UTILS-1.1.0.20\",\"osType\":\"redhat6\",\"repoId\":\"HDP-UTILS-1.1.0.20\",\"repoName\":\"HDP-UTILS\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"latestBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"baseSaved\":true}]";,
 
@@ -70,7 +69,23 @@
     "role": "NAMENODE", 
     "requestId": 22, 
     "taskId": 147, 
-    "public_hostname": "c6402.ambari.apache.org", 
+    "public_hostname": "c6402.ambari.apache.org",
+    "upgradeSummary": {
+      "services": {
+        "HDFS": {
+          "sourceRepositoryId": 1,
+          "sourceStackId": "HDP-2.2",
+          "sourceVersion": "2.2.7.0-2816",
+          "targetRepositoryId": 2,
+          "targetStackId": "HDP-2.3",
+          "targetVersion": "2.3.2.0-2844"
+        }
+      },
+      "direction": "UPGRADE",
+      "type": "nonrolling_upgrade",
+      "isRevert": false,
+      "orchestration": "STANDARD"
+    },
     "configurations": {
         "hdfs-site": {
             "dfs.namenode.http-address.nn1.nn1": 
"c6401.ambari.apache.org:50070", 

Reply via email to