AMBARI-21580 - Replace Hard Coded stack-select Structures (jonathanhurley)

Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2bab2159
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2bab2159
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2bab2159

Branch: refs/heads/branch-feature-AMBARI-14714
Commit: 2bab2159ba9f17a0fcdefd20c8ab401c0042fdb9
Parents: 70baac3
Author: Jonathan Hurley <jhur...@hortonworks.com>
Authored: Wed Jul 26 15:01:33 2017 -0400
Committer: Jonathan Hurley <jhur...@hortonworks.com>
Committed: Mon Jul 31 12:12:06 2017 -0400

----------------------------------------------------------------------
 .../libraries/functions/stack_select.py         | 171 +++-
 .../libraries/functions/stack_tools.py          |  15 +-
 .../libraries/script/script.py                  |  23 +-
 .../BlueprintConfigurationProcessor.java        |   3 +-
 .../ambari/server/state/ConfigHelper.java       |   1 +
 .../server/upgrade/FinalUpgradeCatalog.java     |   4 +-
 .../package/scripts/accumulo_client.py          |   5 +-
 .../package/scripts/accumulo_script.py          |  34 +-
 .../0.1.0.2.3/package/scripts/atlas_client.py   |   5 +-
 .../package/scripts/metadata_server.py          |   6 +-
 .../0.7.0.3.0/package/scripts/atlas_client.py   |   5 +-
 .../package/scripts/metadata_server.py          |   5 +-
 .../DRUID/0.9.2/package/scripts/druid_node.py   |   6 +-
 .../DRUID/0.9.2/package/scripts/superset.py     |   5 +-
 .../0.5.0.2.1/package/scripts/falcon_client.py  |   5 +-
 .../0.5.0.2.1/package/scripts/falcon_server.py  |   5 +-
 .../1.4.0.2.0/package/scripts/flume_handler.py  |   5 +-
 .../0.96.0.2.0/package/scripts/hbase_client.py  |   7 +-
 .../0.96.0.2.0/package/scripts/hbase_master.py  |   5 +-
 .../package/scripts/hbase_regionserver.py       |   5 +-
 .../package/scripts/phoenix_queryserver.py      |   6 +-
 .../HBASE/0.96.0.2.0/package/scripts/upgrade.py |   4 +-
 .../2.0.0.3.0/package/scripts/hbase_client.py   |   7 +-
 .../2.0.0.3.0/package/scripts/hbase_master.py   |   5 +-
 .../package/scripts/hbase_regionserver.py       |   5 +-
 .../package/scripts/phoenix_queryserver.py      |   6 +-
 .../HBASE/2.0.0.3.0/package/scripts/upgrade.py  |   4 +-
 .../HDFS/2.1.0.2.0/package/scripts/datanode.py  |   7 +-
 .../2.1.0.2.0/package/scripts/hdfs_client.py    |   5 +-
 .../2.1.0.2.0/package/scripts/journalnode.py    |   5 +-
 .../HDFS/2.1.0.2.0/package/scripts/namenode.py  |   7 +-
 .../2.1.0.2.0/package/scripts/nfsgateway.py     |   6 +-
 .../HDFS/2.1.0.2.0/package/scripts/snamenode.py |   5 +-
 .../2.1.0.2.0/package/scripts/zkfc_slave.py     |   9 +-
 .../HDFS/3.0.0.3.0/package/scripts/datanode.py  |   5 +-
 .../3.0.0.3.0/package/scripts/hdfs_client.py    |   5 +-
 .../3.0.0.3.0/package/scripts/journalnode.py    |   5 +-
 .../HDFS/3.0.0.3.0/package/scripts/namenode.py  |   5 +-
 .../3.0.0.3.0/package/scripts/nfsgateway.py     |   5 +-
 .../HDFS/3.0.0.3.0/package/scripts/snamenode.py |   5 +-
 .../3.0.0.3.0/package/scripts/zkfc_slave.py     |   9 +-
 .../0.12.0.2.0/package/scripts/hcat_client.py   |   8 +-
 .../0.12.0.2.0/package/scripts/hive_client.py   |   5 +-
 .../package/scripts/hive_metastore.py           |   6 +-
 .../0.12.0.2.0/package/scripts/hive_server.py   |   5 +-
 .../package/scripts/hive_server_interactive.py  |   6 +-
 .../package/scripts/webhcat_server.py           |   5 +-
 .../2.1.0.3.0/package/scripts/hcat_client.py    |   9 +-
 .../2.1.0.3.0/package/scripts/hive_client.py    |   5 +-
 .../2.1.0.3.0/package/scripts/hive_metastore.py |   6 +-
 .../2.1.0.3.0/package/scripts/hive_server.py    |   5 +-
 .../package/scripts/hive_server_interactive.py  |   5 +-
 .../2.1.0.3.0/package/scripts/webhcat_server.py |   5 +-
 .../0.10.0.3.0/package/scripts/kafka_broker.py  |   5 +-
 .../KAFKA/0.8.1/package/scripts/kafka_broker.py |   5 +-
 .../0.5.0.2.2/package/scripts/knox_gateway.py   |   5 +-
 .../0.5.0.3.0/package/scripts/knox_gateway.py   |   5 +-
 .../1.0.0.2.3/package/scripts/mahout_client.py  |   7 +-
 .../4.0.0.2.0/package/scripts/oozie_client.py   |   5 +-
 .../4.0.0.2.0/package/scripts/oozie_server.py   |   8 +-
 .../4.0.0.2.0/package/scripts/params_linux.py   |   1 -
 .../4.2.0.3.0/package/scripts/oozie_client.py   |   5 +-
 .../4.2.0.3.0/package/scripts/oozie_server.py   |   7 +-
 .../0.12.0.2.0/package/scripts/pig_client.py    |   5 +-
 .../0.16.1.3.0/package/scripts/pig_client.py    |   5 +-
 .../0.4.0/package/scripts/ranger_admin.py       |   5 +-
 .../0.4.0/package/scripts/ranger_tagsync.py     |   7 +-
 .../0.4.0/package/scripts/ranger_usersync.py    |   3 -
 .../RANGER/0.4.0/package/scripts/upgrade.py     |   2 +-
 .../1.0.0.3.0/package/scripts/ranger_admin.py   |   5 +-
 .../1.0.0.3.0/package/scripts/ranger_tagsync.py |   6 +-
 .../package/scripts/ranger_usersync.py          |   3 -
 .../RANGER/1.0.0.3.0/package/scripts/upgrade.py |   2 +-
 .../0.5.0.2.3/package/scripts/kms_server.py     |   3 -
 .../0.5.0.2.3/package/scripts/upgrade.py        |   2 +-
 .../1.0.0.3.0/package/scripts/kms_server.py     |   3 -
 .../1.0.0.3.0/package/scripts/upgrade.py        |   2 +-
 .../0.60.0.2.2/package/scripts/slider_client.py |   6 +-
 .../0.91.0.3.0/package/scripts/slider_client.py |   6 +-
 .../1.2.1/package/scripts/job_history_server.py |   5 +-
 .../SPARK/1.2.1/package/scripts/livy_server.py  |   5 +-
 .../SPARK/1.2.1/package/scripts/spark_client.py |   5 +-
 .../package/scripts/spark_thrift_server.py      |   5 +-
 .../2.2.0/package/scripts/job_history_server.py |   6 +-
 .../SPARK/2.2.0/package/scripts/livy_server.py  |   6 +-
 .../SPARK/2.2.0/package/scripts/spark_client.py |   6 +-
 .../package/scripts/spark_thrift_server.py      |   6 +-
 .../2.0.0/package/scripts/job_history_server.py |   5 +-
 .../2.0.0/package/scripts/livy2_server.py       |   5 +-
 .../2.0.0/package/scripts/spark_client.py       |   5 +-
 .../package/scripts/spark_thrift_server.py      |   5 +-
 .../1.4.4.2.0/package/scripts/service_check.py  |   3 -
 .../1.4.4.2.0/package/scripts/sqoop_client.py   |   5 +-
 .../1.4.4.3.0/package/scripts/service_check.py  |   3 -
 .../1.4.4.3.0/package/scripts/sqoop_client.py   |   5 +-
 .../STORM/0.9.1/package/scripts/drpc_server.py  |   5 +-
 .../STORM/0.9.1/package/scripts/nimbus.py       |   7 +-
 .../STORM/0.9.1/package/scripts/nimbus_prod.py  |   6 +-
 .../STORM/0.9.1/package/scripts/pacemaker.py    |   5 +-
 .../STORM/0.9.1/package/scripts/rest_api.py     |   3 -
 .../STORM/0.9.1/package/scripts/supervisor.py   |   6 +-
 .../0.9.1/package/scripts/supervisor_prod.py    |   6 +-
 .../STORM/0.9.1/package/scripts/ui_server.py    |   5 +-
 .../1.0.1.3.0/package/scripts/drpc_server.py    |   5 +-
 .../STORM/1.0.1.3.0/package/scripts/nimbus.py   |   6 +-
 .../1.0.1.3.0/package/scripts/nimbus_prod.py    |   6 +-
 .../1.0.1.3.0/package/scripts/pacemaker.py      |   5 +-
 .../STORM/1.0.1.3.0/package/scripts/rest_api.py |   3 -
 .../1.0.1.3.0/package/scripts/supervisor.py     |   6 +-
 .../package/scripts/supervisor_prod.py          |   6 +-
 .../1.0.1.3.0/package/scripts/ui_server.py      |   5 +-
 .../TEZ/0.4.0.2.1/package/scripts/tez_client.py |   5 +-
 .../TEZ/0.9.0.3.0/package/scripts/tez_client.py |   5 +-
 .../scripts/application_timeline_server.py      |   5 +-
 .../2.1.0.2.0/package/scripts/historyserver.py  |   5 +-
 .../package/scripts/mapreduce2_client.py        |   5 +-
 .../2.1.0.2.0/package/scripts/nodemanager.py    |   5 +-
 .../package/scripts/resourcemanager.py          |   5 +-
 .../2.1.0.2.0/package/scripts/yarn_client.py    |   5 +-
 .../scripts/application_timeline_server.py      |   5 +-
 .../3.0.0.3.0/package/scripts/historyserver.py  |   5 +-
 .../package/scripts/mapreduce2_client.py        |   5 +-
 .../3.0.0.3.0/package/scripts/nodemanager.py    |   5 +-
 .../package/scripts/resourcemanager.py          |   5 +-
 .../3.0.0.3.0/package/scripts/yarn_client.py    |   5 +-
 .../0.6.0.2.5/package/scripts/master.py         |   5 +-
 .../0.6.0.3.0/package/scripts/master.py         |   5 +-
 .../3.4.5/package/scripts/zookeeper_client.py   |   4 +-
 .../3.4.5/package/scripts/zookeeper_server.py   |   5 +-
 .../HDP/2.0.6/configuration/cluster-env.xml     |  15 +
 .../2.0.6/hooks/after-INSTALL/scripts/hook.py   |   2 +-
 .../2.0.6/hooks/after-INSTALL/scripts/params.py |   3 -
 .../scripts/shared_initialization.py            |  39 +-
 .../2.0.6/properties/stack_select_packages.json | 952 +++++++++++++++++++
 .../HDP/3.0/configuration/cluster-env.xml       |  15 +
 .../HDP/3.0/hooks/after-INSTALL/scripts/hook.py |   2 +-
 .../scripts/shared_initialization.py            |  38 +-
 .../3.0/properties/stack_select_packages.json   | 848 +++++++++++++++++
 .../python/stacks/2.0.6/FLUME/test_flume.py     |   5 +-
 .../stacks/2.0.6/HBASE/test_hbase_client.py     |   5 +-
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |   4 +
 .../2.0.6/HBASE/test_hbase_regionserver.py      |   4 +
 .../2.0.6/HBASE/test_phoenix_queryserver.py     |   3 +
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |   4 +
 .../stacks/2.0.6/HDFS/test_hdfs_client.py       |   5 +
 .../stacks/2.0.6/HDFS/test_journalnode.py       |   4 +
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |   4 +
 .../python/stacks/2.0.6/HDFS/test_nfsgateway.py |   3 +
 .../stacks/2.0.6/HIVE/test_hcat_client.py       |   3 +
 .../stacks/2.0.6/HIVE/test_hive_client.py       |   6 +
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    |   4 +
 .../stacks/2.0.6/OOZIE/test_oozie_client.py     |   4 +
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |  11 +
 .../python/stacks/2.0.6/PIG/test_pig_client.py  |   4 +
 .../python/stacks/2.0.6/SQOOP/test_sqoop.py     |   3 +
 .../stacks/2.0.6/YARN/test_historyserver.py     |   5 +-
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |   5 +
 .../stacks/2.0.6/YARN/test_nodemanager.py       |   6 +
 .../stacks/2.0.6/YARN/test_resourcemanager.py   |   3 +
 .../stacks/2.0.6/YARN/test_yarn_client.py       |   4 +
 .../2.0.6/ZOOKEEPER/test_zookeeper_client.py    |   4 +
 .../2.0.6/ZOOKEEPER/test_zookeeper_server.py    |   5 +
 .../hooks/after-INSTALL/test_after_install.py   |  43 +-
 .../stacks/2.1/FALCON/test_falcon_client.py     |   4 +
 .../stacks/2.1/FALCON/test_falcon_server.py     |   5 +
 .../stacks/2.1/HIVE/test_hive_metastore.py      |  42 +-
 .../stacks/2.1/STORM/test_storm_drpc_server.py  |   4 +
 .../stacks/2.1/STORM/test_storm_nimbus.py       |   3 +
 .../stacks/2.1/STORM/test_storm_nimbus_prod.py  |   3 +
 .../stacks/2.1/STORM/test_storm_supervisor.py   |   4 +
 .../2.1/STORM/test_storm_supervisor_prod.py     |   4 +
 .../stacks/2.1/STORM/test_storm_ui_server.py    |   5 +-
 .../python/stacks/2.1/TEZ/test_tez_client.py    |   4 +
 .../stacks/2.1/YARN/test_apptimelineserver.py   |   3 +
 .../stacks/2.2/ACCUMULO/test_accumulo_client.py |   4 +
 .../stacks/2.2/KAFKA/test_kafka_broker.py       |   4 +
 .../stacks/2.2/SLIDER/test_slider_client.py     |   6 +
 .../stacks/2.2/SPARK/test_job_history_server.py |   3 +
 .../stacks/2.2/SPARK/test_spark_client.py       |   4 +
 .../stacks/2.3/MAHOUT/test_mahout_client.py     |   4 +
 .../2.3/SPARK/test_spark_thrift_server.py       |   3 +
 .../src/test/python/stacks/utils/RMFTestCase.py |  12 +
 .../HDF/2.0/hooks/after-INSTALL/scripts/hook.py |   2 +-
 .../scripts/shared_initialization.py            |  35 +-
 .../2.0/hooks/after-INSTALL/scripts/hook.py     |   2 +-
 .../scripts/shared_initialization.py            |  31 +-
 186 files changed, 2436 insertions(+), 702 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
index 79393b9..723871b 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
@@ -22,6 +22,7 @@ limitations under the License.
 import os
 import sys
 import re
+import ambari_simplejson as json
 
 # Local Imports
 from resource_management.core.logger import Logger
@@ -40,50 +41,6 @@ from resource_management.libraries.functions import 
StackFeature
 
 STACK_SELECT_PREFIX = 'ambari-python-wrap'
 
-# a mapping of Ambari server role to <stack-selector-tool> component name for 
all
-# non-clients
-SERVER_ROLE_DIRECTORY_MAP = {
-  'ACCUMULO_MASTER' : 'accumulo-master',
-  'ACCUMULO_MONITOR' : 'accumulo-monitor',
-  'ACCUMULO_GC' : 'accumulo-gc',
-  'ACCUMULO_TRACER' : 'accumulo-tracer',
-  'ACCUMULO_TSERVER' : 'accumulo-tablet',
-  'ATLAS_SERVER' : 'atlas-server',
-  'FLUME_HANDLER' : 'flume-server',
-  'FALCON_SERVER' : 'falcon-server',
-  'NAMENODE' : 'hadoop-hdfs-namenode',
-  'DATANODE' : 'hadoop-hdfs-datanode',
-  'SECONDARY_NAMENODE' : 'hadoop-hdfs-secondarynamenode',
-  'NFS_GATEWAY' : 'hadoop-hdfs-nfs3',
-  'JOURNALNODE' : 'hadoop-hdfs-journalnode',
-  'HBASE_MASTER' : 'hbase-master',
-  'HBASE_REGIONSERVER' : 'hbase-regionserver',
-  'HIVE_METASTORE' : 'hive-metastore',
-  'HIVE_SERVER' : 'hive-server2',
-  'HIVE_SERVER_INTERACTIVE' : 'hive-server2-hive2',
-  'WEBHCAT_SERVER' : 'hive-webhcat',
-  'KAFKA_BROKER' : 'kafka-broker',
-  'KNOX_GATEWAY' : 'knox-server',
-  'OOZIE_SERVER' : 'oozie-server',
-  'RANGER_ADMIN' : 'ranger-admin',
-  'RANGER_USERSYNC' : 'ranger-usersync',
-  'RANGER_TAGSYNC' : 'ranger-tagsync',
-  'RANGER_KMS' : 'ranger-kms',
-  'SPARK_JOBHISTORYSERVER' : 'spark-historyserver',
-  'SPARK_THRIFTSERVER' : 'spark-thriftserver',
-  'NIMBUS' : 'storm-nimbus',
-  'SUPERVISOR' : 'storm-supervisor',
-  'HISTORYSERVER' : 'hadoop-mapreduce-historyserver',
-  'APP_TIMELINE_SERVER' : 'hadoop-yarn-timelineserver',
-  'NODEMANAGER' : 'hadoop-yarn-nodemanager',
-  'RESOURCEMANAGER' : 'hadoop-yarn-resourcemanager',
-  'ZOOKEEPER_SERVER' : 'zookeeper-server',
-
-  # ZKFC is tied to NN since it doesn't have its own component in 
<stack-selector-tool> and there is
-  # a requirement that the ZKFC is installed on each NN
-  'ZKFC' : 'hadoop-hdfs-namenode'
-}
-
 # mapping of service check to <stack-selector-tool> component
 SERVICE_CHECK_DIRECTORY_MAP = {
   "HDFS_SERVICE_CHECK" : "hadoop-client",
@@ -113,6 +70,110 @@ HADOOP_DIR_DEFAULTS = {
   "lib": "/usr/lib/hadoop/lib"
 }
 
+PACKAGE_SCOPE_INSTALL = "INSTALL"
+PACKAGE_SCOPE_STANDARD = "STANDARD"
+PACKAGE_SCOPE_PATCH = "PATCH"
+PACKAGE_SCOPE_STACK_SELECT = "STACK-SELECT-PACKAGE"
+_PACKAGE_SCOPES = (PACKAGE_SCOPE_INSTALL, PACKAGE_SCOPE_STANDARD, 
PACKAGE_SCOPE_PATCH, PACKAGE_SCOPE_STACK_SELECT)
+
+
+def get_package_name(default_package = None):
+  """
+  Gets the stack-select package name for the service name and
+  component from the current command. Not all services/components are used 
with the
+  stack-select tools, so those will return no packages.
+
+  :return:  the stack-select package name for the command's component or None
+  """
+  config = Script.get_config()
+
+  if 'role' not in config or 'serviceName' not in config:
+    raise Fail("Both the role and the service name must be included in the 
command in order to determine which packages to use with the stack-select tool")
+
+  service_name = config['serviceName']
+  component_name = config['role']
+
+  # should return a single item
+  try:
+    package = get_packages(PACKAGE_SCOPE_STACK_SELECT, service_name, 
component_name)
+    if package is None:
+      package = default_package
+
+    return package
+  except:
+    if default_package is not None:
+      return default_package
+    else:
+      raise
+
+
+
+def get_packages(scope, service_name = None, component_name = None):
+  """
+  Gets the packages which should be used with the stack's stack-select tool 
for the
+  specified service/component. Not all services/components are used with the 
stack-select tools,
+  so those will return no packages.
+
+  :param scope: the scope of the command
+  :param service_name:  the service name, such as ZOOKEEPER
+  :param component_name: the component name, such as ZOOKEEPER_SERVER
+  :return:  the packages to use with stack-select or None
+  """
+  from resource_management.libraries.functions.default import default
+
+  import time
+
+  if scope not in _PACKAGE_SCOPES:
+    raise Fail("The specified scope of {0} is not valid".format(scope))
+
+  config = Script.get_config()
+
+  if service_name is None or component_name is None:
+    if 'role' not in config or 'serviceName' not in config:
+      raise Fail("Both the role and the service name must be included in the 
command in order to determine which packages to use with the stack-select tool")
+
+    service_name = config['serviceName']
+    component_name = config['role']
+
+
+  stack_name = default("/hostLevelParams/stack_name", None)
+  if stack_name is None:
+    raise Fail("The stack name is not present in the command. Packages for 
stack-select tool cannot be loaded.")
+
+  stack_select_packages_config = 
default("/configurations/cluster-env/stack_select_packages", None)
+  if stack_select_packages_config is None:
+    raise Fail("The stack packages are not defined on the command. Unable to 
load packages for the stack-select tool")
+
+  data = json.loads(stack_select_packages_config)
+
+  if stack_name not in data:
+    raise Fail(
+      "Cannot find stack-select packages for the {0} stack".format(stack_name))
+
+  stack_select_key = "stack-select"
+  data = data[stack_name]
+  if stack_select_key not in data:
+    raise Fail(
+      "There are no stack-select packages defined for this command for the {0} 
stack".format(stack_name))
+
+  # this should now be the dictionary of role name to package name
+  data = data[stack_select_key]
+  service_name = service_name.upper()
+  component_name = component_name.upper()
+
+  if service_name not in data:
+    Logger.info("Skipping stack-select on {0} because it does not exist in the 
stack-select package structure.".format(service_name))
+    return None
+
+  data = data[service_name]
+
+  if component_name not in data:
+    Logger.info("Skipping stack-select on {0} because it does not exist in the 
stack-select package structure.".format(component_name))
+    return None
+
+  return data[component_name][scope]
+
+
 def select_all(version_to_select):
   """
   Executes <stack-selector-tool> on every component for the specified version. 
If the value passed in is a
@@ -135,6 +196,20 @@ def select_all(version_to_select):
   Execute(command, only_if = only_if_command)
 
 
+def select_packages(version):
+  """
+  Uses the command's service and role to determine the stack-select packages 
which need to be invoked.
+  :param version: the version to select
+  :return: None
+  """
+  stack_select_packages = get_packages(PACKAGE_SCOPE_STANDARD)
+  if stack_select_packages is None:
+    return
+
+  for stack_select_package_name in stack_select_packages:
+    select(stack_select_package_name, version)
+
+
 def select(component, version):
   """
   Executes <stack-selector-tool> on the specific component and version. Some 
global
@@ -170,15 +245,15 @@ def get_role_component_current_stack_version():
   Gets the current HDP version of the component that this role command is for.
   :return:  the current HDP version of the specified component or None
   """
-  stack_select_component = None
   role = default("/role", "")
   role_command =  default("/roleCommand", "")
+
   stack_selector_name = 
stack_tools.get_stack_tool_name(stack_tools.STACK_SELECTOR_NAME)
   Logger.info("Checking version for {0} via {1}".format(role, 
stack_selector_name))
-  if role in SERVER_ROLE_DIRECTORY_MAP:
-    stack_select_component = SERVER_ROLE_DIRECTORY_MAP[role]
-  elif role_command == "SERVICE_CHECK" and role in SERVICE_CHECK_DIRECTORY_MAP:
+  if role_command == "SERVICE_CHECK" and role in SERVICE_CHECK_DIRECTORY_MAP:
     stack_select_component = SERVICE_CHECK_DIRECTORY_MAP[role]
+  else:
+    stack_select_component = get_package_name()
 
   if stack_select_component is None:
     if not role:

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-common/src/main/python/resource_management/libraries/functions/stack_tools.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/stack_tools.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/stack_tools.py
index 420ae11..fa97fd1 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/functions/stack_tools.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/stack_tools.py
@@ -19,7 +19,7 @@ limitations under the License.
 '''
 
 __all__ = ["get_stack_tool", "get_stack_tool_name", "get_stack_tool_path",
-           "get_stack_tool_package", "STACK_SELECTOR_NAME", 
"CONF_SELECTOR_NAME"]
+           "get_stack_tool_package", "get_stack_name", "STACK_SELECTOR_NAME", 
"CONF_SELECTOR_NAME"]
 
 # simplejson is much faster comparing to Python 2.6 json module and has the 
same functions set.
 import ambari_simplejson as json
@@ -120,3 +120,16 @@ def get_stack_root(stack_name, stack_root_json):
     return "/usr/{0}".format(stack_name.lower())
 
   return stack_root[stack_name]
+
+
+def get_stack_name(stack_formatted):
+  """
+  Get the stack name (eg. HDP) from formatted string that may contain stack 
version (eg. HDP-2.6.1.0-123)
+  """
+  if stack_formatted is None:
+    return None
+
+  if '-' not in stack_formatted:
+    return stack_formatted
+
+  return stack_formatted.split('-')[0]
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/script/script.py 
b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index c2c89c4..63b55ff 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -183,12 +183,6 @@ class Script(object):
     except IOError, err:
       Script.structuredOut.update({"errMsg" : "Unable to write to " + 
self.stroutfile})
 
-  def get_component_name(self):
-    """
-    To be overridden by subclasses.
-     Returns a string with the component name used in selecting the version.
-    """
-    pass
 
   def get_config_dir_during_stack_upgrade(self, env, base_dir, 
conf_select_name):
     """
@@ -218,11 +212,13 @@ class Script(object):
     :param stack_name: One of HDP, HDPWIN, PHD, BIGTOP.
     :return: Append the version number to the structured out.
     """
+    from resource_management.libraries.functions import stack_select
+
     stack_name = Script.get_stack_name()
-    component_name = self.get_component_name()
+    stack_select_package_name = stack_select.get_package_name()
 
-    if component_name and stack_name:
-      component_version = get_component_version(stack_name, component_name)
+    if stack_select_package_name and stack_name:
+      component_version = get_component_version(stack_name, 
stack_select_package_name)
 
       if component_version:
         self.put_structured_out({"version": component_version})
@@ -446,11 +442,12 @@ class Script(object):
 
     :return: stack version including the build number. e.g.: 2.3.4.0-1234.
     """
+    from resource_management.libraries.functions import stack_select
+
     # preferred way is to get the actual selected version of current component
-    component_name = self.get_component_name()
-    if not Script.stack_version_from_distro_select and component_name:
-      from resource_management.libraries.functions import stack_select
-      Script.stack_version_from_distro_select = 
stack_select.get_stack_version_before_install(component_name)
+    stack_select_package_name = stack_select.get_package_name()
+    if not Script.stack_version_from_distro_select and 
stack_select_package_name:
+      Script.stack_version_from_distro_select = 
stack_select.get_stack_version_before_install(stack_select_package_name)
 
     # If <stack-selector-tool> has not yet been done (situations like first 
install),
     # we can use <stack-selector-tool> version itself.

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
index 144e2e7..46ace32 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
@@ -2947,7 +2947,8 @@ public class BlueprintConfigurationProcessor {
 
     Set<String> properties = 
Sets.newHashSet(ConfigHelper.CLUSTER_ENV_STACK_NAME_PROPERTY,
         ConfigHelper.CLUSTER_ENV_STACK_ROOT_PROPERTY, 
ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY,
-        ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY);
+        ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY,
+        ConfigHelper.CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY);
 
     try {
       Map<String, Map<String, String>> defaultStackProperties = 
configHelper.getDefaultStackProperties(stackId);

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java 
b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
index e8250fe..359b225 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
@@ -92,6 +92,7 @@ public class ConfigHelper {
   public static final String CLUSTER_ENV_STACK_FEATURES_PROPERTY = 
"stack_features";
   public static final String CLUSTER_ENV_STACK_TOOLS_PROPERTY = "stack_tools";
   public static final String CLUSTER_ENV_STACK_ROOT_PROPERTY = "stack_root";
+  public static final String CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY = 
"stack_select_packages";
 
   public static final String HTTP_ONLY = "HTTP_ONLY";
   public static final String HTTPS_ONLY = "HTTPS_ONLY";

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
index 3d9d163..216d39d 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
@@ -77,6 +77,7 @@ public class FinalUpgradeCatalog extends 
AbstractUpgradeCatalog {
    * <ul>
    * <li>Adds/Updates {@link ConfigHelper#CLUSTER_ENV_STACK_FEATURES_PROPERTY} 
from stack</li>
    * <li>Adds/Updates {@link ConfigHelper#CLUSTER_ENV_STACK_TOOLS_PROPERTY} 
from stack</li>
+   * <li>Adds/Updates {@link 
ConfigHelper#CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY} from stack</li>
    * </ul>
    *
    * Note: Config properties stack_features and stack_tools should always be 
updated to latest values as defined
@@ -106,7 +107,8 @@ public class FinalUpgradeCatalog extends 
AbstractUpgradeCatalog {
         List<PropertyInfo> properties = stackInfo.getProperties();
         for(PropertyInfo property : properties) {
           
if(property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY) 
||
-              
property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY)) {
+              
property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY) ||
+              
property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY))
 {
             propertyMap.put(property.getName(), property.getValue());
           }
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
 
b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
index 67ca525..856446c 100644
--- 
a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
+++ 
b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
@@ -30,9 +30,6 @@ from accumulo_configuration import setup_conf_dir
 
 
 class AccumuloClient(Script):
-  def get_component_name(self):
-    return "accumulo-client"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -60,7 +57,7 @@ class AccumuloClient(Script):
 
     Logger.info("Executing Accumulo Client Upgrade pre-restart")
     conf_select.select(params.stack_name, "accumulo", params.version)
-    stack_select.select("accumulo-client", params.version)
+    stack_select.select_packages(params.version)
 
 if __name__ == "__main__":
   AccumuloClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
 
b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
index 445c996..d884bcd 100644
--- 
a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
+++ 
b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
@@ -37,33 +37,10 @@ from accumulo_service import accumulo_service
 
 class AccumuloScript(Script):
 
-  # a mapping between the component name used by these scripts and the name
-  # which is used by <stack-selector-tool>
-  COMPONENT_TO_STACK_SELECT_MAPPING = {
-    "gc" : "accumulo-gc",
-    "master" : "accumulo-master",
-    "monitor" : "accumulo-monitor",
-    "tserver" : "accumulo-tablet",
-    "tracer" : "accumulo-tracer"
-  }
-
   def __init__(self, component):
     self.component = component
 
 
-  def get_component_name(self):
-    """
-    Gets the <stack-selector-tool> component name given the script component
-    :return:  the name of the component on the stack which is used by
-              <stack-selector-tool>
-    """
-    if self.component not in self.COMPONENT_TO_STACK_SELECT_MAPPING:
-      return None
-
-    stack_component = self.COMPONENT_TO_STACK_SELECT_MAPPING[self.component]
-    return stack_component
-
-
   def install(self, env):
     self.install_packages(env)
 
@@ -107,19 +84,12 @@ class AccumuloScript(Script):
     if not (params.stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.stack_version_formatted)):
       return
 
-    if self.component not in self.COMPONENT_TO_STACK_SELECT_MAPPING:
-      Logger.info("Unable to execute an upgrade for unknown component {0}".format(self.component))
-      raise Fail("Unable to execute an upgrade for unknown component {0}".format(self.component))
-
-    stack_component = self.COMPONENT_TO_STACK_SELECT_MAPPING[self.component]
+    stack_component = stack_select.get_package_name()
 
     Logger.info("Executing Accumulo Upgrade pre-restart for {0}".format(stack_component))
     conf_select.select(params.stack_name, "accumulo", params.version)
-    stack_select.select(stack_component, params.version)
+    stack_select.select_packages(params.version)
 
-    # some accumulo components depend on the client, so update that too
-    stack_select.select("accumulo-client", params.version)
-      
   def get_log_folder(self):
     import params
     return params.log_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
 
b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
index 2414fff..4a8210d 100644
--- 
a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
+++ 
b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
@@ -30,16 +30,13 @@ from metadata import metadata
 
 class AtlasClient(Script):
 
-  def get_component_name(self):
-    return "atlas-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, params.version_for_stack_feature_checks):
       conf_select.select(params.stack_name, "atlas", params.version)
-      stack_select.select("atlas-client", params.version)
+      stack_select.select_packages(params.version)
 
   def install(self, env):
     self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
 
b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
index 1ef77cf..948fe8c 100644
--- 
a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
+++ 
b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
@@ -40,10 +40,6 @@ from setup_ranger_atlas import setup_ranger_atlas
 from resource_management.core.resources.zkmigrator import ZkMigrator
 
 class MetadataServer(Script):
-
-  def get_component_name(self):
-    return "atlas-server"
-
   def install(self, env):
     import params
     env.set_params(params)
@@ -65,7 +61,7 @@ class MetadataServer(Script):
 
     if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, params.version):
       conf_select.select(params.stack_name, "atlas", params.version)
-      stack_select.select("atlas-server", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/atlas_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/atlas_client.py
 
b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/atlas_client.py
index 26742ae..3f9a5bc 100644
--- 
a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/atlas_client.py
+++ 
b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/atlas_client.py
@@ -30,16 +30,13 @@ from metadata import metadata
 
 class AtlasClient(Script):
 
-  def get_component_name(self):
-    return "atlas-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, params.version):
       conf_select.select(params.stack_name, "atlas", params.version)
-      stack_select.select("atlas-client", params.version)
+      stack_select.select_packages(params.version)
 
   def install(self, env):
     self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/metadata_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/metadata_server.py
 
b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/metadata_server.py
index 1ef77cf..daaa871 100644
--- 
a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/metadata_server.py
+++ 
b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/metadata_server.py
@@ -41,9 +41,6 @@ from resource_management.core.resources.zkmigrator import 
ZkMigrator
 
 class MetadataServer(Script):
 
-  def get_component_name(self):
-    return "atlas-server"
-
   def install(self, env):
     import params
     env.set_params(params)
@@ -65,7 +62,7 @@ class MetadataServer(Script):
 
     if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, params.version):
       conf_select.select(params.stack_name, "atlas", params.version)
-      stack_select.select("atlas-server", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
 
b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
index 7c6bf39..20623f7 100644
--- 
a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
+++ 
b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
@@ -35,10 +35,6 @@ class DruidBase(Script):
   def __init__(self, nodeType=None):
     self.nodeType = nodeType
 
-  def get_component_name(self):
-    node_type_lower = self.nodeType.lower()
-    return format("druid-{node_type_lower}")
-
   def install(self, env):
     self.install_packages(env)
 
@@ -55,7 +51,7 @@ class DruidBase(Script):
     env.set_params(params)
 
     if params.stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.stack_version):
-      stack_select.select(self.get_component_name(), params.stack_version)
+      stack_select.select_packages(params.stack_version)
     if params.stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.stack_version):
       conf_select.select(params.stack_name, "druid", params.stack_version)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
 
b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
index b837b24..36dab51 100644
--- 
a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
+++ 
b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
@@ -36,9 +36,6 @@ from resource_management.libraries.resources.properties_file 
import PropertiesFi
 
 class Superset(Script):
 
-  def get_component_name(self):
-    return format("druid-superset")
-
   def install(self, env):
     self.install_packages(env)
 
@@ -98,7 +95,7 @@ class Superset(Script):
     env.set_params(params)
 
     if params.stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.stack_version):
-      stack_select.select(self.get_component_name(), params.stack_version)
+      stack_select.select_packages(params.stack_version)
     if params.stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.stack_version):
       conf_select.select(params.stack_name, "superset", params.stack_version)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
 
b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
index b0f517b..f75f34f 100644
--- 
a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
+++ 
b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
@@ -38,9 +38,6 @@ class FalconClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class FalconClientLinux(FalconClient):
-  def get_component_name(self):
-    return "falcon-client"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -57,7 +54,7 @@ class FalconClientLinux(FalconClient):
 
     Logger.info("Executing Falcon Client Stack Upgrade pre-restart")
     conf_select.select(params.stack_name, "falcon", params.version)
-    stack_select.select("falcon-client", params.version)
+    stack_select.select_packages(params.version)
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class FalconClientWindows(FalconClient):

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
 
b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
index 23f9ef8..d547a1a 100644
--- 
a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
+++ 
b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
@@ -60,9 +60,6 @@ class FalconServer(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class FalconServerLinux(FalconServer):
-  def get_component_name(self):
-    return "falcon-server"
-
   def install(self, env):
     import params
     self.install_packages(env)
@@ -85,7 +82,7 @@ class FalconServerLinux(FalconServer):
 
     Logger.info("Executing Falcon Server Stack Upgrade pre-restart")
     conf_select.select(params.stack_name, "falcon", params.version)
-    stack_select.select("falcon-server", params.version)
+    stack_select.select_packages(params.version)
 
     falcon_server_upgrade.pre_start_restore()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
 
b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
index 98b357e..a21ecf5 100644
--- 
a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
+++ 
b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
@@ -40,9 +40,6 @@ class FlumeHandler(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class FlumeHandlerLinux(FlumeHandler):
-  def get_component_name(self):
-    return "flume-server"
-
   def install(self, env):
     import params
     self.install_packages(env)
@@ -90,7 +87,7 @@ class FlumeHandlerLinux(FlumeHandler):
 
     Logger.info("Executing Flume Stack Upgrade pre-restart")
     conf_select.select(params.stack_name, "flume", params.version)
-    stack_select.select("flume-server", params.version)
+    stack_select.select_packages(params.version)
 
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
index c8128ab..3027bff 100644
--- 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
+++ 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
@@ -51,20 +51,16 @@ class HbaseClientWindows(HbaseClient):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseClientDefault(HbaseClient):
-  def get_component_name(self):
-    return "hbase-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hbase", params.version)
-      stack_select.select("hbase-client", params.version)
 
       # phoenix may not always be deployed
       try:
-        stack_select.select("phoenix-client", params.version)
+        stack_select.select_packages(params.version)
       except Exception as e:
         print "Ignoring error due to missing phoenix-client"
         print str(e)
@@ -74,7 +70,6 @@ class HbaseClientDefault(HbaseClient):
       # of the final "CLIENTS" group and we need to ensure that hadoop-client
       # is also set
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
 
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
index 83af3aa..a5f1e35 100644
--- 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
+++ 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
@@ -72,13 +72,10 @@ class HbaseMasterWindows(HbaseMaster):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseMasterDefault(HbaseMaster):
-  def get_component_name(self):
-    return "hbase-master"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "hbase-master")
+    upgrade.prestart(env)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
index 75910b1..04f0e22 100644
--- 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
+++ 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
@@ -74,13 +74,10 @@ class HbaseRegionServerWindows(HbaseRegionServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseRegionServerDefault(HbaseRegionServer):
-  def get_component_name(self):
-    return "hbase-regionserver"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "hbase-regionserver")
+    upgrade.prestart(env)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
index 8a85d6e..e6dff39 100644
--- 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
+++ 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
@@ -34,10 +34,6 @@ class PhoenixQueryServer(Script):
     self.install_packages(env)
 
 
-  def get_component_name(self):
-    return "phoenix-server"
-
-
   def configure(self, env):
     import params
     env.set_params(params)
@@ -64,7 +60,7 @@ class PhoenixQueryServer(Script):
     if params.stack_version_formatted and check_stack_feature(StackFeature.PHOENIX, params.stack_version_formatted):
       # phoenix uses hbase configs
       conf_select.select(params.stack_name, "hbase", params.version)
-      stack_select.select("phoenix-server", params.version)
+      stack_select.select_packages(params.version)
 
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
index 703fe26..a502c1d 100644
--- 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
+++ 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
@@ -33,12 +33,12 @@ from resource_management.libraries.functions.format import 
format
 from resource_management.libraries.functions import check_process_status
 
 
-def prestart(env, stack_component):
+def prestart(env):
   import params
 
   if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
     conf_select.select(params.stack_name, "hbase", params.version)
-    stack_select.select(stack_component, params.version)
+    stack_select.select_packages(params.version)
 
 def post_regionserver(env):
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_client.py
 
b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_client.py
index c8128ab..3027bff 100644
--- 
a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_client.py
+++ 
b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_client.py
@@ -51,20 +51,16 @@ class HbaseClientWindows(HbaseClient):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseClientDefault(HbaseClient):
-  def get_component_name(self):
-    return "hbase-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hbase", params.version)
-      stack_select.select("hbase-client", params.version)
 
       # phoenix may not always be deployed
       try:
-        stack_select.select("phoenix-client", params.version)
+        stack_select.select_packages(params.version)
       except Exception as e:
         print "Ignoring error due to missing phoenix-client"
         print str(e)
@@ -74,7 +70,6 @@ class HbaseClientDefault(HbaseClient):
       # of the final "CLIENTS" group and we need to ensure that hadoop-client
       # is also set
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
 
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_master.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_master.py
 
b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_master.py
index d2c8089..8af08c6 100644
--- 
a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_master.py
+++ 
b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_master.py
@@ -72,13 +72,10 @@ class HbaseMasterWindows(HbaseMaster):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseMasterDefault(HbaseMaster):
-  def get_component_name(self):
-    return "hbase-master"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "hbase-master")
+    upgrade.prestart(env)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_regionserver.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_regionserver.py
 
b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_regionserver.py
index 226e7fd5..e0c766e 100644
--- 
a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_regionserver.py
+++ 
b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_regionserver.py
@@ -74,13 +74,10 @@ class HbaseRegionServerWindows(HbaseRegionServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseRegionServerDefault(HbaseRegionServer):
-  def get_component_name(self):
-    return "hbase-regionserver"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "hbase-regionserver")
+    upgrade.prestart(env)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/phoenix_queryserver.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/phoenix_queryserver.py
 
b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/phoenix_queryserver.py
index 77820cc..872a5c1 100644
--- 
a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/phoenix_queryserver.py
+++ 
b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/phoenix_queryserver.py
@@ -34,10 +34,6 @@ class PhoenixQueryServer(Script):
     self.install_packages(env)
 
 
-  def get_component_name(self):
-    return "phoenix-server"
-
-
   def configure(self, env):
     import params
     env.set_params(params)
@@ -64,7 +60,7 @@ class PhoenixQueryServer(Script):
     if params.stack_version_formatted and check_stack_feature(StackFeature.PHOENIX, params.stack_version_formatted):
       # phoenix uses hbase configs
       conf_select.select(params.stack_name, "hbase", params.version)
-      stack_select.select("phoenix-server", params.version)
+      stack_select.select_packages(params.version)
 
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/upgrade.py
 
b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/upgrade.py
index 703fe26..a502c1d 100644
--- 
a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/upgrade.py
+++ 
b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/upgrade.py
@@ -33,12 +33,12 @@ from resource_management.libraries.functions.format import 
format
 from resource_management.libraries.functions import check_process_status
 
 
-def prestart(env, stack_component):
+def prestart(env):
   import params
 
   if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
     conf_select.select(params.stack_name, "hbase", params.version)
-    stack_select.select(stack_component, params.version)
+    stack_select.select_packages(params.version)
 
 def post_regionserver(env):
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
index da03cce..257ccf9 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
@@ -39,14 +39,11 @@ from utils import get_dfsadmin_base_command
 
 class DataNode(Script):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-datanode"
-
   def get_hdfs_binary(self):
     """
     Get the name or path to the hdfs binary depending on the component name.
     """
-    component_name = self.get_component_name()
+    component_name = stack_select.get_package_name()
     return get_hdfs_binary(component_name)
 
 
@@ -134,7 +131,7 @@ class DataNodeDefault(DataNode):
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-datanode", params.version)
+      stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing DataNode Stack Upgrade post-restart")

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
index 51acc9e..5633cba 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
@@ -56,15 +56,12 @@ class HdfsClient(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HdfsClientDefault(HdfsClient):
 
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class HdfsClientWindows(HdfsClient):

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
index 7fd8d70..bb2895e 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
@@ -43,9 +43,6 @@ class JournalNode(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class JournalNodeDefault(JournalNode):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-journalnode"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade pre-restart")
     import params
@@ -53,7 +50,7 @@ class JournalNodeDefault(JournalNode):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-journalnode", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
index 897e6cb..47b8021 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
@@ -67,14 +67,11 @@ except ImportError:
 
 class NameNode(Script):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-namenode"
-
   def get_hdfs_binary(self):
     """
     Get the name or path to the hdfs binary depending on the component name.
     """
-    component_name = self.get_component_name()
+    component_name = stack_select.get_package_name()
     return get_hdfs_binary(component_name)
 
   def install(self, env):
@@ -204,7 +201,7 @@ class NameNodeDefault(NameNode):
     if upgrade_type != constants.UPGRADE_TYPE_NON_ROLLING or 
params.upgrade_direction != Direction.DOWNGRADE:
       conf_select.select(params.stack_name, "hadoop", params.version)
 
-    stack_select.select("hadoop-hdfs-namenode", params.version)
+    stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade post-restart")

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
index fa451f4..66968b7 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
@@ -31,10 +31,6 @@ from resource_management.libraries.functions.stack_features 
import check_stack_f
 
 
 class NFSGateway(Script):
-
-  def get_component_name(self):
-    return "hadoop-hdfs-nfs3"
-
   def install(self, env):
     import params
 
@@ -48,7 +44,7 @@ class NFSGateway(Script):
 
     if params.stack_version_formatted and 
check_stack_feature(StackFeature.NFS, params.stack_version_formatted):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-nfs3", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
index 1408468..0494df0 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
@@ -63,9 +63,6 @@ class SNameNode(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class SNameNodeDefault(SNameNode):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-secondarynamenode"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade pre-restart")
     import params
@@ -73,7 +70,7 @@ class SNameNodeDefault(SNameNode):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-secondarynamenode", params.version)
+      stack_select.select_packages(params.version)
       
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
index cd47109..628b01a 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
@@ -35,13 +35,6 @@ from resource_management.libraries.script import Script
 from resource_management.core.resources.zkmigrator import ZkMigrator
 
 class ZkfcSlave(Script):
-  def get_component_name(self):
-    import params
-    if params.version_for_stack_feature_checks and 
check_stack_feature(StackFeature.ZKFC_VERSION_ADVERTISED, 
params.version_for_stack_feature_checks):
-      # params.version is not defined when installing cluster from blueprint
-      return "hadoop-hdfs-zkfc"
-    pass
-
   def install(self, env):
     import params
     env.set_params(params)
@@ -141,7 +134,7 @@ class ZkfcSlaveDefault(ZkfcSlave):
     env.set_params(params)
     if check_stack_feature(StackFeature.ZKFC_VERSION_ADVERTISED, 
params.version_for_stack_feature_checks):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-zkfc", params.version)
+      stack_select.select_packages(params.version)
 
 def initialize_ha_zookeeper(params):
   try:

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
index 78a8f4b..9f72aa0 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
@@ -32,9 +32,6 @@ from utils import get_hdfs_binary
 
 class DataNode(Script):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-datanode"
-
   def get_hdfs_binary(self):
     """
     Get the name or path to the hdfs binary depending on the component name.
@@ -88,7 +85,7 @@ class DataNodeDefault(DataNode):
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-datanode", params.version)
+      stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing DataNode Stack Upgrade post-restart")

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
index 51acc9e..5633cba 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
@@ -56,15 +56,12 @@ class HdfsClient(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HdfsClientDefault(HdfsClient):
 
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class HdfsClientWindows(HdfsClient):

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/journalnode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/journalnode.py
 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/journalnode.py
index 7fd8d70..bb2895e 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/journalnode.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/journalnode.py
@@ -43,9 +43,6 @@ class JournalNode(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class JournalNodeDefault(JournalNode):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-journalnode"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade pre-restart")
     import params
@@ -53,7 +50,7 @@ class JournalNodeDefault(JournalNode):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-journalnode", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/namenode.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/namenode.py
index 4c4a7eb..a904de8 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/namenode.py
@@ -67,9 +67,6 @@ except ImportError:
 
 class NameNode(Script):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-namenode"
-
   def get_hdfs_binary(self):
     """
     Get the name or path to the hdfs binary depending on the component name.
@@ -200,7 +197,7 @@ class NameNodeDefault(NameNode):
     if upgrade_type != constants.UPGRADE_TYPE_NON_ROLLING or params.upgrade_direction != Direction.DOWNGRADE:
       conf_select.select(params.stack_name, "hadoop", params.version)
 
-    stack_select.select("hadoop-hdfs-namenode", params.version)
+    stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade post-restart")

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/nfsgateway.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/nfsgateway.py
 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/nfsgateway.py
index 602c179..ba38526 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/nfsgateway.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/nfsgateway.py
@@ -32,9 +32,6 @@ from resource_management.libraries.functions.stack_features 
import check_stack_f
 
 class NFSGateway(Script):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-nfs3"
-
   def install(self, env):
     import params
 
@@ -48,7 +45,7 @@ class NFSGateway(Script):
 
     if params.stack_version_formatted and 
check_stack_feature(StackFeature.NFS, params.stack_version_formatted):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-nfs3", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/snamenode.py
 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/snamenode.py
index 030a470..5a4cc5a 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/snamenode.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/snamenode.py
@@ -63,9 +63,6 @@ class SNameNode(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class SNameNodeDefault(SNameNode):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-secondarynamenode"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade pre-restart")
     import params
@@ -73,7 +70,7 @@ class SNameNodeDefault(SNameNode):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-secondarynamenode", params.version)
+      stack_select.select_packages(params.version)
 
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
index fa948ca..6ea9b52 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
@@ -40,13 +40,6 @@ from resource_management.libraries.script import Script
 from resource_management.core.resources.zkmigrator import ZkMigrator
 
 class ZkfcSlave(Script):
-  def get_component_name(self):
-    import params
-    if params.version_for_stack_feature_checks and 
check_stack_feature(StackFeature.ZKFC_VERSION_ADVERTISED, 
params.version_for_stack_feature_checks):
-      # params.version is not defined when installing cluster from blueprint
-      return "hadoop-hdfs-zkfc"
-    pass
-
   def install(self, env):
     import params
     env.set_params(params)
@@ -148,7 +141,7 @@ class ZkfcSlaveDefault(ZkfcSlave):
     if params.version and 
check_stack_feature(StackFeature.ZKFC_VERSION_ADVERTISED, params.version) \
         and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-zkfc", params.version)
+      stack_select.select_packages(params.version)
 
 def initialize_ha_zookeeper(params):
   try:

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
index 47bbc41..e54740d 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
@@ -51,12 +51,6 @@ class HCatClientWindows(HCatClient):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HCatClientDefault(HCatClient):
-  def get_component_name(self):
-    # HCat client doesn't have a first-class entry in <stack-selector-tool>. 
Since clients always
-    # update after daemons, this ensures that the hcat directories are correct 
on hosts
-    # which do not include the WebHCat daemon
-    return "hive-webhcat"
-
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     """
@@ -78,7 +72,7 @@ class HCatClientDefault(HCatClient):
     # HCat client doesn't have a first-class entry in <stack-selector-tool>. 
Since clients always
     # update after daemons, this ensures that the hcat directories are correct 
on hosts
     # which do not include the WebHCat daemon
-    stack_select.select("hive-webhcat", params.version)
+    stack_select.select_packages(params.version)
 
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
index 83d82df..e6c9aab 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
@@ -50,9 +50,6 @@ class HiveClientWindows(HiveClient):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveClientDefault(HiveClient):
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Hive client Stack Upgrade pre-restart")
 
@@ -61,7 +58,7 @@ class HiveClientDefault(HiveClient):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
       conf_select.select(params.stack_name, "hive", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
 
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
index 99eb8b5..43f0c86 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
@@ -87,10 +87,6 @@ class HiveMetastoreWindows(HiveMetastore):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveMetastoreDefault(HiveMetastore):
-  def get_component_name(self):
-    return "hive-metastore"
-
-
   def status(self, env):
     import status_params
     from resource_management.libraries.functions import check_process_status
@@ -110,7 +106,7 @@ class HiveMetastoreDefault(HiveMetastore):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
       conf_select.select(params.stack_name, "hive", params.version)
-      stack_select.select("hive-metastore", params.version)
+      stack_select.select_packages(params.version)
 
     if is_upgrade and params.stack_version_formatted_major and \
             check_stack_feature(StackFeature.HIVE_METASTORE_UPGRADE_SCHEMA, 
params.stack_version_formatted_major):

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
index 7c3a805..6c76af8 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
@@ -72,9 +72,6 @@ class HiveServerWindows(HiveServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveServerDefault(HiveServer):
-  def get_component_name(self):
-    return "hive-server2"
-
   def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)
@@ -117,7 +114,7 @@ class HiveServerDefault(HiveServer):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
       conf_select.select(params.stack_name, "hive", params.version)
-      stack_select.select("hive-server2", params.version)
+      stack_select.select_packages(params.version)
 
       # Copy mapreduce.tar.gz and tez.tar.gz to HDFS
       resource_created = copy_to_hdfs(

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
index 0504d18..df2a295 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
@@ -67,10 +67,6 @@ class HiveServerInteractive(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveServerInteractiveDefault(HiveServerInteractive):
-
-    def get_component_name(self):
-      return "hive-server2-hive2"
-
     def install(self, env):
       import params
       self.install_packages(env)
@@ -86,7 +82,7 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
       env.set_params(params)
 
       if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
-        stack_select.select("hive-server2-hive2", params.version)
+        stack_select.select_packages(params.version)
         conf_select.select(params.stack_name, "hive2", params.version)
 
         # Copy hive.tar.gz and tez.tar.gz used by Hive Interactive to HDFS

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
index 18e11ab..9bd5c6e 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
@@ -65,9 +65,6 @@ class WebHCatServerWindows(WebHCatServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class WebHCatServerDefault(WebHCatServer):
-  def get_component_name(self):
-    return "hive-webhcat"
-
   def status(self, env):
     import status_params
     env.set_params(status_params)
@@ -82,7 +79,7 @@ class WebHCatServerDefault(WebHCatServer):
       # webhcat has no conf, but uses hadoop home, so verify that regular 
hadoop conf is set
       conf_select.select(params.stack_name, "hive-hcatalog", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hive-webhcat", params.version)
+      stack_select.select_packages(params.version)
 
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/2bab2159/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hcat_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hcat_client.py
 
b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hcat_client.py
index 47bbc41..139dda8 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hcat_client.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hcat_client.py
@@ -51,13 +51,6 @@ class HCatClientWindows(HCatClient):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HCatClientDefault(HCatClient):
-  def get_component_name(self):
-    # HCat client doesn't have a first-class entry in <stack-selector-tool>. 
Since clients always
-    # update after daemons, this ensures that the hcat directories are correct 
on hosts
-    # which do not include the WebHCat daemon
-    return "hive-webhcat"
-
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     """
     Execute <stack-selector-tool> before reconfiguring this client to the new 
stack version.
@@ -78,7 +71,7 @@ class HCatClientDefault(HCatClient):
     # HCat client doesn't have a first-class entry in <stack-selector-tool>. 
Since clients always
     # update after daemons, this ensures that the hcat directories are correct 
on hosts
     # which do not include the WebHCat daemon
-    stack_select.select("hive-webhcat", params.version)
+    stack_select.select_packages(params.version)
 
 
 if __name__ == "__main__":

Reply via email to