http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/namenode.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/namenode.py
index 280aeee..6751864 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/namenode.py
@@ -24,7 +24,6 @@ import  tempfile
 from resource_management import Script
 from resource_management.core.resources.system import Execute, File
 from resource_management.core import shell
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
@@ -63,7 +62,7 @@ class NameNode(Script):
     Get the name or path to the hdfs binary depending on the stack and version.
     """
     import params
-    stack_to_comp = stack_select.get_package_name()
+    stack_to_comp = "hadoop-hdfs-namenode"
     if params.stack_name in stack_to_comp:
       return get_hdfs_binary(stack_to_comp[params.stack_name])
     return "hdfs"
@@ -96,7 +95,6 @@ class NameNode(Script):
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-hdfs-namenode {version}"))
 

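Every BigInsights 4.2 script in this commit follows the same pattern as the hunk above: the conf_select.select(...) call is dropped from pre_upgrade_restart and only stack_select.select_packages(params.version) remains. A minimal sketch of the resulting method, assuming a hypothetical component class and params module (the version threshold varies per script):

from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions.version import compare_versions, format_stack_version
from resource_management.libraries.script import Script

class ExampleComponent(Script):
  def pre_upgrade_restart(self, env, upgrade_type=None):
    import params  # hypothetical stack-generated params module
    env.set_params(params)
    if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
      # conf_select.select(...) is no longer called here; only the
      # stack-select switch for the component's packages remains.
      stack_select.select_packages(params.version)
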
http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/nfsgateway.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/nfsgateway.py
index b105d69..448b2dd 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/nfsgateway.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/nfsgateway.py
@@ -24,7 +24,6 @@ from resource_management.libraries.functions.security_commons import build_expec
   FILE_TYPE_XML
 from hdfs_nfsgateway import nfsgateway
 from hdfs import hdfs
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.version import compare_versions, format_stack_version
 
@@ -42,7 +41,6 @@ class NFSGateway(Script):
     env.set_params(params)
 
     if Script.is_stack_greater_or_equal('4.1.0.0'):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/snamenode.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/snamenode.py
index 6a7fd43..a364002 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/snamenode.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HDFS/package/scripts/snamenode.py
@@ -18,7 +18,6 @@ limitations under the License.
 """
 
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -44,7 +43,6 @@ class SNameNode(Script):
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_client.py
index 14a5f83..00aac02 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_client.py
@@ -19,7 +19,6 @@ limitations under the License.
 """
 import sys
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from hive import hive
 from ambari_commons.os_family_impl import OsFamilyImpl
@@ -49,8 +48,6 @@ class HiveClientDefault(HiveClient):
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
-      conf_select.select(params.stack_name, "hive", params.version)
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def pre_upgrade_restart(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_metastore.py
index 0a651f9..adfbead 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_metastore.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_metastore.py
@@ -22,7 +22,6 @@ import os
 from resource_management.core.logger import Logger
 from resource_management.core.resources.system import Execute, Directory
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.functions.format import format
@@ -96,7 +95,6 @@ class HiveMetastoreDefault(HiveMetastore):
       self.upgrade_schema(env)
       
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "hive", params.version)
       stack_select.select_packages(params.version)
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_server.py
index 79e4c73..e6cb861 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_server.py
@@ -21,7 +21,6 @@ limitations under the License.
 
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
@@ -95,7 +94,6 @@ class HiveServerDefault(HiveServer):
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "hive", params.version)
       stack_select.select_packages(params.version)
       
       # Copy mapreduce.tar.gz and tez.tar.gz to HDFS

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/webhcat_server.py
index 9f1596b..6a2f0b1 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/webhcat_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/webhcat_server.py
@@ -19,7 +19,6 @@ Ambari Agent
 
 """
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
@@ -66,8 +65,6 @@ class WebHCatServerDefault(WebHCatServer):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
       # webhcat has no conf, but uses hadoop home, so verify that regular hadoop conf is set
-      conf_select.select(params.stack_name, "hive-hcatalog", params.version)
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def security_status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py
index cf08b9b..772ecad 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py
@@ -16,7 +16,6 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import Direction
 from resource_management.libraries.functions.version import compare_versions, format_stack_version
@@ -48,10 +47,7 @@ class KafkaBroker(Script):
     if params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
       stack_select.select_packages(params.version)
 
-    if params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
-      conf_select.select(params.stack_name, "kafka", params.version)
-
-    # This is extremely important since it should only be called if crossing the IOP 4.2 boundary. 
+    # This is extremely important since it should only be called if crossing the IOP 4.2 boundary.
     if params.current_version and params.version and params.upgrade_direction:
       src_version = dst_version = None
       if params.upgrade_direction == Direction.UPGRADE:

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KNOX/package/scripts/knox_gateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KNOX/package/scripts/knox_gateway.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KNOX/package/scripts/knox_gateway.py
index 2b3a103..4746043 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KNOX/package/scripts/knox_gateway.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KNOX/package/scripts/knox_gateway.py
@@ -18,7 +18,6 @@ limitations under the License.
 """
 
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 import os
 import tarfile
 
@@ -111,7 +110,6 @@ class KnoxGateway(Script):
         absolute_backup_dir = upgrade.backup_data()
 
       # conf-select will change the symlink to the conf folder.
-      conf_select.select(params.stack_name, "knox", params.version)
 #     hdp_select.select("knox-server", params.version)
       stack_select.select_packages(params.version)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_client.py
index 8f4577e..e35c3f1 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_client.py
@@ -20,7 +20,6 @@ limitations under the License.
 
 import sys
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 
 from oozie import oozie
@@ -54,7 +53,6 @@ class OozieClient(Script):
       return
 
     Logger.info("Executing Oozie Client Rolling Upgrade pre-restart")
-    conf_select.select(params.stack_name, "oozie", params.version)
     stack_select.select_packages(params.version)
     #Execute(format("iop-select set oozie-client {version}"))
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_server.py
index c2e1af5..5ba9fbd 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/OOZIE/package/scripts/oozie_server.py
@@ -23,7 +23,6 @@ import oozie_server_upgrade
 from resource_management.core import Logger
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions import format
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.script import Script
@@ -52,7 +51,6 @@ class OozieServer(Script):
     #TODO: needed?
     if upgrade_type == "nonrolling" and params.upgrade_direction == Direction.UPGRADE and \
             params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
-      conf_select.select(params.stack_name, "oozie", params.version)
       # In order for the "/usr/hdp/current/oozie-<client/server>" point to the new version of
       # oozie, we need to create the symlinks both for server and client.
       # This is required as both need to be pointing to new installed oozie version.
@@ -176,7 +174,6 @@ class OozieServer(Script):
 
     oozie_server_upgrade.backup_configuration()
 
-    conf_select.select(params.stack_name, "oozie", params.version)
     stack_select.select_packages(params.version)
     #Execute(format("iop-select set oozie-server {version}"))
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/PIG/package/scripts/pig_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/PIG/package/scripts/pig_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/PIG/package/scripts/pig_client.py
index 96a48c1..65403a1 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/PIG/package/scripts/pig_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/PIG/package/scripts/pig_client.py
@@ -22,7 +22,6 @@ Ambari Agent
 import sys
 import os
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from pig import pig
 
@@ -44,8 +43,6 @@ class PigClientLinux(PigClient):
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "pig", params.version)
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/ranger_admin.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/ranger_admin.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/ranger_admin.py
index c34951b..37555cc 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/ranger_admin.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/ranger_admin.py
@@ -18,7 +18,6 @@ limitations under the License.
 
 """
 from resource_management.libraries.functions import stack_select
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.script import Script
 from resource_management.core.resources.system import Execute
@@ -176,7 +175,6 @@ class RangerAdmin(Script):
     stack_version = upgrade_stack[1]
 
     stack_select.select_packages(params.version)
-    conf_select.select(stack_name, "ranger-admin", stack_version)
 
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/upgrade.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/upgrade.py
index 75b1fbe..920b8c1 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER/package/scripts/upgrade.py
@@ -26,5 +26,4 @@ from resource_management.libraries.functions.format import format
 def prestart(env, iop_component):
   import params
 
-  conf_select.select(params.stack_name, iop_component, params.version)
   stack_select.select_packages(params.version)

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER_KMS/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER_KMS/package/scripts/upgrade.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER_KMS/package/scripts/upgrade.py
index b37c295..95817f4 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER_KMS/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/RANGER_KMS/package/scripts/upgrade.py
@@ -25,5 +25,4 @@ from resource_management.libraries.functions.format import format
 def prestart(env, iop_component):
   import params
 
-  conf_select.select(params.stack_name, iop_component, params.version)
   stack_select.select_packages(params.version)

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SLIDER/package/scripts/slider_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SLIDER/package/scripts/slider_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SLIDER/package/scripts/slider_client.py
index 33e2588..d97ce83 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SLIDER/package/scripts/slider_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SLIDER/package/scripts/slider_client.py
@@ -19,7 +19,6 @@ limitations under the License.
 """
 
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from slider import slider
 from ambari_commons import OSConst
@@ -36,14 +35,9 @@ class SliderClientLinux(SliderClient):
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "slider", params.version)
       stack_select.select_packages(params.version)
       #Execute(format("iop-select set slider-client {version}"))
 
-      # also set all of the hadoop clients since slider client is upgraded as
-      # part of the final "CLIENTS" group and we need to ensure that
-      # hadoop-client is also set
-      conf_select.select(params.stack_name, "hadoop", params.version)
       #Execute(format("iop-select set hadoop-client {version}"))
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_server.py
index 6b4454e..38c4edf 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_server.py
@@ -20,7 +20,6 @@ limitations under the License.
 
 import sys
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from solr_service import solr_service
 from solr import solr
@@ -40,7 +39,6 @@ class SolrServer(Script):
     env.set_params(params)
     if params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
       stack_select.select_packages(params.version)
-      conf_select.select(params.stack_name, "solr", params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_upgrade.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_upgrade.py
index 61142e3..55a61b6 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_upgrade.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_upgrade.py
@@ -20,7 +20,6 @@ import os
 from resource_management import *
 from resource_management.core.logger import Logger
 from resource_management.core.resources.system import Execute
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import get_unique_id_and_date
 
 class SolrServerUpgrade(Script):
@@ -40,8 +39,6 @@ class SolrServerUpgrade(Script):
 
     solr41_conf_dir="/usr/iop/4.1.0.0/solr/conf"
     solr41_etc_dir="/etc/solr/4.1.0.0/0"
-    if not os.path.exists(solr41_etc_dir):
-      conf_select.create(params.stack_name, "solr", "4.1.0.0")
 
     content_path=solr41_conf_dir
     if not os.path.isfile("/usr/iop/4.1.0.0/solr/conf/solr.in.sh"):
@@ -65,8 +62,6 @@ class SolrServerUpgrade(Script):
            to=solr41_etc_dir
       )
 
-    conf_select.select(params.stack_name, "solr", "4.1.0.0")
-
   def pre_stop_backup_cores(self, env):
     """
     Backs up the Solr cores under Solr's home directory.

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/job_history_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/job_history_server.py
index 50fef1a..080ef72 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/job_history_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/job_history_server.py
@@ -21,7 +21,6 @@ limitations under the License.
 import sys
 import os
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 #from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.core.exceptions import ComponentIsNotRunning
@@ -37,7 +36,6 @@ class JobHistoryServer(Script):
 
     env.set_params(params)
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "spark", params.version)
       stack_select.select_packages(params.version)
       #Execute(format("iop-select set spark-historyserver {version}"))
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_client.py
index afd9238..a858c6e 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_client.py
@@ -20,7 +20,6 @@ limitations under the License.
 
 import sys
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 #from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.core.exceptions import ComponentIsNotRunning
@@ -36,7 +35,6 @@ class SparkClient(Script):
 
     env.set_params(params)
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "spark", params.version)
       stack_select.select_packages(params.version)
       #Execute(format("iop-select set spark-client {version}"))
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_thrift_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_thrift_server.py
index a680701..de8a4c8 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_thrift_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_thrift_server.py
@@ -23,7 +23,6 @@ import socket
 import os
 from resource_management import *
 from resource_management.libraries.functions import stack_select
-from resource_management.libraries.functions import conf_select
 from resource_management.core.exceptions import ComponentIsNotRunning
 from resource_management.core.logger import Logger
 from resource_management.core import shell
@@ -38,7 +37,6 @@ class ThriftServer(Script):
 
     env.set_params(params)
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "spark", params.version)
       stack_select.select_packages(params.version)
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/sqoop_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/sqoop_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/sqoop_client.py
index 7b48dfe..1063393 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/sqoop_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/sqoop_client.py
@@ -20,7 +20,6 @@ limitations under the License.
 
 import sys
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.version import compare_versions, format_stack_version
 
@@ -34,7 +33,6 @@ class SqoopClient(Script):
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "sqoop", params.version)
       stack_select.select_packages(params.version)
       #Execute(format("iop-select set sqoop-client {version}"))
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/TITAN/package/scripts/titan_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/TITAN/package/scripts/titan_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/TITAN/package/scripts/titan_client.py
index 814fb38..478498d 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/TITAN/package/scripts/titan_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/TITAN/package/scripts/titan_client.py
@@ -22,7 +22,6 @@ Ambari Agent
 import sys
 import os
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from titan import titan
 
@@ -44,7 +43,6 @@ class TitanClientLinux(TitanClient):
         env.set_params(params)
 
         if params.version and compare_versions(format_stack_version(params.version), '4.2.0.0') >= 0:
-            conf_select.select(params.stack_name, "titan", params.version)
             stack_select.select_packages(params.version)
 
     def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/application_timeline_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/application_timeline_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/application_timeline_server.py
index fbac488..633f47a 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/application_timeline_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/application_timeline_server.py
@@ -20,7 +20,6 @@ Ambari Agent
 """
 
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -48,7 +47,6 @@ class ApplicationTimelineServer(Script):
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-yarn-timelineserver {version}"))
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/historyserver.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/historyserver.py
index 5e1cfea..8fd5c0d 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/historyserver.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/historyserver.py
@@ -21,7 +21,6 @@ Ambari Agent
 
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
@@ -54,7 +53,6 @@ class HistoryServer(Script):
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-mapreduce-historyserver {version}"))
       #copy_tarballs_to_hdfs('mapreduce', 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/mapreduce2_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/mapreduce2_client.py
index 57d0d72..5c381e1 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/mapreduce2_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/mapreduce2_client.py
@@ -21,7 +21,6 @@ Ambari Agent
 
 import sys
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 
 from yarn import yarn
@@ -33,7 +32,6 @@ class MapReduce2Client(Script):
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-client {version}"))
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/nodemanager.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/nodemanager.py
index 6b09b13..d52ce8f 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/nodemanager.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/nodemanager.py
@@ -22,7 +22,6 @@ Ambari Agent
 import nodemanager_upgrade
 
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.format import format
@@ -49,7 +48,6 @@ class Nodemanager(Script):
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-yarn-nodemanager {version}"))
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/resourcemanager.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/resourcemanager.py
index 274100d..54ec124 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/resourcemanager.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/resourcemanager.py
@@ -20,7 +20,6 @@ Ambari Agent
 """
 
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -48,7 +47,6 @@ class Resourcemanager(Script):
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-yarn-resourcemanager {version}"))
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/yarn_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/yarn_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/yarn_client.py
index e3b9125..ddf9475 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/yarn_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/yarn_client.py
@@ -21,7 +21,6 @@ Ambari Agent
 
 import sys
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 
 from yarn import yarn
@@ -45,7 +44,6 @@ class YarnClient(Script):
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-client {version}"))
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_client.py
index f5408aa..dfd03e2 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_client.py
@@ -21,7 +21,6 @@ Ambari Agent
 
 import sys
 from resource_management import *
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.format import format
@@ -46,7 +45,6 @@ class ZookeeperClient(Script):
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "zookeeper", params.version)
       stack_select.select_packages(params.version)
       #Execute(format("iop-select set zookeeper-client {version}"))
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_server.py
index fb92b2e..42ce012 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_server.py
@@ -22,7 +22,6 @@ import random
 
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import get_unique_id_and_date
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.version import compare_versions, format_stack_version
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -55,7 +54,6 @@ class ZookeeperServer(Script):
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "zookeeper", params.version)
       stack_select.select_packages(params.version)
       #Execute(format("iop-select set zookeeper-server {version}"))
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
index 2d797bb..eb16778 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
@@ -262,14 +262,14 @@ gpgcheck=0</value>
     </value-attributes>
     <on-ambari-upgrade add="true"/>
   </property>
-  <!-- Define stack_select_packages property in the base stack. DO NOT override this property for each stack version -->
+  <!-- Define stack_packages property in the base stack. DO NOT override this property for each stack version -->
   <property>
-    <name>stack_select_packages</name>
+    <name>stack_packages</name>
     <value/>
     <description>Associations between component and stack-select tools.</description>
     <property-type>VALUE_FROM_PROPERTY_FILE</property-type>
     <value-attributes>
-      <property-file-name>stack_select_packages.json</property-file-name>
+      <property-file-name>stack_packages.json</property-file-name>
       <property-file-type>json</property-file-type>
       <read-only>true</read-only>
       <overridable>false</overridable>

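The renamed property ships the JSON shown at the end of this message as a string value in cluster-env. A hedged sketch of reading it from a stack script, assuming the conventional Script.get_config() layout (not part of this diff):

import json

from resource_management.libraries.script import Script

def load_stack_packages():
  # 'stack_packages' replaces the old 'stack_select_packages' key; the value
  # is the contents of stack_packages.json delivered as a JSON string.
  config = Script.get_config()
  raw = config['configurations']['cluster-env'].get('stack_packages')
  return json.loads(raw) if raw else {}
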
http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
index ce106d2..ac1f752 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -46,8 +46,8 @@ def setup_stack_symlinks(struct_out_file):
     return
 
   # get the packages which the stack-select tool should be used on
-  stack_select_packages = stack_select.get_packages(stack_select.PACKAGE_SCOPE_INSTALL)
-  if stack_select_packages is None:
+  stack_packages = stack_select.get_packages(stack_select.PACKAGE_SCOPE_INSTALL)
+  if stack_packages is None:
     return
 
   json_version = load_version(struct_out_file)
@@ -58,7 +58,7 @@ def setup_stack_symlinks(struct_out_file):
 
   # On parallel command execution this should be executed by a single process at a time.
   with FcntlBasedProcessLock(params.stack_select_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
-    for package in stack_select_packages:
+    for package in stack_packages:
       stack_select.select(package, json_version)
 
 
@@ -106,10 +106,6 @@ def link_configs(struct_out_file):
   """
   import params
 
-  if not Script.is_stack_greater_or_equal("2.3"):
-    Logger.info("Can only link configs for HDP-2.3 and higher.")
-    return
-
   json_version = load_version(struct_out_file)
 
   if not json_version:

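With the rename applied, setup_stack_symlinks in the after-INSTALL hook reads roughly as follows (a sketch assembled from the hunks above, not the verbatim file; load_version, FcntlBasedProcessLock and the params module come from the existing hook code):

def setup_stack_symlinks(struct_out_file):
  import params

  # get the packages which the stack-select tool should be used on
  stack_packages = stack_select.get_packages(stack_select.PACKAGE_SCOPE_INSTALL)
  if stack_packages is None:
    return

  json_version = load_version(struct_out_file)

  # On parallel command execution this should be executed by a single process at a time.
  with FcntlBasedProcessLock(params.stack_select_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
    for package in stack_packages:
      stack_select.select(package, json_version)
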
http://git-wip-us.apache.org/repos/asf/ambari/blob/0535ca64/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
new file mode 100644
index 0000000..704fb54
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
@@ -0,0 +1,1146 @@
+{
+  "HDP": {
+    "stack-select": {
+      "ACCUMULO": {
+        "ACCUMULO_CLIENT": {
+          "STACK-SELECT-PACKAGE": "accumulo-client",
+          "INSTALL": [
+            "accumulo-client"
+          ],
+          "PATCH": [
+            "accumulo-client"
+          ],
+          "STANDARD": [
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_GC": {
+          "STACK-SELECT-PACKAGE": "accumulo-gc",
+          "INSTALL": [
+            "accumulo-gc"
+          ],
+          "PATCH": [
+            "accumulo-gc"
+          ],
+          "STANDARD": [
+            "accumulo-gc",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_MASTER": {
+          "STACK-SELECT-PACKAGE": "accumulo-master",
+          "INSTALL": [
+            "accumulo-master"
+          ],
+          "PATCH": [
+            "accumulo-master"
+          ],
+          "STANDARD": [
+            "accumulo-master",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_MONITOR": {
+          "STACK-SELECT-PACKAGE": "accumulo-monitor",
+          "INSTALL": [
+            "accumulo-monitor"
+          ],
+          "PATCH": [
+            "accumulo-monitor"
+          ],
+          "STANDARD": [
+            "accumulo-monitor",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_TRACER": {
+          "STACK-SELECT-PACKAGE": "accumulo-tracer",
+          "INSTALL": [
+            "accumulo-tracer"
+          ],
+          "PATCH": [
+            "accumulo-tracer"
+          ],
+          "STANDARD": [
+            "accumulo-tracer",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_TSERVER": {
+          "STACK-SELECT-PACKAGE": "accumulo-tablet",
+          "INSTALL": [
+            "accumulo-tablet"
+          ],
+          "PATCH": [
+            "accumulo-tablet"
+          ],
+          "STANDARD": [
+            "accumulo-tablet",
+            "accumulo-client"
+          ]
+        }
+      },
+      "ATLAS": {
+        "ATLAS_CLIENT": {
+          "STACK-SELECT-PACKAGE": "atlas-client",
+          "INSTALL": [
+            "atlas-client"
+          ],
+          "PATCH": [
+            "atlas-client"
+          ],
+          "STANDARD": [
+            "atlas-client"
+          ]
+        },
+        "ATLAS_SERVER": {
+          "STACK-SELECT-PACKAGE": "atlas-server",
+          "INSTALL": [
+            "atlas-server"
+          ],
+          "PATCH": [
+            "atlas-server"
+          ],
+          "STANDARD": [
+            "atlas-server"
+          ]
+        }
+      },
+      "DRUID": {
+        "DRUID_COORDINATOR": {
+          "STACK-SELECT-PACKAGE": "druid-coordinator",
+          "INSTALL": [
+            "druid-coordinator"
+          ],
+          "PATCH": [
+            "druid-coordinator"
+          ],
+          "STANDARD": [
+            "druid-coordinator"
+          ]
+        },
+        "DRUID_OVERLORD": {
+          "STACK-SELECT-PACKAGE": "druid-overlord",
+          "INSTALL": [
+            "druid-overlord"
+          ],
+          "PATCH": [
+            "druid-overlord"
+          ],
+          "STANDARD": [
+            "druid-overlord"
+          ]
+        },
+        "DRUID_HISTORICAL": {
+          "STACK-SELECT-PACKAGE": "druid-historical",
+          "INSTALL": [
+            "druid-historical"
+          ],
+          "PATCH": [
+            "druid-historical"
+          ],
+          "STANDARD": [
+            "druid-historical"
+          ]
+        },
+        "DRUID_BROKER": {
+          "STACK-SELECT-PACKAGE": "druid-broker",
+          "INSTALL": [
+            "druid-broker"
+          ],
+          "PATCH": [
+            "druid-broker"
+          ],
+          "STANDARD": [
+            "druid-broker"
+          ]
+        },
+        "DRUID_MIDDLEMANAGER": {
+          "STACK-SELECT-PACKAGE": "druid-middlemanager",
+          "INSTALL": [
+            "druid-middlemanager"
+          ],
+          "PATCH": [
+            "druid-middlemanager"
+          ],
+          "STANDARD": [
+            "druid-middlemanager"
+          ]
+        },
+        "DRUID_ROUTER": {
+          "STACK-SELECT-PACKAGE": "druid-router",
+          "INSTALL": [
+            "druid-router"
+          ],
+          "PATCH": [
+            "druid-router"
+          ],
+          "STANDARD": [
+            "druid-router"
+          ]
+        },
+        "DRUID_SUPERSET": {
+          "STACK-SELECT-PACKAGE": "druid-superset",
+          "INSTALL": [
+            "druid-superset"
+          ],
+          "PATCH": [
+            "druid-superset"
+          ],
+          "STANDARD": [
+            "druid-superset"
+          ]
+        }
+      },
+      "FALCON": {
+        "FALCON_CLIENT": {
+          "STACK-SELECT-PACKAGE": "falcon-client",
+          "INSTALL": [
+            "falcon-client"
+          ],
+          "PATCH": [
+            "falcon-client"
+          ],
+          "STANDARD": [
+            "falcon-client"
+          ]
+        },
+        "FALCON_SERVER": {
+          "STACK-SELECT-PACKAGE": "falcon-server",
+          "INSTALL": [
+            "falcon-server"
+          ],
+          "PATCH": [
+            "falcon-server"
+          ],
+          "STANDARD": [
+            "falcon-server"
+          ]
+        }
+      },
+      "FLUME": {
+        "FLUME_HANDLER": {
+          "STACK-SELECT-PACKAGE": "flume-server",
+          "INSTALL": [
+            "flume-server"
+          ],
+          "PATCH": [
+            "flume-server"
+          ],
+          "STANDARD": [
+            "flume-server"
+          ]
+        }
+      },
+      "HBASE": {
+        "HBASE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hbase-client",
+          "INSTALL": [
+            "hbase-client"
+          ],
+          "PATCH": [
+            "hbase-client"
+          ],
+          "STANDARD": [
+            "hbase-client",
+            "phoenix-client",
+            "hadoop-client"
+          ]
+        },
+        "HBASE_MASTER": {
+          "STACK-SELECT-PACKAGE": "hbase-master",
+          "INSTALL": [
+            "hbase-master"
+          ],
+          "PATCH": [
+            "hbase-master"
+          ],
+          "STANDARD": [
+            "hbase-master"
+          ]
+        },
+        "HBASE_REGIONSERVER": {
+          "STACK-SELECT-PACKAGE": "hbase-regionserver",
+          "INSTALL": [
+            "hbase-regionserver"
+          ],
+          "PATCH": [
+            "hbase-regionserver"
+          ],
+          "STANDARD": [
+            "hbase-regionserver"
+          ]
+        },
+        "PHOENIX_QUERY_SERVER": {
+          "STACK-SELECT-PACKAGE": "phoenix-server",
+          "INSTALL": [
+            "phoenix-server"
+          ],
+          "PATCH": [
+            "phoenix-server"
+          ],
+          "STANDARD": [
+            "phoenix-server"
+          ]
+        }
+      },
+      "HDFS": {
+        "DATANODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-datanode",
+          "INSTALL": [
+            "hadoop-hdfs-datanode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-datanode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-datanode"
+          ]
+        },
+        "HDFS_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        },
+        "NAMENODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-namenode",
+          "INSTALL": [
+            "hadoop-hdfs-namenode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-namenode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-namenode"
+          ]
+        },
+        "NFS_GATEWAY": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-nfs3",
+          "INSTALL": [
+            "hadoop-hdfs-nfs3"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-nfs3"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-nfs3"
+          ]
+        },
+        "JOURNALNODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-journalnode",
+          "INSTALL": [
+            "hadoop-hdfs-journalnode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-journalnode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-journalnode"
+          ]
+        },
+        "SECONDARY_NAMENODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-secondarynamenode",
+          "INSTALL": [
+            "hadoop-hdfs-secondarynamenode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-secondarynamenode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-secondarynamenode"
+          ]
+        },
+        "ZKFC": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-zkfc",
+          "INSTALL": [
+            "hadoop-hdfs-zkfc"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-zkfc"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-zkfc"
+          ]
+        }
+      },
+      "HIVE": {
+        "HCAT": {
+          "STACK-SELECT-PACKAGE": "hive-webhcat",
+          "INSTALL": [
+            "hive-webhcat"
+          ],
+          "PATCH": [
+            "hive-webhcat"
+          ],
+          "STANDARD": [
+            "hive-webhcat"
+          ]
+        },
+        "HIVE_METASTORE": {
+          "STACK-SELECT-PACKAGE": "hive-metastore",
+          "INSTALL": [
+            "hive-metastore"
+          ],
+          "PATCH": [
+            "hive-metastore"
+          ],
+          "STANDARD": [
+            "hive-metastore"
+          ]
+        },
+        "HIVE_SERVER": {
+          "STACK-SELECT-PACKAGE": "hive-server2",
+          "INSTALL": [
+            "hive-server2"
+          ],
+          "PATCH": [
+            "hive-server2"
+          ],
+          "STANDARD": [
+            "hive-server2"
+          ]
+        },
+        "HIVE_SERVER_INTERACTIVE": {
+          "STACK-SELECT-PACKAGE": "hive-server2-hive2",
+          "INSTALL": [
+            "hive-server2-hive2"
+          ],
+          "PATCH": [
+            "hive-server2-hive2"
+          ],
+          "STANDARD": [
+            "hive-server2-hive2"
+          ]
+        },
+        "HIVE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        },
+        "WEBHCAT_SERVER": {
+          "STACK-SELECT-PACKAGE": "hive-webhcat",
+          "INSTALL": [
+            "hive-webhcat"
+          ],
+          "PATCH": [
+            "hive-webhcat"
+          ],
+          "STANDARD": [
+            "hive-webhcat"
+          ]
+        }
+      },
+      "KAFKA": {
+        "KAFKA_BROKER": {
+          "STACK-SELECT-PACKAGE": "kafka-broker",
+          "INSTALL": [
+            "kafka-broker"
+          ],
+          "PATCH": [
+            "kafka-broker"
+          ],
+          "STANDARD": [
+            "kafka-broker"
+          ]
+        }
+      },
+      "KNOX": {
+        "KNOX_GATEWAY": {
+          "STACK-SELECT-PACKAGE": "knox-server",
+          "INSTALL": [
+            "knox-server"
+          ],
+          "PATCH": [
+            "knox-server"
+          ],
+          "STANDARD": [
+            "knox-server"
+          ]
+        }
+      },
+      "MAHOUT": {
+        "MAHOUT": {
+          "STACK-SELECT-PACKAGE": "mahout-client",
+          "INSTALL": [
+            "mahout-client"
+          ],
+          "PATCH": [
+            "mahout-client"
+          ],
+          "STANDARD": [
+            "mahout-client"
+          ]
+        }
+      },
+      "MAPREDUCE2": {
+        "HISTORYSERVER": {
+          "STACK-SELECT-PACKAGE": "hadoop-mapreduce-historyserver",
+          "INSTALL": [
+            "hadoop-mapreduce-historyserver"
+          ],
+          "PATCH": [
+            "hadoop-mapreduce-historyserver"
+          ],
+          "STANDARD": [
+            "hadoop-mapreduce-historyserver"
+          ]
+        },
+        "MAPREDUCE2_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "hadoop-mapreduce-INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "OOZIE": {
+        "OOZIE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "oozie-client",
+          "INSTALL": [
+            "oozie-client"
+          ],
+          "PATCH": [
+            "oozie-client"
+          ],
+          "STANDARD": [
+            "oozie-client"
+          ]
+        },
+        "OOZIE_SERVER": {
+          "STACK-SELECT-PACKAGE": "oozie-server",
+          "INSTALL": [
+            "oozie-client",
+            "oozie-server"
+          ],
+          "PATCH": [
+            "oozie-server"
+          ],
+          "STANDARD": [
+            "oozie-client",
+            "oozie-server"
+          ]
+        }
+      },
+      "PIG": {
+        "PIG": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "R4ML": {
+        "R4ML": {
+          "STACK-SELECT-PACKAGE": "r4ml-client",
+          "INSTALL": [
+            "r4ml-client"
+          ],
+          "PATCH": [
+            "r4ml-client"
+          ],
+          "STANDARD": [
+            "r4ml-client"
+          ]
+        }
+      },
+      "RANGER": {
+        "RANGER_ADMIN": {
+          "STACK-SELECT-PACKAGE": "ranger-admin",
+          "INSTALL": [
+            "ranger-admin"
+          ],
+          "PATCH": [
+            "ranger-admin"
+          ],
+          "STANDARD": [
+            "ranger-admin"
+          ]
+        },
+        "RANGER_TAGSYNC": {
+          "STACK-SELECT-PACKAGE": "ranger-tagsync",
+          "INSTALL": [
+            "ranger-tagsync"
+          ],
+          "PATCH": [
+            "ranger-tagsync"
+          ],
+          "STANDARD": [
+            "ranger-tagsync"
+          ]
+        },
+        "RANGER_USERSYNC": {
+          "STACK-SELECT-PACKAGE": "ranger-usersync",
+          "INSTALL": [
+            "ranger-usersync"
+          ],
+          "PATCH": [
+            "ranger-usersync"
+          ],
+          "STANDARD": [
+            "ranger-usersync"
+          ]
+        }
+      },
+      "RANGER_KMS": {
+        "RANGER_KMS_SERVER": {
+          "STACK-SELECT-PACKAGE": "ranger-kms",
+          "INSTALL": [
+            "ranger-kms"
+          ],
+          "PATCH": [
+            "ranger-kms"
+          ],
+          "STANDARD": [
+            "ranger-kms"
+          ]
+        }
+      },
+      "SLIDER": {
+        "SLIDER": {
+          "STACK-SELECT-PACKAGE": "slider-client",
+          "INSTALL": [
+            "slider-client"
+          ],
+          "PATCH": [
+            "slider-client"
+          ],
+          "STANDARD": [
+            "slider-client",
+            "hadoop-client"
+          ]
+        }
+      },
+      "SPARK": {
+        "LIVY_SERVER": {
+          "STACK-SELECT-PACKAGE": "livy-server",
+          "INSTALL": [
+            "livy-server"
+          ],
+          "PATCH": [
+            "livy-server"
+          ],
+          "STANDARD": [
+            "livy-server"
+          ]
+        },
+        "SPARK_CLIENT": {
+          "STACK-SELECT-PACKAGE": "spark-client",
+          "INSTALL": [
+            "spark-client"
+          ],
+          "PATCH": [
+            "spark-client"
+          ],
+          "STANDARD": [
+            "spark-client"
+          ]
+        },
+        "SPARK_JOBHISTORYSERVER": {
+          "STACK-SELECT-PACKAGE": "spark-historyserver",
+          "INSTALL": [
+            "spark-historyserver"
+          ],
+          "PATCH": [
+            "spark-historyserver"
+          ],
+          "STANDARD": [
+            "spark-historyserver"
+          ]
+        },
+        "SPARK_THRIFTSERVER": {
+          "STACK-SELECT-PACKAGE": "spark-thriftserver",
+          "INSTALL": [
+            "spark-thriftserver"
+          ],
+          "PATCH": [
+            "spark-thriftserver"
+          ],
+          "STANDARD": [
+            "spark-thriftserver"
+          ]
+        }
+      },
+      "SPARK2": {
+        "LIVY2_SERVER": {
+          "STACK-SELECT-PACKAGE": "livy2-server",
+          "INSTALL": [
+            "livy2-server"
+          ],
+          "PATCH": [
+            "livy2-server"
+          ],
+          "STANDARD": [
+            "livy2-server"
+          ]
+        },
+        "SPARK2_CLIENT": {
+          "STACK-SELECT-PACKAGE": "spark2-client",
+          "INSTALL": [
+            "spark2-client"
+          ],
+          "PATCH": [
+            "spark2-client"
+          ],
+          "STANDARD": [
+            "spark2-client"
+          ]
+        },
+        "SPARK2_JOBHISTORYSERVER": {
+          "STACK-SELECT-PACKAGE": "spark2-historyserver",
+          "INSTALL": [
+            "spark2-historyserver"
+          ],
+          "PATCH": [
+            "spark2-historyserver"
+          ],
+          "STANDARD": [
+            "spark2-historyserver"
+          ]
+        },
+        "SPARK2_THRIFTSERVER": {
+          "STACK-SELECT-PACKAGE": "spark2-thriftserver",
+          "INSTALL": [
+            "spark2-thriftserver"
+          ],
+          "PATCH": [
+            "spark2-thriftserver"
+          ],
+          "STANDARD": [
+            "spark2-thriftserver"
+          ]
+        }
+      },
+      "SQOOP": {
+        "SQOOP": {
+          "STACK-SELECT-PACKAGE": "sqoop-client",
+          "INSTALL": [
+            "sqoop-client"
+          ],
+          "PATCH": [
+            "sqoop-client"
+          ],
+          "STANDARD": [
+            "sqoop-client"
+          ]
+        }
+      },
+      "STORM": {
+        "NIMBUS": {
+          "STACK-SELECT-PACKAGE": "storm-nimbus",
+          "INSTALL": [
+            "storm-client",
+            "storm-nimbus"
+          ],
+          "PATCH": [
+            "storm-client",
+            "storm-nimbus"
+          ],
+          "STANDARD": [
+            "storm-client",
+            "storm-nimbus"
+          ]
+        },
+        "SUPERVISOR": {
+          "STACK-SELECT-PACKAGE": "storm-supervisor",
+          "INSTALL": [
+            "storm-supervisor"
+          ],
+          "PATCH": [
+            "storm-supervisor"
+          ],
+          "STANDARD": [
+            "storm-client",
+            "storm-supervisor"
+          ]
+        },
+        "DRPC_SERVER": {
+          "STACK-SELECT-PACKAGE": "storm-client",
+          "INSTALL": [
+            "storm-client"
+          ],
+          "PATCH": [
+            "storm-client"
+          ],
+          "STANDARD": [
+            "storm-client"
+          ]
+        },
+        "STORM_UI_SERVER": {
+          "STACK-SELECT-PACKAGE": "storm-client",
+          "INSTALL": [
+            "storm-client"
+          ],
+          "PATCH": [
+            "storm-client"
+          ],
+          "STANDARD": [
+            "storm-client"
+          ]
+        }
+      },
+      "SYSTEMML": {
+        "SYSTEMML": {
+          "STACK-SELECT-PACKAGE": "systemml-client",
+          "INSTALL": [
+            "systemml-client"
+          ],
+          "PATCH": [
+            "systemml-client"
+          ],
+          "STANDARD": [
+            "systemml-client"
+          ]
+        }
+      },
+      "TEZ": {
+        "TEZ_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "TITAN": {
+        "TITAN_CLIENT": {
+          "STACK-SELECT-PACKAGE": "titan-client",
+          "INSTALL": [
+            "titan-client"
+          ],
+          "PATCH": [
+            "titan-client"
+          ],
+          "STANDARD": [
+            "titan-client"
+          ]
+        },
+        "TITAN_SERVER": {
+          "STACK-SELECT-PACKAGE": "titan-server",
+          "INSTALL": [
+            "titan-server"
+          ],
+          "PATCH": [
+            "titan-server"
+          ],
+          "STANDARD": [
+            "titan-server"
+          ]
+        }
+      },
+      "YARN": {
+        "APP_TIMELINE_SERVER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-timelineserver",
+          "INSTALL": [
+            "hadoop-yarn-timelineserver"
+          ],
+          "PATCH": [
+            "hadoop-yarn-timelineserver"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-timelineserver"
+          ]
+        },
+        "NODEMANAGER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-nodemanager",
+          "INSTALL": [
+            "hadoop-yarn-nodemanager"
+          ],
+          "PATCH": [
+            "hadoop-yarn-nodemanager"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-nodemanager"
+          ]
+        },
+        "RESOURCEMANAGER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-resourcemanager",
+          "INSTALL": [
+            "hadoop-yarn-resourcemanager"
+          ],
+          "PATCH": [
+            "hadoop-yarn-resourcemanager"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-resourcemanager"
+          ]
+        },
+        "YARN_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "ZEPPELIN": {
+        "ZEPPELIN_MASTER": {
+          "STACK-SELECT-PACKAGE": "zeppelin-server",
+          "INSTALL": [
+            "zeppelin-server"
+          ],
+          "PATCH": [
+            "zeppelin-server"
+          ],
+          "STANDARD": [
+            "zeppelin-server"
+          ]
+        }
+      },
+      "ZOOKEEPER": {
+        "ZOOKEEPER_CLIENT": {
+          "STACK-SELECT-PACKAGE": "zookeeper-client",
+          "INSTALL": [
+            "zookeeper-client"
+          ],
+          "PATCH": [
+            "zookeeper-client"
+          ],
+          "STANDARD": [
+            "zookeeper-client"
+          ]
+        },
+        "ZOOKEEPER_SERVER": {
+          "STACK-SELECT-PACKAGE": "zookeeper-server",
+          "INSTALL": [
+            "zookeeper-server"
+          ],
+          "PATCH": [
+            "zookeeper-server"
+          ],
+          "STANDARD": [
+            "zookeeper-server"
+          ]
+        }
+      }
+    },
+    "conf-select": {
+      "accumulo": [
+        {
+          "conf_dir": "/etc/accumulo/conf",
+          "current_dir": "{0}/current/accumulo-client/conf"
+        }
+      ],
+      "atlas": [
+        {
+          "conf_dir": "/etc/atlas/conf",
+          "current_dir": "{0}/current/atlas-client/conf"
+        }
+      ],
+      "druid": [
+        {
+          "conf_dir": "/etc/druid/conf",
+          "current_dir": "{0}/current/druid-overlord/conf"
+        }
+      ],
+      "falcon": [
+        {
+          "conf_dir": "/etc/falcon/conf",
+          "current_dir": "{0}/current/falcon-client/conf"
+        }
+      ],
+      "flume": [
+        {
+          "conf_dir": "/etc/flume/conf",
+          "current_dir": "{0}/current/flume-server/conf"
+        }
+      ],
+      "hadoop": [
+        {
+          "conf_dir": "/etc/hadoop/conf",
+          "current_dir": "{0}/current/hadoop-client/conf"
+        }
+      ],
+      "hbase": [
+        {
+          "conf_dir": "/etc/hbase/conf",
+          "current_dir": "{0}/current/hbase-client/conf"
+        }
+      ],
+      "hive": [
+        {
+          "conf_dir": "/etc/hive/conf",
+          "current_dir": "{0}/current/hive-client/conf"
+        }
+      ],
+      "hive2": [
+        {
+          "conf_dir": "/etc/hive2/conf",
+          "current_dir": "{0}/current/hive-server2-hive2/conf"
+        }
+      ],
+      "hive-hcatalog": [
+        {
+          "conf_dir": "/etc/hive-webhcat/conf",
+          "prefix": "/etc/hive-webhcat",
+          "current_dir": "{0}/current/hive-webhcat/etc/webhcat"
+        },
+        {
+          "conf_dir": "/etc/hive-hcatalog/conf",
+          "prefix": "/etc/hive-hcatalog",
+          "current_dir": "{0}/current/hive-webhcat/etc/hcatalog"
+        }
+      ],
+      "kafka": [
+        {
+          "conf_dir": "/etc/kafka/conf",
+          "current_dir": "{0}/current/kafka-broker/conf"
+        }
+      ],
+      "knox": [
+        {
+          "conf_dir": "/etc/knox/conf",
+          "current_dir": "{0}/current/knox-server/conf"
+        }
+      ],
+      "mahout": [
+        {
+          "conf_dir": "/etc/mahout/conf",
+          "current_dir": "{0}/current/mahout-client/conf"
+        }
+      ],
+      "nifi": [
+        {
+          "conf_dir": "/etc/nifi/conf",
+          "current_dir": "{0}/current/nifi/conf"
+        }
+      ],
+      "oozie": [
+        {
+          "conf_dir": "/etc/oozie/conf",
+          "current_dir": "{0}/current/oozie-client/conf"
+        }
+      ],
+      "phoenix": [
+        {
+          "conf_dir": "/etc/phoenix/conf",
+          "current_dir": "{0}/current/phoenix-client/conf"
+        }
+      ],
+      "pig": [
+        {
+          "conf_dir": "/etc/pig/conf",
+          "current_dir": "{0}/current/pig-client/conf"
+        }
+      ],
+      "ranger-admin": [
+        {
+          "conf_dir": "/etc/ranger/admin/conf",
+          "current_dir": "{0}/current/ranger-admin/conf"
+        }
+      ],
+      "ranger-kms": [
+        {
+          "conf_dir": "/etc/ranger/kms/conf",
+          "current_dir": "{0}/current/ranger-kms/conf"
+        }
+      ],
+      "ranger-tagsync": [
+        {
+          "conf_dir": "/etc/ranger/tagsync/conf",
+          "current_dir": "{0}/current/ranger-tagsync/conf"
+        }
+      ],
+      "ranger-usersync": [
+        {
+          "conf_dir": "/etc/ranger/usersync/conf",
+          "current_dir": "{0}/current/ranger-usersync/conf"
+        }
+      ],
+      "slider": [
+        {
+          "conf_dir": "/etc/slider/conf",
+          "current_dir": "{0}/current/slider-client/conf"
+        }
+      ],
+      "spark": [
+        {
+          "conf_dir": "/etc/spark/conf",
+          "current_dir": "{0}/current/spark-client/conf"
+        }
+      ],
+      "spark2": [
+        {
+          "conf_dir": "/etc/spark2/conf",
+          "current_dir": "{0}/current/spark2-client/conf"
+        }
+      ],
+      "sqoop": [
+        {
+          "conf_dir": "/etc/sqoop/conf",
+          "current_dir": "{0}/current/sqoop-client/conf"
+        }
+      ],
+      "storm": [
+        {
+          "conf_dir": "/etc/storm/conf",
+          "current_dir": "{0}/current/storm-client/conf"
+        }
+      ],
+      "storm-slider-client": [
+        {
+          "conf_dir": "/etc/storm-slider-client/conf",
+          "current_dir": "{0}/current/storm-slider-client/conf"
+        }
+      ],
+      "superset": [
+        {
+          "conf_dir": "/etc/druid-superset/conf",
+          "current_dir": "{0}/current/druid-superset/conf"
+        }
+      ],
+      "tez": [
+        {
+          "conf_dir": "/etc/tez/conf",
+          "current_dir": "{0}/current/tez-client/conf"
+        }
+      ],
+      "zeppelin": [
+        {
+          "conf_dir": "/etc/zeppelin/conf",
+          "current_dir": "{0}/current/zeppelin-server/conf"
+        }
+      ],
+      "zookeeper": [
+        {
+          "conf_dir": "/etc/zookeeper/conf",
+          "current_dir": "{0}/current/zookeeper-client/conf"
+        }
+      ]
+    }
+  }
+}
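
For readers skimming the mappings above: below is a minimal, illustrative Python sketch (not Ambari's implementation) of how a stack-select/conf-select structure like this one could be consumed. The helper names, the "/usr/iop" stack root used to expand the "{0}" placeholder, and the treatment of "INVALID" entries as skip markers are assumptions made only for this example; the JSON layout itself is taken from the mappings above.

    # Assumed stack root used to expand the "{0}" placeholder in "current_dir";
    # the real root is stack-specific and is an assumption in this sketch.
    STACK_ROOT = "/usr/iop"

    def stack_select_packages(stack_data, service, component, orchestration="STANDARD"):
        """Return the stack-select package name and the list of packages to
        select for a component, skipping "INVALID" placeholder entries
        (assumed here to mean "nothing to select for this orchestration")."""
        entry = stack_data["stack-select"][service][component]
        selectable = [pkg for pkg in entry[orchestration] if "INVALID" not in pkg]
        return entry["STACK-SELECT-PACKAGE"], selectable

    def conf_select_dirs(stack_data, package):
        """Expand the "{0}" placeholder in each conf-select mapping, yielding
        (conf_dir, current_dir) pairs for the given package."""
        return [(d["conf_dir"], d["current_dir"].format(STACK_ROOT))
                for d in stack_data["conf-select"][package]]

    if __name__ == "__main__":
        # Minimal inline subset of the mappings above, used so the sketch is
        # self-contained; in practice the full JSON file would be parsed with
        # json.load() and navigated to the section holding these two keys.
        stack_data = {
            "stack-select": {
                "HDFS": {
                    "NAMENODE": {
                        "STACK-SELECT-PACKAGE": "hadoop-hdfs-namenode",
                        "INSTALL": ["hadoop-hdfs-namenode"],
                        "PATCH": ["hadoop-hdfs-namenode"],
                        "STANDARD": ["hadoop-hdfs-namenode"]
                    }
                }
            },
            "conf-select": {
                "hadoop": [
                    {"conf_dir": "/etc/hadoop/conf",
                     "current_dir": "{0}/current/hadoop-client/conf"}
                ]
            }
        }
        print(stack_select_packages(stack_data, "HDFS", "NAMENODE"))
        print(conf_select_dirs(stack_data, "hadoop"))

The sketch takes the already-extracted dict as a parameter because the keys enclosing the "stack-select" and "conf-select" sections are not shown in this hunk, so how to reach them in the full file is left open here.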
