http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/tez_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/tez_client.py b/ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/tez_client.py
index 8a6a6d3..b42d14e 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/tez_client.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/tez_client.py
@@ -28,7 +28,6 @@ from ambari_commons.os_utils import copy_file, 
extract_path_component
 
 from resource_management.core.exceptions import ClientComponentHasNoStatus
 from resource_management.core.source import InlineTemplate
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
@@ -77,7 +76,6 @@ class TezClientLinux(TezClient):
       # Because this script was called from ru_execute_tasks.py which already 
enters an Environment with its own basedir,
       # must change it now so this function can find the Jinja Templates for 
the service.
       env.config.basedir = base_dir
-      conf_select.select(params.stack_name, conf_select_name, params.version)
       self.configure(env, config_dir=config_dir)
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
@@ -85,8 +83,6 @@ class TezClientLinux(TezClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
-      conf_select.select(params.stack_name, "tez", params.version)
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py
index a435b80..2aec6ba 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py
@@ -20,7 +20,7 @@ Ambari Agent
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
 from resource_management.libraries.functions import check_process_status
@@ -72,7 +72,6 @@ class 
ApplicationTimelineServerDefault(ApplicationTimelineServer):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
index 30045f8..a93bc17 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
@@ -21,7 +21,7 @@ Ambari Agent
 
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
 from resource_management.libraries.functions.check_process_status import 
check_process_status
@@ -74,7 +74,6 @@ class HistoryServerDefault(HistoryServer):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
       # MC Hammer said, "Can't touch this"
       copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, 
skip=params.sysprep_skip_copy_tarballs_hdfs)

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
index efcb2da..234e931 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
@@ -24,7 +24,7 @@ import sys
 
 # Local imports
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
 from resource_management.core.exceptions import ClientComponentHasNoStatus
@@ -66,12 +66,9 @@ class MapReduce2Client(Script):
     config_dir = self.get_config_dir_during_stack_upgrade(env, base_dir, 
conf_select_name)
 
     if config_dir:
-      Logger.info("stack_upgrade_save_new_config(): Calling conf-select on %s 
using version %s" % (conf_select_name, str(params.version)))
-
       # Because this script was called from ru_execute_tasks.py which already 
enters an Environment with its own basedir,
       # must change it now so this function can find the Jinja Templates for 
the service.
       env.config.basedir = base_dir
-      conf_select.select(params.stack_name, conf_select_name, params.version)
       self.configure(env, config_dir=config_dir)
 
 
@@ -87,7 +84,6 @@ class MapReduce2ClientDefault(MapReduce2Client):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py
index ed83402..280fc2f 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py
@@ -22,7 +22,7 @@ Ambari Agent
 import nodemanager_upgrade
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
 from resource_management.libraries.functions.check_process_status import 
check_process_status
@@ -72,7 +72,6 @@ class NodemanagerDefault(Nodemanager):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
index ecaea4c..7885c94 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
@@ -20,7 +20,6 @@ Ambari Agent
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
@@ -111,7 +110,6 @@ class ResourcemanagerDefault(Resourcemanager):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py
index ef4f7ea..b6a89b4 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py
@@ -21,7 +21,7 @@ Ambari Agent
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
 from resource_management.core.exceptions import ClientComponentHasNoStatus
@@ -56,7 +56,6 @@ class YarnClientDefault(YarnClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/application_timeline_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/application_timeline_server.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/application_timeline_server.py
index a435b80..2aec6ba 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/application_timeline_server.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/application_timeline_server.py
@@ -20,7 +20,7 @@ Ambari Agent
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
 from resource_management.libraries.functions import check_process_status
@@ -72,7 +72,6 @@ class 
ApplicationTimelineServerDefault(ApplicationTimelineServer):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/historyserver.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/historyserver.py
index 3938c15..d89e5b5 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/historyserver.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/historyserver.py
@@ -21,7 +21,7 @@ Ambari Agent
 
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
 from resource_management.libraries.functions.check_process_status import 
check_process_status
@@ -74,7 +74,6 @@ class HistoryServerDefault(HistoryServer):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
       # MC Hammer said, "Can't touch this"
       copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, 
skip=params.sysprep_skip_copy_tarballs_hdfs)

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/mapreduce2_client.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/mapreduce2_client.py
index efcb2da..79e3158 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/mapreduce2_client.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/mapreduce2_client.py
@@ -24,7 +24,7 @@ import sys
 
 # Local imports
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
 from resource_management.core.exceptions import ClientComponentHasNoStatus
@@ -71,7 +71,6 @@ class MapReduce2Client(Script):
       # Because this script was called from ru_execute_tasks.py which already 
enters an Environment with its own basedir,
       # must change it now so this function can find the Jinja Templates for 
the service.
       env.config.basedir = base_dir
-      conf_select.select(params.stack_name, conf_select_name, params.version)
       self.configure(env, config_dir=config_dir)
 
 
@@ -87,7 +86,6 @@ class MapReduce2ClientDefault(MapReduce2Client):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/nodemanager.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/nodemanager.py
index ed83402..280fc2f 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/nodemanager.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/nodemanager.py
@@ -22,7 +22,7 @@ Ambari Agent
 import nodemanager_upgrade
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
 from resource_management.libraries.functions.check_process_status import 
check_process_status
@@ -72,7 +72,6 @@ class NodemanagerDefault(Nodemanager):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/resourcemanager.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/resourcemanager.py
index 4ceff1c..55214f6 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/resourcemanager.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/resourcemanager.py
@@ -20,7 +20,6 @@ Ambari Agent
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
@@ -111,7 +110,6 @@ class ResourcemanagerDefault(Resourcemanager):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def disable_security(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/yarn_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/yarn_client.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/yarn_client.py
index ef4f7ea..b6a89b4 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/yarn_client.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/yarn_client.py
@@ -21,7 +21,7 @@ Ambari Agent
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import 
check_stack_feature
 from resource_management.core.exceptions import ClientComponentHasNoStatus
@@ -56,7 +56,6 @@ class YarnClientDefault(YarnClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
index f8f6e3d..7f506c5 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
@@ -30,7 +30,6 @@ from resource_management.core.source import StaticFile
 from resource_management.libraries import XmlConfig
 from resource_management.libraries.functions.check_process_status import 
check_process_status
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.decorator import retry
@@ -251,7 +250,6 @@ class Master(Script):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
format_stack_version(params.version)):
-      conf_select.select(params.stack_name, "zeppelin", params.version)
       stack_select.select_packages(params.version)
 
   def set_interpreter_settings(self, config_data):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/master.py
index c4fdfcc..ab798ec 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/master.py
@@ -30,7 +30,6 @@ from resource_management.core.source import StaticFile
 from resource_management.libraries import XmlConfig
 from resource_management.libraries.functions.check_process_status import 
check_process_status
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.decorator import retry
@@ -253,7 +252,6 @@ class Master(Script):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
format_stack_version(params.version)):
-      conf_select.select(params.stack_name, "zeppelin", params.version)
       stack_select.select_packages(params.version)
 
   def set_interpreter_settings(self, config_data):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_client.py b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_client.py
index e52522a..39daea4 100644
--- a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_client.py
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_client.py
@@ -21,7 +21,7 @@ Ambari Agent
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.version import 
format_stack_version
 from resource_management.libraries.functions.format import format
@@ -67,7 +67,6 @@ class ZookeeperClientLinux(ZookeeperClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
format_stack_version(params.version)):
-      conf_select.select(params.stack_name, "zookeeper", params.version)
       stack_select.select_packages(params.version)
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_server.py b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_server.py
index 8d6acd9..f0e4ab9 100644
--- a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_server.py
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_server.py
@@ -24,7 +24,6 @@ from ambari_commons.constants import UPGRADE_TYPE_NON_ROLLING
 
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import get_unique_id_and_date
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.version import 
format_stack_version
@@ -74,7 +73,6 @@ class ZookeeperServerLinux(ZookeeperServer):
     env.set_params(params)
 
     if check_stack_feature(StackFeature.ROLLING_UPGRADE, 
format_stack_version(params.version)):
-      conf_select.select(params.stack_name, "zookeeper", params.version)
       stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
index eb3fb5d..ef5607e 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
@@ -205,10 +205,6 @@ class InstallPackages(Script):
       Link("/usr/bin/conf-select", to = "/usr/bin/hdfconf-select")
 
     for package_name, directories in 
conf_select.get_package_dirs().iteritems():
-      conf_selector_name = 
stack_tools.get_stack_tool_name(stack_tools.CONF_SELECTOR_NAME)
-      Logger.info("The current cluster stack of {0} does not require backing 
up configurations; "
-                  "only {1} versioned config directories will be 
created.".format(stack_version, conf_selector_name))
-      # only link configs for all known packages
       conf_select.select(self.stack_name, package_name, stack_version, 
ignore_errors = True)
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py b/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
index 95f7323..ef9ea29 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
@@ -79,7 +79,6 @@ class UpgradeSetAll(Script):
           link_config(dir_def['conf_dir'], dir_def['current_dir'])
 
 
-
 def is_host_skippable(stack_selector_path, formatted_version):
   """
   Gets whether this host should not have the stack select tool called.

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
index 1915e9f..45f3b42 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
@@ -262,14 +262,14 @@ gpgcheck=0</value>
     </value-attributes>
     <on-ambari-upgrade add="true"/>
   </property>
-  <!-- Define stack_select_packages property in the base stack. DO NOT 
override this property for each stack version -->
+  <!-- Define stack_packages property in the base stack. DO NOT override this 
property for each stack version -->
   <property>
-    <name>stack_select_packages</name>
+    <name>stack_packages</name>
     <value/>
     <description>Associations between component and stack-select 
tools.</description>
     <property-type>VALUE_FROM_PROPERTY_FILE</property-type>
     <value-attributes>
-      <property-file-name>stack_select_packages.json</property-file-name>
+      <property-file-name>stack_packages.json</property-file-name>
       <property-file-type>json</property-file-type>
       <read-only>true</read-only>
       <overridable>false</overridable>

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
index 8b61a93..daaffd8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -50,8 +50,8 @@ def setup_stack_symlinks(struct_out_file):
     return
 
   # get the packages which the stack-select tool should be used on
-  stack_select_packages = stack_select.get_packages(stack_select.PACKAGE_SCOPE_INSTALL)
-  if stack_select_packages is None:
+  stack_packages = stack_select.get_packages(stack_select.PACKAGE_SCOPE_INSTALL)
+  if stack_packages is None:
+  if stack_packages is None:
     return
 
   json_version = load_version(struct_out_file)
@@ -62,7 +62,7 @@ def setup_stack_symlinks(struct_out_file):
 
   # On parallel command execution this should be executed by a single process at a time.
   with FcntlBasedProcessLock(params.stack_select_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
-    for package in stack_select_packages:
+    for package in stack_packages:
       stack_select.select(package, json_version)
 
 
@@ -123,10 +123,6 @@ def link_configs(struct_out_file):
   """
   import params
 
-  if not Script.is_stack_greater_or_equal("2.3"):
-    Logger.info("Can only link configs for HDP-2.3 and higher.")
-    return
-
   json_version = load_version(struct_out_file)
 
   if not json_version:

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
new file mode 100644
index 0000000..704fb54
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json
@@ -0,0 +1,1146 @@
+{
+  "HDP": {
+    "stack-select": {
+      "ACCUMULO": {
+        "ACCUMULO_CLIENT": {
+          "STACK-SELECT-PACKAGE": "accumulo-client",
+          "INSTALL": [
+            "accumulo-client"
+          ],
+          "PATCH": [
+            "accumulo-client"
+          ],
+          "STANDARD": [
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_GC": {
+          "STACK-SELECT-PACKAGE": "accumulo-gc",
+          "INSTALL": [
+            "accumulo-gc"
+          ],
+          "PATCH": [
+            "accumulo-gc"
+          ],
+          "STANDARD": [
+            "accumulo-gc",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_MASTER": {
+          "STACK-SELECT-PACKAGE": "accumulo-master",
+          "INSTALL": [
+            "accumulo-master"
+          ],
+          "PATCH": [
+            "accumulo-master"
+          ],
+          "STANDARD": [
+            "accumulo-master",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_MONITOR": {
+          "STACK-SELECT-PACKAGE": "accumulo-monitor",
+          "INSTALL": [
+            "accumulo-monitor"
+          ],
+          "PATCH": [
+            "accumulo-monitor"
+          ],
+          "STANDARD": [
+            "accumulo-monitor",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_TRACER": {
+          "STACK-SELECT-PACKAGE": "accumulo-tracer",
+          "INSTALL": [
+            "accumulo-tracer"
+          ],
+          "PATCH": [
+            "accumulo-tracer"
+          ],
+          "STANDARD": [
+            "accumulo-tracer",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_TSERVER": {
+          "STACK-SELECT-PACKAGE": "accumulo-tablet",
+          "INSTALL": [
+            "accumulo-tablet"
+          ],
+          "PATCH": [
+            "accumulo-tablet"
+          ],
+          "STANDARD": [
+            "accumulo-tablet",
+            "accumulo-client"
+          ]
+        }
+      },
+      "ATLAS": {
+        "ATLAS_CLIENT": {
+          "STACK-SELECT-PACKAGE": "atlas-client",
+          "INSTALL": [
+            "atlas-client"
+          ],
+          "PATCH": [
+            "atlas-client"
+          ],
+          "STANDARD": [
+            "atlas-client"
+          ]
+        },
+        "ATLAS_SERVER": {
+          "STACK-SELECT-PACKAGE": "atlas-server",
+          "INSTALL": [
+            "atlas-server"
+          ],
+          "PATCH": [
+            "atlas-server"
+          ],
+          "STANDARD": [
+            "atlas-server"
+          ]
+        }
+      },
+      "DRUID": {
+        "DRUID_COORDINATOR": {
+          "STACK-SELECT-PACKAGE": "druid-coordinator",
+          "INSTALL": [
+            "druid-coordinator"
+          ],
+          "PATCH": [
+            "druid-coordinator"
+          ],
+          "STANDARD": [
+            "druid-coordinator"
+          ]
+        },
+        "DRUID_OVERLORD": {
+          "STACK-SELECT-PACKAGE": "druid-overlord",
+          "INSTALL": [
+            "druid-overlord"
+          ],
+          "PATCH": [
+            "druid-overlord"
+          ],
+          "STANDARD": [
+            "druid-overlord"
+          ]
+        },
+        "DRUID_HISTORICAL": {
+          "STACK-SELECT-PACKAGE": "druid-historical",
+          "INSTALL": [
+            "druid-historical"
+          ],
+          "PATCH": [
+            "druid-historical"
+          ],
+          "STANDARD": [
+            "druid-historical"
+          ]
+        },
+        "DRUID_BROKER": {
+          "STACK-SELECT-PACKAGE": "druid-broker",
+          "INSTALL": [
+            "druid-broker"
+          ],
+          "PATCH": [
+            "druid-broker"
+          ],
+          "STANDARD": [
+            "druid-broker"
+          ]
+        },
+        "DRUID_MIDDLEMANAGER": {
+          "STACK-SELECT-PACKAGE": "druid-middlemanager",
+          "INSTALL": [
+            "druid-middlemanager"
+          ],
+          "PATCH": [
+            "druid-middlemanager"
+          ],
+          "STANDARD": [
+            "druid-middlemanager"
+          ]
+        },
+        "DRUID_ROUTER": {
+          "STACK-SELECT-PACKAGE": "druid-router",
+          "INSTALL": [
+            "druid-router"
+          ],
+          "PATCH": [
+            "druid-router"
+          ],
+          "STANDARD": [
+            "druid-router"
+          ]
+        },
+        "DRUID_SUPERSET": {
+          "STACK-SELECT-PACKAGE": "druid-superset",
+          "INSTALL": [
+            "druid-superset"
+          ],
+          "PATCH": [
+            "druid-superset"
+          ],
+          "STANDARD": [
+            "druid-superset"
+          ]
+        }
+      },
+      "FALCON": {
+        "FALCON_CLIENT": {
+          "STACK-SELECT-PACKAGE": "falcon-client",
+          "INSTALL": [
+            "falcon-client"
+          ],
+          "PATCH": [
+            "falcon-client"
+          ],
+          "STANDARD": [
+            "falcon-client"
+          ]
+        },
+        "FALCON_SERVER": {
+          "STACK-SELECT-PACKAGE": "falcon-server",
+          "INSTALL": [
+            "falcon-server"
+          ],
+          "PATCH": [
+            "falcon-server"
+          ],
+          "STANDARD": [
+            "falcon-server"
+          ]
+        }
+      },
+      "FLUME": {
+        "FLUME_HANDLER": {
+          "STACK-SELECT-PACKAGE": "flume-server",
+          "INSTALL": [
+            "flume-server"
+          ],
+          "PATCH": [
+            "flume-server"
+          ],
+          "STANDARD": [
+            "flume-server"
+          ]
+        }
+      },
+      "HBASE": {
+        "HBASE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hbase-client",
+          "INSTALL": [
+            "hbase-client"
+          ],
+          "PATCH": [
+            "hbase-client"
+          ],
+          "STANDARD": [
+            "hbase-client",
+            "phoenix-client",
+            "hadoop-client"
+          ]
+        },
+        "HBASE_MASTER": {
+          "STACK-SELECT-PACKAGE": "hbase-master",
+          "INSTALL": [
+            "hbase-master"
+          ],
+          "PATCH": [
+            "hbase-master"
+          ],
+          "STANDARD": [
+            "hbase-master"
+          ]
+        },
+        "HBASE_REGIONSERVER": {
+          "STACK-SELECT-PACKAGE": "hbase-regionserver",
+          "INSTALL": [
+            "hbase-regionserver"
+          ],
+          "PATCH": [
+            "hbase-regionserver"
+          ],
+          "STANDARD": [
+            "hbase-regionserver"
+          ]
+        },
+        "PHOENIX_QUERY_SERVER": {
+          "STACK-SELECT-PACKAGE": "phoenix-server",
+          "INSTALL": [
+            "phoenix-server"
+          ],
+          "PATCH": [
+            "phoenix-server"
+          ],
+          "STANDARD": [
+            "phoenix-server"
+          ]
+        }
+      },
+      "HDFS": {
+        "DATANODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-datanode",
+          "INSTALL": [
+            "hadoop-hdfs-datanode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-datanode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-datanode"
+          ]
+        },
+        "HDFS_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        },
+        "NAMENODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-namenode",
+          "INSTALL": [
+            "hadoop-hdfs-namenode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-namenode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-namenode"
+          ]
+        },
+        "NFS_GATEWAY": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-nfs3",
+          "INSTALL": [
+            "hadoop-hdfs-nfs3"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-nfs3"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-nfs3"
+          ]
+        },
+        "JOURNALNODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-journalnode",
+          "INSTALL": [
+            "hadoop-hdfs-journalnode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-journalnode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-journalnode"
+          ]
+        },
+        "SECONDARY_NAMENODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-secondarynamenode",
+          "INSTALL": [
+            "hadoop-hdfs-secondarynamenode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-secondarynamenode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-secondarynamenode"
+          ]
+        },
+        "ZKFC": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-zkfc",
+          "INSTALL": [
+            "hadoop-hdfs-zkfc"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-zkfc"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-zkfc"
+          ]
+        }
+      },
+      "HIVE": {
+        "HCAT": {
+          "STACK-SELECT-PACKAGE": "hive-webhcat",
+          "INSTALL": [
+            "hive-webhcat"
+          ],
+          "PATCH": [
+            "hive-webhcat"
+          ],
+          "STANDARD": [
+            "hive-webhcat"
+          ]
+        },
+        "HIVE_METASTORE": {
+          "STACK-SELECT-PACKAGE": "hive-metastore",
+          "INSTALL": [
+            "hive-metastore"
+          ],
+          "PATCH": [
+            "hive-metastore"
+          ],
+          "STANDARD": [
+            "hive-metastore"
+          ]
+        },
+        "HIVE_SERVER": {
+          "STACK-SELECT-PACKAGE": "hive-server2",
+          "INSTALL": [
+            "hive-server2"
+          ],
+          "PATCH": [
+            "hive-server2"
+          ],
+          "STANDARD": [
+            "hive-server2"
+          ]
+        },
+        "HIVE_SERVER_INTERACTIVE": {
+          "STACK-SELECT-PACKAGE": "hive-server2-hive2",
+          "INSTALL": [
+            "hive-server2-hive2"
+          ],
+          "PATCH": [
+            "hive-server2-hive2"
+          ],
+          "STANDARD": [
+            "hive-server2-hive2"
+          ]
+        },
+        "HIVE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        },
+        "WEBHCAT_SERVER": {
+          "STACK-SELECT-PACKAGE": "hive-webhcat",
+          "INSTALL": [
+            "hive-webhcat"
+          ],
+          "PATCH": [
+            "hive-webhcat"
+          ],
+          "STANDARD": [
+            "hive-webhcat"
+          ]
+        }
+      },
+      "KAFKA": {
+        "KAFKA_BROKER": {
+          "STACK-SELECT-PACKAGE": "kafka-broker",
+          "INSTALL": [
+            "kafka-broker"
+          ],
+          "PATCH": [
+            "kafka-broker"
+          ],
+          "STANDARD": [
+            "kafka-broker"
+          ]
+        }
+      },
+      "KNOX": {
+        "KNOX_GATEWAY": {
+          "STACK-SELECT-PACKAGE": "knox-server",
+          "INSTALL": [
+            "knox-server"
+          ],
+          "PATCH": [
+            "knox-server"
+          ],
+          "STANDARD": [
+            "knox-server"
+          ]
+        }
+      },
+      "MAHOUT": {
+        "MAHOUT": {
+          "STACK-SELECT-PACKAGE": "mahout-client",
+          "INSTALL": [
+            "mahout-client"
+          ],
+          "PATCH": [
+            "mahout-client"
+          ],
+          "STANDARD": [
+            "mahout-client"
+          ]
+        }
+      },
+      "MAPREDUCE2": {
+        "HISTORYSERVER": {
+          "STACK-SELECT-PACKAGE": "hadoop-mapreduce-historyserver",
+          "INSTALL": [
+            "hadoop-mapreduce-historyserver"
+          ],
+          "PATCH": [
+            "hadoop-mapreduce-historyserver"
+          ],
+          "STANDARD": [
+            "hadoop-mapreduce-historyserver"
+          ]
+        },
+        "MAPREDUCE2_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "hadoop-mapreduce-INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "OOZIE": {
+        "OOZIE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "oozie-client",
+          "INSTALL": [
+            "oozie-client"
+          ],
+          "PATCH": [
+            "oozie-client"
+          ],
+          "STANDARD": [
+            "oozie-client"
+          ]
+        },
+        "OOZIE_SERVER": {
+          "STACK-SELECT-PACKAGE": "oozie-server",
+          "INSTALL": [
+            "oozie-client",
+            "oozie-server"
+          ],
+          "PATCH": [
+            "oozie-server"
+          ],
+          "STANDARD": [
+            "oozie-client",
+            "oozie-server"
+          ]
+        }
+      },
+      "PIG": {
+        "PIG": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "R4ML": {
+        "R4ML": {
+          "STACK-SELECT-PACKAGE": "r4ml-client",
+          "INSTALL": [
+            "r4ml-client"
+          ],
+          "PATCH": [
+            "r4ml-client"
+          ],
+          "STANDARD": [
+            "r4ml-client"
+          ]
+        }
+      },
+      "RANGER": {
+        "RANGER_ADMIN": {
+          "STACK-SELECT-PACKAGE": "ranger-admin",
+          "INSTALL": [
+            "ranger-admin"
+          ],
+          "PATCH": [
+            "ranger-admin"
+          ],
+          "STANDARD": [
+            "ranger-admin"
+          ]
+        },
+        "RANGER_TAGSYNC": {
+          "STACK-SELECT-PACKAGE": "ranger-tagsync",
+          "INSTALL": [
+            "ranger-tagsync"
+          ],
+          "PATCH": [
+            "ranger-tagsync"
+          ],
+          "STANDARD": [
+            "ranger-tagsync"
+          ]
+        },
+        "RANGER_USERSYNC": {
+          "STACK-SELECT-PACKAGE": "ranger-usersync",
+          "INSTALL": [
+            "ranger-usersync"
+          ],
+          "PATCH": [
+            "ranger-usersync"
+          ],
+          "STANDARD": [
+            "ranger-usersync"
+          ]
+        }
+      },
+      "RANGER_KMS": {
+        "RANGER_KMS_SERVER": {
+          "STACK-SELECT-PACKAGE": "ranger-kms",
+          "INSTALL": [
+            "ranger-kms"
+          ],
+          "PATCH": [
+            "ranger-kms"
+          ],
+          "STANDARD": [
+            "ranger-kms"
+          ]
+        }
+      },
+      "SLIDER": {
+        "SLIDER": {
+          "STACK-SELECT-PACKAGE": "slider-client",
+          "INSTALL": [
+            "slider-client"
+          ],
+          "PATCH": [
+            "slider-client"
+          ],
+          "STANDARD": [
+            "slider-client",
+            "hadoop-client"
+          ]
+        }
+      },
+      "SPARK": {
+        "LIVY_SERVER": {
+          "STACK-SELECT-PACKAGE": "livy-server",
+          "INSTALL": [
+            "livy-server"
+          ],
+          "PATCH": [
+            "livy-server"
+          ],
+          "STANDARD": [
+            "livy-server"
+          ]
+        },
+        "SPARK_CLIENT": {
+          "STACK-SELECT-PACKAGE": "spark-client",
+          "INSTALL": [
+            "spark-client"
+          ],
+          "PATCH": [
+            "spark-client"
+          ],
+          "STANDARD": [
+            "spark-client"
+          ]
+        },
+        "SPARK_JOBHISTORYSERVER": {
+          "STACK-SELECT-PACKAGE": "spark-historyserver",
+          "INSTALL": [
+            "spark-historyserver"
+          ],
+          "PATCH": [
+            "spark-historyserver"
+          ],
+          "STANDARD": [
+            "spark-historyserver"
+          ]
+        },
+        "SPARK_THRIFTSERVER": {
+          "STACK-SELECT-PACKAGE": "spark-thriftserver",
+          "INSTALL": [
+            "spark-thriftserver"
+          ],
+          "PATCH": [
+            "spark-thriftserver"
+          ],
+          "STANDARD": [
+            "spark-thriftserver"
+          ]
+        }
+      },
+      "SPARK2": {
+        "LIVY2_SERVER": {
+          "STACK-SELECT-PACKAGE": "livy2-server",
+          "INSTALL": [
+            "livy2-server"
+          ],
+          "PATCH": [
+            "livy2-server"
+          ],
+          "STANDARD": [
+            "livy2-server"
+          ]
+        },
+        "SPARK2_CLIENT": {
+          "STACK-SELECT-PACKAGE": "spark2-client",
+          "INSTALL": [
+            "spark2-client"
+          ],
+          "PATCH": [
+            "spark2-client"
+          ],
+          "STANDARD": [
+            "spark2-client"
+          ]
+        },
+        "SPARK2_JOBHISTORYSERVER": {
+          "STACK-SELECT-PACKAGE": "spark2-historyserver",
+          "INSTALL": [
+            "spark2-historyserver"
+          ],
+          "PATCH": [
+            "spark2-historyserver"
+          ],
+          "STANDARD": [
+            "spark2-historyserver"
+          ]
+        },
+        "SPARK2_THRIFTSERVER": {
+          "STACK-SELECT-PACKAGE": "spark2-thriftserver",
+          "INSTALL": [
+            "spark2-thriftserver"
+          ],
+          "PATCH": [
+            "spark2-thriftserver"
+          ],
+          "STANDARD": [
+            "spark2-thriftserver"
+          ]
+        }
+      },
+      "SQOOP": {
+        "SQOOP": {
+          "STACK-SELECT-PACKAGE": "sqoop-client",
+          "INSTALL": [
+            "sqoop-client"
+          ],
+          "PATCH": [
+            "sqoop-client"
+          ],
+          "STANDARD": [
+            "sqoop-client"
+          ]
+        }
+      },
+      "STORM": {
+        "NIMBUS": {
+          "STACK-SELECT-PACKAGE": "storm-nimbus",
+          "INSTALL": [
+            "storm-client",
+            "storm-nimbus"
+          ],
+          "PATCH": [
+            "storm-client",
+            "storm-nimbus"
+          ],
+          "STANDARD": [
+            "storm-client",
+            "storm-nimbus"
+          ]
+        },
+        "SUPERVISOR": {
+          "STACK-SELECT-PACKAGE": "storm-supervisor",
+          "INSTALL": [
+            "storm-supervisor"
+          ],
+          "PATCH": [
+            "storm-supervisor"
+          ],
+          "STANDARD": [
+            "storm-client",
+            "storm-supervisor"
+          ]
+        },
+        "DRPC_SERVER": {
+          "STACK-SELECT-PACKAGE": "storm-client",
+          "INSTALL": [
+            "storm-client"
+          ],
+          "PATCH": [
+            "storm-client"
+          ],
+          "STANDARD": [
+            "storm-client"
+          ]
+        },
+        "STORM_UI_SERVER": {
+          "STACK-SELECT-PACKAGE": "storm-client",
+          "INSTALL": [
+            "storm-client"
+          ],
+          "PATCH": [
+            "storm-client"
+          ],
+          "STANDARD": [
+            "storm-client"
+          ]
+        }
+      },
+      "SYSTEMML": {
+        "SYSTEMML": {
+          "STACK-SELECT-PACKAGE": "systemml-client",
+          "INSTALL": [
+            "systemml-client"
+          ],
+          "PATCH": [
+            "systemml-client"
+          ],
+          "STANDARD": [
+            "systemml-client"
+          ]
+        }
+      },
+      "TEZ": {
+        "TEZ_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "TITAN": {
+        "TITAN_CLIENT": {
+          "STACK-SELECT-PACKAGE": "titan-client",
+          "INSTALL": [
+            "titan-client"
+          ],
+          "PATCH": [
+            "titan-client"
+          ],
+          "STANDARD": [
+            "titan-client"
+          ]
+        },
+        "TITAN_SERVER": {
+          "STACK-SELECT-PACKAGE": "titan-server",
+          "INSTALL": [
+            "titan-server"
+          ],
+          "PATCH": [
+            "titan-server"
+          ],
+          "STANDARD": [
+            "titan-server"
+          ]
+        }
+      },
+      "YARN": {
+        "APP_TIMELINE_SERVER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-timelineserver",
+          "INSTALL": [
+            "hadoop-yarn-timelineserver"
+          ],
+          "PATCH": [
+            "hadoop-yarn-timelineserver"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-timelineserver"
+          ]
+        },
+        "NODEMANAGER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-nodemanager",
+          "INSTALL": [
+            "hadoop-yarn-nodemanager"
+          ],
+          "PATCH": [
+            "hadoop-yarn-nodemanager"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-nodemanager"
+          ]
+        },
+        "RESOURCEMANAGER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-resourcemanager",
+          "INSTALL": [
+            "hadoop-yarn-resourcemanager"
+          ],
+          "PATCH": [
+            "hadoop-yarn-resourcemanager"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-resourcemanager"
+          ]
+        },
+        "YARN_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "ZEPPELIN": {
+        "ZEPPELIN_MASTER": {
+          "STACK-SELECT-PACKAGE": "zeppelin-server",
+          "INSTALL": [
+            "zeppelin-server"
+          ],
+          "PATCH": [
+            "zeppelin-server"
+          ],
+          "STANDARD": [
+            "zeppelin-server"
+          ]
+        }
+      },
+      "ZOOKEEPER": {
+        "ZOOKEEPER_CLIENT": {
+          "STACK-SELECT-PACKAGE": "zookeeper-client",
+          "INSTALL": [
+            "zookeeper-client"
+          ],
+          "PATCH": [
+            "zookeeper-client"
+          ],
+          "STANDARD": [
+            "zookeeper-client"
+          ]
+        },
+        "ZOOKEEPER_SERVER": {
+          "STACK-SELECT-PACKAGE": "zookeeper-server",
+          "INSTALL": [
+            "zookeeper-server"
+          ],
+          "PATCH": [
+            "zookeeper-server"
+          ],
+          "STANDARD": [
+            "zookeeper-server"
+          ]
+        }
+      }
+    },
+    "conf-select": {
+      "accumulo": [
+        {
+          "conf_dir": "/etc/accumulo/conf",
+          "current_dir": "{0}/current/accumulo-client/conf"
+        }
+      ],
+      "atlas": [
+        {
+          "conf_dir": "/etc/atlas/conf",
+          "current_dir": "{0}/current/atlas-client/conf"
+        }
+      ],
+      "druid": [
+        {
+          "conf_dir": "/etc/druid/conf",
+          "current_dir": "{0}/current/druid-overlord/conf"
+        }
+      ],
+      "falcon": [
+        {
+          "conf_dir": "/etc/falcon/conf",
+          "current_dir": "{0}/current/falcon-client/conf"
+        }
+      ],
+      "flume": [
+        {
+          "conf_dir": "/etc/flume/conf",
+          "current_dir": "{0}/current/flume-server/conf"
+        }
+      ],
+      "hadoop": [
+        {
+          "conf_dir": "/etc/hadoop/conf",
+          "current_dir": "{0}/current/hadoop-client/conf"
+        }
+      ],
+      "hbase": [
+        {
+          "conf_dir": "/etc/hbase/conf",
+          "current_dir": "{0}/current/hbase-client/conf"
+        }
+      ],
+      "hive": [
+        {
+          "conf_dir": "/etc/hive/conf",
+          "current_dir": "{0}/current/hive-client/conf"
+        }
+      ],
+      "hive2": [
+        {
+          "conf_dir": "/etc/hive2/conf",
+          "current_dir": "{0}/current/hive-server2-hive2/conf"
+        }
+      ],
+      "hive-hcatalog": [
+        {
+          "conf_dir": "/etc/hive-webhcat/conf",
+          "prefix": "/etc/hive-webhcat",
+          "current_dir": "{0}/current/hive-webhcat/etc/webhcat"
+        },
+        {
+          "conf_dir": "/etc/hive-hcatalog/conf",
+          "prefix": "/etc/hive-hcatalog",
+          "current_dir": "{0}/current/hive-webhcat/etc/hcatalog"
+        }
+      ],
+      "kafka": [
+        {
+          "conf_dir": "/etc/kafka/conf",
+          "current_dir": "{0}/current/kafka-broker/conf"
+        }
+      ],
+      "knox": [
+        {
+          "conf_dir": "/etc/knox/conf",
+          "current_dir": "{0}/current/knox-server/conf"
+        }
+      ],
+      "mahout": [
+        {
+          "conf_dir": "/etc/mahout/conf",
+          "current_dir": "{0}/current/mahout-client/conf"
+        }
+      ],
+      "nifi": [
+        {
+          "conf_dir": "/etc/nifi/conf",
+          "current_dir": "{0}/current/nifi/conf"
+        }
+      ],
+      "oozie": [
+        {
+          "conf_dir": "/etc/oozie/conf",
+          "current_dir": "{0}/current/oozie-client/conf"
+        }
+      ],
+      "phoenix": [
+        {
+          "conf_dir": "/etc/phoenix/conf",
+          "current_dir": "{0}/current/phoenix-client/conf"
+        }
+      ],
+      "pig": [
+        {
+          "conf_dir": "/etc/pig/conf",
+          "current_dir": "{0}/current/pig-client/conf"
+        }
+      ],
+      "ranger-admin": [
+        {
+          "conf_dir": "/etc/ranger/admin/conf",
+          "current_dir": "{0}/current/ranger-admin/conf"
+        }
+      ],
+      "ranger-kms": [
+        {
+          "conf_dir": "/etc/ranger/kms/conf",
+          "current_dir": "{0}/current/ranger-kms/conf"
+        }
+      ],
+      "ranger-tagsync": [
+        {
+          "conf_dir": "/etc/ranger/tagsync/conf",
+          "current_dir": "{0}/current/ranger-tagsync/conf"
+        }
+      ],
+      "ranger-usersync": [
+        {
+          "conf_dir": "/etc/ranger/usersync/conf",
+          "current_dir": "{0}/current/ranger-usersync/conf"
+        }
+      ],
+      "slider": [
+        {
+          "conf_dir": "/etc/slider/conf",
+          "current_dir": "{0}/current/slider-client/conf"
+        }
+      ],
+      "spark": [
+        {
+          "conf_dir": "/etc/spark/conf",
+          "current_dir": "{0}/current/spark-client/conf"
+        }
+      ],
+      "spark2": [
+        {
+          "conf_dir": "/etc/spark2/conf",
+          "current_dir": "{0}/current/spark2-client/conf"
+        }
+      ],
+      "sqoop": [
+        {
+          "conf_dir": "/etc/sqoop/conf",
+          "current_dir": "{0}/current/sqoop-client/conf"
+        }
+      ],
+      "storm": [
+        {
+          "conf_dir": "/etc/storm/conf",
+          "current_dir": "{0}/current/storm-client/conf"
+        }
+      ],
+      "storm-slider-client": [
+        {
+          "conf_dir": "/etc/storm-slider-client/conf",
+          "current_dir": "{0}/current/storm-slider-client/conf"
+        }
+      ],
+      "superset": [
+        {
+          "conf_dir": "/etc/druid-superset/conf",
+          "current_dir": "{0}/current/druid-superset/conf"
+        }
+      ],
+      "tez": [
+        {
+          "conf_dir": "/etc/tez/conf",
+          "current_dir": "{0}/current/tez-client/conf"
+        }
+      ],
+      "zeppelin": [
+        {
+          "conf_dir": "/etc/zeppelin/conf",
+          "current_dir": "{0}/current/zeppelin-server/conf"
+        }
+      ],
+      "zookeeper": [
+        {
+          "conf_dir": "/etc/zookeeper/conf",
+          "current_dir": "{0}/current/zookeeper-client/conf"
+        }
+      ]
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_select_packages.json
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_select_packages.json
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_select_packages.json
deleted file mode 100644
index 2747188..0000000
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_select_packages.json
+++ /dev/null
@@ -1,952 +0,0 @@
-{
-  "HDP": {
-    "stack-select": {
-      "ACCUMULO": {
-        "ACCUMULO_CLIENT": {
-          "STACK-SELECT-PACKAGE": "accumulo-client",
-          "INSTALL": [
-            "accumulo-client"
-          ],
-          "PATCH": [
-            "accumulo-client"
-          ],
-          "STANDARD": [
-            "accumulo-client"
-          ]
-        },
-        "ACCUMULO_GC": {
-          "STACK-SELECT-PACKAGE": "accumulo-gc",
-          "INSTALL": [
-            "accumulo-gc"
-          ],
-          "PATCH": [
-            "accumulo-gc"
-          ],
-          "STANDARD": [
-            "accumulo-gc",
-            "accumulo-client"
-          ]
-        },
-        "ACCUMULO_MASTER": {
-          "STACK-SELECT-PACKAGE": "accumulo-master",
-          "INSTALL": [
-            "accumulo-master"
-          ],
-          "PATCH": [
-            "accumulo-master"
-          ],
-          "STANDARD": [
-            "accumulo-master",
-            "accumulo-client"
-          ]
-        },
-        "ACCUMULO_MONITOR": {
-          "STACK-SELECT-PACKAGE": "accumulo-monitor",
-          "INSTALL": [
-            "accumulo-monitor"
-          ],
-          "PATCH": [
-            "accumulo-monitor"
-          ],
-          "STANDARD": [
-            "accumulo-monitor",
-            "accumulo-client"
-          ]
-        },
-        "ACCUMULO_TRACER": {
-          "STACK-SELECT-PACKAGE": "accumulo-tracer",
-          "INSTALL": [
-            "accumulo-tracer"
-          ],
-          "PATCH": [
-            "accumulo-tracer"
-          ],
-          "STANDARD": [
-            "accumulo-tracer",
-            "accumulo-client"
-          ]
-        },
-        "ACCUMULO_TSERVER": {
-          "STACK-SELECT-PACKAGE": "accumulo-tablet",
-          "INSTALL": [
-            "accumulo-tablet"
-          ],
-          "PATCH": [
-            "accumulo-tablet"
-          ],
-          "STANDARD": [
-            "accumulo-tablet",
-            "accumulo-client"
-          ]
-        }
-      },
-      "ATLAS": {
-        "ATLAS_CLIENT": {
-          "STACK-SELECT-PACKAGE": "atlas-client",
-          "INSTALL": [
-            "atlas-client"
-          ],
-          "PATCH": [
-            "atlas-client"
-          ],
-          "STANDARD": [
-            "atlas-client"
-          ]
-        },
-        "ATLAS_SERVER": {
-          "STACK-SELECT-PACKAGE": "atlas-server",
-          "INSTALL": [
-            "atlas-server"
-          ],
-          "PATCH": [
-            "atlas-server"
-          ],
-          "STANDARD": [
-            "atlas-server"
-          ]
-        }
-      },
-      "DRUID": {
-        "DRUID_COORDINATOR": {
-          "STACK-SELECT-PACKAGE": "druid-coordinator",
-          "INSTALL": [
-            "druid-coordinator"
-          ],
-          "PATCH": [
-            "druid-coordinator"
-          ],
-          "STANDARD": [
-            "druid-coordinator"
-          ]
-        },
-        "DRUID_OVERLORD": {
-          "STACK-SELECT-PACKAGE": "druid-overlord",
-          "INSTALL": [
-            "druid-overlord"
-          ],
-          "PATCH": [
-            "druid-overlord"
-          ],
-          "STANDARD": [
-            "druid-overlord"
-          ]
-        },
-        "DRUID_HISTORICAL": {
-          "STACK-SELECT-PACKAGE": "druid-historical",
-          "INSTALL": [
-            "druid-historical"
-          ],
-          "PATCH": [
-            "druid-historical"
-          ],
-          "STANDARD": [
-            "druid-historical"
-          ]
-        },
-        "DRUID_BROKER": {
-          "STACK-SELECT-PACKAGE": "druid-broker",
-          "INSTALL": [
-            "druid-broker"
-          ],
-          "PATCH": [
-            "druid-broker"
-          ],
-          "STANDARD": [
-            "druid-broker"
-          ]
-        },
-        "DRUID_MIDDLEMANAGER": {
-          "STACK-SELECT-PACKAGE": "druid-middlemanager",
-          "INSTALL": [
-            "druid-middlemanager"
-          ],
-          "PATCH": [
-            "druid-middlemanager"
-          ],
-          "STANDARD": [
-            "druid-middlemanager"
-          ]
-        },
-        "DRUID_ROUTER": {
-          "STACK-SELECT-PACKAGE": "druid-router",
-          "INSTALL": [
-            "druid-router"
-          ],
-          "PATCH": [
-            "druid-router"
-          ],
-          "STANDARD": [
-            "druid-router"
-          ]
-        },
-        "DRUID_SUPERSET": {
-          "STACK-SELECT-PACKAGE": "druid-superset",
-          "INSTALL": [
-            "druid-superset"
-          ],
-          "PATCH": [
-            "druid-superset"
-          ],
-          "STANDARD": [
-            "druid-superset"
-          ]
-        }
-      },
-      "FALCON": {
-        "FALCON_CLIENT": {
-          "STACK-SELECT-PACKAGE": "falcon-client",
-          "INSTALL": [
-            "falcon-client"
-          ],
-          "PATCH": [
-            "falcon-client"
-          ],
-          "STANDARD": [
-            "falcon-client"
-          ]
-        },
-        "FALCON_SERVER": {
-          "STACK-SELECT-PACKAGE": "falcon-server",
-          "INSTALL": [
-            "falcon-server"
-          ],
-          "PATCH": [
-            "falcon-server"
-          ],
-          "STANDARD": [
-            "falcon-server"
-          ]
-        }
-      },
-      "FLUME": {
-        "FLUME_HANDLER": {
-          "STACK-SELECT-PACKAGE": "flume-server",
-          "INSTALL": [
-            "flume-server"
-          ],
-          "PATCH": [
-            "flume-server"
-          ],
-          "STANDARD": [
-            "flume-server"
-          ]
-        }
-      },
-      "HBASE": {
-        "HBASE_CLIENT": {
-          "STACK-SELECT-PACKAGE": "hbase-client",
-          "INSTALL": [
-            "hbase-client"
-          ],
-          "PATCH": [
-            "hbase-client"
-          ],
-          "STANDARD": [
-            "hbase-client",
-            "phoenix-client",
-            "hadoop-client"
-          ]
-        },
-        "HBASE_MASTER": {
-          "STACK-SELECT-PACKAGE": "hbase-master",
-          "INSTALL": [
-            "hbase-master"
-          ],
-          "PATCH": [
-            "hbase-master"
-          ],
-          "STANDARD": [
-            "hbase-master"
-          ]
-        },
-        "HBASE_REGIONSERVER": {
-          "STACK-SELECT-PACKAGE": "hbase-regionserver",
-          "INSTALL": [
-            "hbase-regionserver"
-          ],
-          "PATCH": [
-            "hbase-regionserver"
-          ],
-          "STANDARD": [
-            "hbase-regionserver"
-          ]
-        },
-        "PHOENIX_QUERY_SERVER": {
-          "STACK-SELECT-PACKAGE": "phoenix-server",
-          "INSTALL": [
-            "phoenix-server"
-          ],
-          "PATCH": [
-            "phoenix-server"
-          ],
-          "STANDARD": [
-            "phoenix-server"
-          ]
-        }
-      },
-      "HDFS": {
-        "DATANODE": {
-          "STACK-SELECT-PACKAGE": "hadoop-hdfs-datanode",
-          "INSTALL": [
-            "hadoop-hdfs-datanode"
-          ],
-          "PATCH": [
-            "hadoop-hdfs-datanode"
-          ],
-          "STANDARD": [
-            "hadoop-hdfs-datanode"
-          ]
-        },
-        "HDFS_CLIENT": {
-          "STACK-SELECT-PACKAGE": "hadoop-client",
-          "INSTALL": [
-            "hadoop-client"
-          ],
-          "PATCH": [
-            "INVALID"
-          ],
-          "STANDARD": [
-            "hadoop-client"
-          ]
-        },
-        "NAMENODE": {
-          "STACK-SELECT-PACKAGE": "hadoop-hdfs-namenode",
-          "INSTALL": [
-            "hadoop-hdfs-namenode"
-          ],
-          "PATCH": [
-            "hadoop-hdfs-namenode"
-          ],
-          "STANDARD": [
-            "hadoop-hdfs-namenode"
-          ]
-        },
-        "NFS_GATEWAY": {
-          "STACK-SELECT-PACKAGE": "hadoop-hdfs-nfs3",
-          "INSTALL": [
-            "hadoop-hdfs-nfs3"
-          ],
-          "PATCH": [
-            "hadoop-hdfs-nfs3"
-          ],
-          "STANDARD": [
-            "hadoop-hdfs-nfs3"
-          ]
-        },
-        "JOURNALNODE": {
-          "STACK-SELECT-PACKAGE": "hadoop-hdfs-journalnode",
-          "INSTALL": [
-            "hadoop-hdfs-journalnode"
-          ],
-          "PATCH": [
-            "hadoop-hdfs-journalnode"
-          ],
-          "STANDARD": [
-            "hadoop-hdfs-journalnode"
-          ]
-        },
-        "SECONDARY_NAMENODE": {
-          "STACK-SELECT-PACKAGE": "hadoop-hdfs-secondarynamenode",
-          "INSTALL": [
-            "hadoop-hdfs-secondarynamenode"
-          ],
-          "PATCH": [
-            "hadoop-hdfs-secondarynamenode"
-          ],
-          "STANDARD": [
-            "hadoop-hdfs-secondarynamenode"
-          ]
-        },
-        "ZKFC": {
-          "STACK-SELECT-PACKAGE": "hadoop-hdfs-zkfc",
-          "INSTALL": [
-            "hadoop-hdfs-zkfc"
-          ],
-          "PATCH": [
-            "hadoop-hdfs-zkfc"
-          ],
-          "STANDARD": [
-            "hadoop-hdfs-zkfc"
-          ]
-        }
-      },
-      "HIVE": {
-        "HCAT": {
-          "STACK-SELECT-PACKAGE": "hive-webhcat",
-          "INSTALL": [
-            "hive-webhcat"
-          ],
-          "PATCH": [
-            "hive-webhcat"
-          ],
-          "STANDARD": [
-            "hive-webhcat"
-          ]
-        },
-        "HIVE_METASTORE": {
-          "STACK-SELECT-PACKAGE": "hive-metastore",
-          "INSTALL": [
-            "hive-metastore"
-          ],
-          "PATCH": [
-            "hive-metastore"
-          ],
-          "STANDARD": [
-            "hive-metastore"
-          ]
-        },
-        "HIVE_SERVER": {
-          "STACK-SELECT-PACKAGE": "hive-server2",
-          "INSTALL": [
-            "hive-server2"
-          ],
-          "PATCH": [
-            "hive-server2"
-          ],
-          "STANDARD": [
-            "hive-server2"
-          ]
-        },
-        "HIVE_SERVER_INTERACTIVE": {
-          "STACK-SELECT-PACKAGE": "hive-server2-hive2",
-          "INSTALL": [
-            "hive-server2-hive2"
-          ],
-          "PATCH": [
-            "hive-server2-hive2"
-          ],
-          "STANDARD": [
-            "hive-server2-hive2"
-          ]
-        },
-        "HIVE_CLIENT": {
-          "STACK-SELECT-PACKAGE": "hadoop-client",
-          "INSTALL": [
-            "hadoop-client"
-          ],
-          "PATCH": [
-            "INVALID"
-          ],
-          "STANDARD": [
-            "hadoop-client"
-          ]
-        },
-        "WEBHCAT_SERVER": {
-          "STACK-SELECT-PACKAGE": "hive-webhcat",
-          "INSTALL": [
-            "hive-webhcat"
-          ],
-          "PATCH": [
-            "hive-webhcat"
-          ],
-          "STANDARD": [
-            "hive-webhcat"
-          ]
-        }
-      },
-      "KAFKA": {
-        "KAFKA_BROKER": {
-          "STACK-SELECT-PACKAGE": "kafka-broker",
-          "INSTALL": [
-            "kafka-broker"
-          ],
-          "PATCH": [
-            "kafka-broker"
-          ],
-          "STANDARD": [
-            "kafka-broker"
-          ]
-        }
-      },
-      "KNOX": {
-        "KNOX_GATEWAY": {
-          "STACK-SELECT-PACKAGE": "knox-server",
-          "INSTALL": [
-            "knox-server"
-          ],
-          "PATCH": [
-            "knox-server"
-          ],
-          "STANDARD": [
-            "knox-server"
-          ]
-        }
-      },
-      "MAHOUT": {
-        "MAHOUT": {
-          "STACK-SELECT-PACKAGE": "mahout-client",
-          "INSTALL": [
-            "mahout-client"
-          ],
-          "PATCH": [
-            "mahout-client"
-          ],
-          "STANDARD": [
-            "mahout-client"
-          ]
-        }
-      },
-      "MAPREDUCE2": {
-        "HISTORYSERVER": {
-          "STACK-SELECT-PACKAGE": "hadoop-mapreduce-historyserver",
-          "INSTALL": [
-            "hadoop-mapreduce-historyserver"
-          ],
-          "PATCH": [
-            "hadoop-mapreduce-historyserver"
-          ],
-          "STANDARD": [
-            "hadoop-mapreduce-historyserver"
-          ]
-        },
-        "MAPREDUCE2_CLIENT": {
-          "STACK-SELECT-PACKAGE": "hadoop-client",
-          "INSTALL": [
-            "hadoop-client"
-          ],
-          "PATCH": [
-            "hadoop-mapreduce-INVALID"
-          ],
-          "STANDARD": [
-            "hadoop-client"
-          ]
-        }
-      },
-      "OOZIE": {
-        "OOZIE_CLIENT": {
-          "STACK-SELECT-PACKAGE": "oozie-client",
-          "INSTALL": [
-            "oozie-client"
-          ],
-          "PATCH": [
-            "oozie-client"
-          ],
-          "STANDARD": [
-            "oozie-client"
-          ]
-        },
-        "OOZIE_SERVER": {
-          "STACK-SELECT-PACKAGE": "oozie-server",
-          "INSTALL": [
-            "oozie-client",
-            "oozie-server"
-          ],
-          "PATCH": [
-            "oozie-server"
-          ],
-          "STANDARD": [
-            "oozie-client",
-            "oozie-server"
-          ]
-        }
-      },
-      "PIG": {
-        "PIG": {
-          "STACK-SELECT-PACKAGE": "hadoop-client",
-          "INSTALL": [
-            "hadoop-client"
-          ],
-          "PATCH": [
-            "INVALID"
-          ],
-          "STANDARD": [
-            "hadoop-client"
-          ]
-        }
-      },
-      "R4ML": {
-        "R4ML": {
-          "STACK-SELECT-PACKAGE": "r4ml-client",
-          "INSTALL": [
-            "r4ml-client"
-          ],
-          "PATCH": [
-            "r4ml-client"
-          ],
-          "STANDARD": [
-            "r4ml-client"
-          ]
-        }
-      },
-      "RANGER": {
-        "RANGER_ADMIN": {
-          "STACK-SELECT-PACKAGE": "ranger-admin",
-          "INSTALL": [
-            "ranger-admin"
-          ],
-          "PATCH": [
-            "ranger-admin"
-          ],
-          "STANDARD": [
-            "ranger-admin"
-          ]
-        },
-        "RANGER_TAGSYNC": {
-          "STACK-SELECT-PACKAGE": "ranger-tagsync",
-          "INSTALL": [
-            "ranger-tagsync"
-          ],
-          "PATCH": [
-            "ranger-tagsync"
-          ],
-          "STANDARD": [
-            "ranger-tagsync"
-          ]
-        },
-        "RANGER_USERSYNC": {
-          "STACK-SELECT-PACKAGE": "ranger-usersync",
-          "INSTALL": [
-            "ranger-usersync"
-          ],
-          "PATCH": [
-            "ranger-usersync"
-          ],
-          "STANDARD": [
-            "ranger-usersync"
-          ]
-        }
-      },
-      "RANGER_KMS": {
-        "RANGER_KMS_SERVER": {
-          "STACK-SELECT-PACKAGE": "ranger-kms",
-          "INSTALL": [
-            "ranger-kms"
-          ],
-          "PATCH": [
-            "ranger-kms"
-          ],
-          "STANDARD": [
-            "ranger-kms"
-          ]
-        }
-      },
-      "SLIDER": {
-        "SLIDER": {
-          "STACK-SELECT-PACKAGE": "slider-client",
-          "INSTALL": [
-            "slider-client"
-          ],
-          "PATCH": [
-            "slider-client"
-          ],
-          "STANDARD": [
-            "slider-client",
-            "hadoop-client"
-          ]
-        }
-      },
-      "SPARK": {
-        "LIVY_SERVER": {
-          "STACK-SELECT-PACKAGE": "livy-server",
-          "INSTALL": [
-            "livy-server"
-          ],
-          "PATCH": [
-            "livy-server"
-          ],
-          "STANDARD": [
-            "livy-server"
-          ]
-        },
-        "SPARK_CLIENT": {
-          "STACK-SELECT-PACKAGE": "spark-client",
-          "INSTALL": [
-            "spark-client"
-          ],
-          "PATCH": [
-            "spark-client"
-          ],
-          "STANDARD": [
-            "spark-client"
-          ]
-        },
-        "SPARK_JOBHISTORYSERVER": {
-          "STACK-SELECT-PACKAGE": "spark-historyserver",
-          "INSTALL": [
-            "spark-historyserver"
-          ],
-          "PATCH": [
-            "spark-historyserver"
-          ],
-          "STANDARD": [
-            "spark-historyserver"
-          ]
-        },
-        "SPARK_THRIFTSERVER": {
-          "STACK-SELECT-PACKAGE": "spark-thriftserver",
-          "INSTALL": [
-            "spark-thriftserver"
-          ],
-          "PATCH": [
-            "spark-thriftserver"
-          ],
-          "STANDARD": [
-            "spark-thriftserver"
-          ]
-        }
-      },
-      "SPARK2": {
-        "LIVY2_SERVER": {
-          "STACK-SELECT-PACKAGE": "livy2-server",
-          "INSTALL": [
-            "livy2-server"
-          ],
-          "PATCH": [
-            "livy2-server"
-          ],
-          "STANDARD": [
-            "livy2-server"
-          ]
-        },
-        "SPARK2_CLIENT": {
-          "STACK-SELECT-PACKAGE": "spark2-client",
-          "INSTALL": [
-            "spark2-client"
-          ],
-          "PATCH": [
-            "spark2-client"
-          ],
-          "STANDARD": [
-            "spark2-client"
-          ]
-        },
-        "SPARK2_JOBHISTORYSERVER": {
-          "STACK-SELECT-PACKAGE": "spark2-historyserver",
-          "INSTALL": [
-            "spark2-historyserver"
-          ],
-          "PATCH": [
-            "spark2-historyserver"
-          ],
-          "STANDARD": [
-            "spark2-historyserver"
-          ]
-        },
-        "SPARK2_THRIFTSERVER": {
-          "STACK-SELECT-PACKAGE": "spark2-thriftserver",
-          "INSTALL": [
-            "spark2-thriftserver"
-          ],
-          "PATCH": [
-            "spark2-thriftserver"
-          ],
-          "STANDARD": [
-            "spark2-thriftserver"
-          ]
-        }
-      },
-      "SQOOP": {
-        "SQOOP": {
-          "STACK-SELECT-PACKAGE": "sqoop-client",
-          "INSTALL": [
-            "sqoop-client"
-          ],
-          "PATCH": [
-            "sqoop-client"
-          ],
-          "STANDARD": [
-            "sqoop-client"
-          ]
-        }
-      },
-      "STORM": {
-        "NIMBUS": {
-          "STACK-SELECT-PACKAGE": "storm-nimbus",
-          "INSTALL": [
-            "storm-client",
-            "storm-nimbus"
-          ],
-          "PATCH": [
-            "storm-client",
-            "storm-nimbus"
-          ],
-          "STANDARD": [
-            "storm-client",
-            "storm-nimbus"
-          ]
-        },
-        "SUPERVISOR": {
-          "STACK-SELECT-PACKAGE": "storm-supervisor",
-          "INSTALL": [
-            "storm-supervisor"
-          ],
-          "PATCH": [
-            "storm-supervisor"
-          ],
-          "STANDARD": [
-            "storm-client",
-            "storm-supervisor"
-          ]
-        },
-        "DRPC_SERVER": {
-          "STACK-SELECT-PACKAGE": "storm-client",
-          "INSTALL": [
-            "storm-client"
-          ],
-          "PATCH": [
-            "storm-client"
-          ],
-          "STANDARD": [
-            "storm-client"
-          ]
-        },
-        "STORM_UI_SERVER": {
-          "STACK-SELECT-PACKAGE": "storm-client",
-          "INSTALL": [
-            "storm-client"
-          ],
-          "PATCH": [
-            "storm-client"
-          ],
-          "STANDARD": [
-            "storm-client"
-          ]
-        }
-      },
-      "SYSTEMML": {
-        "SYSTEMML": {
-          "STACK-SELECT-PACKAGE": "systemml-client",
-          "INSTALL": [
-            "systemml-client"
-          ],
-          "PATCH": [
-            "systemml-client"
-          ],
-          "STANDARD": [
-            "systemml-client"
-          ]
-        }
-      },
-      "TEZ": {
-        "TEZ_CLIENT": {
-          "STACK-SELECT-PACKAGE": "hadoop-client",
-          "INSTALL": [
-            "hadoop-client"
-          ],
-          "PATCH": [
-            "INVALID"
-          ],
-          "STANDARD": [
-            "hadoop-client"
-          ]
-        }
-      },
-      "TITAN": {
-        "TITAN_CLIENT": {
-          "STACK-SELECT-PACKAGE": "titan-client",
-          "INSTALL": [
-            "titan-client"
-          ],
-          "PATCH": [
-            "titan-client"
-          ],
-          "STANDARD": [
-            "titan-client"
-          ]
-        },
-        "TITAN_SERVER": {
-          "STACK-SELECT-PACKAGE": "titan-server",
-          "INSTALL": [
-            "titan-server"
-          ],
-          "PATCH": [
-            "titan-server"
-          ],
-          "STANDARD": [
-            "titan-server"
-          ]
-        }
-      },
-      "YARN": {
-        "APP_TIMELINE_SERVER": {
-          "STACK-SELECT-PACKAGE": "hadoop-yarn-timelineserver",
-          "INSTALL": [
-            "hadoop-yarn-timelineserver"
-          ],
-          "PATCH": [
-            "hadoop-yarn-timelineserver"
-          ],
-          "STANDARD": [
-            "hadoop-yarn-timelineserver"
-          ]
-        },
-        "NODEMANAGER": {
-          "STACK-SELECT-PACKAGE": "hadoop-yarn-nodemanager",
-          "INSTALL": [
-            "hadoop-yarn-nodemanager"
-          ],
-          "PATCH": [
-            "hadoop-yarn-nodemanager"
-          ],
-          "STANDARD": [
-            "hadoop-yarn-nodemanager"
-          ]
-        },
-        "RESOURCEMANAGER": {
-          "STACK-SELECT-PACKAGE": "hadoop-yarn-resourcemanager",
-          "INSTALL": [
-            "hadoop-yarn-resourcemanager"
-          ],
-          "PATCH": [
-            "hadoop-yarn-resourcemanager"
-          ],
-          "STANDARD": [
-            "hadoop-yarn-resourcemanager"
-          ]
-        },
-        "YARN_CLIENT": {
-          "STACK-SELECT-PACKAGE": "hadoop-client",
-          "INSTALL": [
-            "hadoop-client"
-          ],
-          "PATCH": [
-            "INVALID"
-          ],
-          "STANDARD": [
-            "hadoop-client"
-          ]
-        }
-      },
-      "ZEPPELIN": {
-        "ZEPPELIN_MASTER": {
-          "STACK-SELECT-PACKAGE": "zeppelin-server",
-          "INSTALL": [
-            "zeppelin-server"
-          ],
-          "PATCH": [
-            "zeppelin-server"
-          ],
-          "STANDARD": [
-            "zeppelin-server"
-          ]
-        }
-      },
-      "ZOOKEEPER": {
-        "ZOOKEEPER_CLIENT": {
-          "STACK-SELECT-PACKAGE": "zookeeper-client",
-          "INSTALL": [
-            "zookeeper-client"
-          ],
-          "PATCH": [
-            "zookeeper-client"
-          ],
-          "STANDARD": [
-            "zookeeper-client"
-          ]
-        },
-        "ZOOKEEPER_SERVER": {
-          "STACK-SELECT-PACKAGE": "zookeeper-server",
-          "INSTALL": [
-            "zookeeper-server"
-          ],
-          "PATCH": [
-            "zookeeper-server"
-          ],
-          "STANDARD": [
-            "zookeeper-server"
-          ]
-        }
-      }
-    }
-  }
-}
\ No newline at end of file

Reply via email to