http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
index aa60087..48c82ea 100644
--- 
a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
@@ -17,11 +17,12 @@ See the License for the specific language governing 
permissions and
 limitations under the License.
 
 """
-
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from ambari_commons.os_check import OSCheck
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
-from resource_management import *
-from ambari_commons import OSCheck
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
 
 if OSCheck.is_windows_family():
   from params_windows import *
@@ -42,17 +43,18 @@ stack_version_unformatted = 
str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
 #hadoop params
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+slider_bin_dir = "/usr/lib/slider/bin"
+if Script.is_hdp_stack_greater_or_equal("2.2"):
   slider_bin_dir = '/usr/hdp/current/slider-client/bin'
-else:
-  slider_bin_dir = "/usr/lib/slider/bin"
 
-hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+slider_conf_dir = "/usr/hdp/current/slider-client/conf"
+
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smokeuser_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 slider_env_sh_template = config['configurations']['slider-env']['content']
 
 java64_home = config['hostLevelParams']['java_home']

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
index 4ed93e5..98a408c 100644
--- 
a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
@@ -16,15 +16,11 @@ See the License for the specific language governing 
permissions and
 limitations under the License.
 
 """
-
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
-from resource_management.libraries.functions.default import default
-from resource_management import *
-from ambari_commons import OSCheck
+from resource_management.libraries.script.script import Script
 
 # server configurations
 config = Script.get_config()
 
-slider_conf_dir = "/etc/slider/conf"
+slider_conf_dir = "/usr/hdp/current/slider-client/conf"
 storm_slider_conf_dir = '/usr/hdp/current/storm-slider-client/conf'
 slider_home_dir = '/usr/hdp/current/slider-client'
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index ddaebaf..f430743 100644
--- 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -18,12 +18,28 @@ limitations under the License.
 
 """
 
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
-from resource_management.libraries.functions.default import default
-from resource_management import *
-from setup_spark import *
+
 import status_params
 
+from setup_spark import *
+
+import resource_management.libraries.functions
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
+# a map of the Ambari role to the component name
+# for use with /usr/hdp/current/<component>
+SERVER_ROLE_DIRECTORY_MAP = {
+  'SPARK_JOBHISTORYSERVER' : 'spark-historyserver',
+  'SPARK_CLIENT' : 'spark-client'
+}
+
+component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "SPARK_CLIENT")
+
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
@@ -42,29 +58,21 @@ version = default("/commandParams/version", None)
 # Commenting out for time being
 #stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2.1.0') >= 0
 
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
+spark_conf = '/etc/spark/conf'
+hadoop_conf_dir = "/etc/hadoop/conf"
 
-if stack_is_hdp22_or_further:
+if Script.is_hdp_stack_greater_or_equal("2.2"):
   hadoop_home = "/usr/hdp/current/hadoop-client"
+  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
-  spark_conf = '/etc/spark/conf'
+  spark_conf = format("/usr/hdp/current/{component_directory}/conf")
   spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
   spark_pid_dir = status_params.spark_pid_dir
-  spark_role_root = "spark-client"
+  spark_home = format("/usr/hdp/current/{component_directory}")
 
-  command_role = default("/role", "")
-
-  if command_role == "SPARK_CLIENT":
-    spark_role_root = "spark-client"
-  elif command_role == "SPARK_JOBHISTORYSERVER":
-    spark_role_root = "spark-historyserver"
-
-  spark_home = format("/usr/hdp/current/{spark_role_root}")
-else:
-  pass
 
 java_home = config['hostLevelParams']['java_home']
-hadoop_conf_dir = "/etc/hadoop/conf"
+
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = 
config['configurations']['hadoop-env']['hdfs_principal_name']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
@@ -119,7 +127,7 @@ if spark_javaopts_properties.find('-Dhdp.version') == -1:
   spark_javaopts_properties = spark_javaopts_properties+ ' -Dhdp.version=' + 
str(hdp_full_version)
 
 security_enabled = config['configurations']['cluster-env']['security_enabled']
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 spark_kerberos_keytab =  
config['configurations']['spark-defaults']['spark.history.kerberos.keytab']
 spark_kerberos_principal =  
config['configurations']['spark-defaults']['spark.history.kerberos.principal']
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/status_params.py
 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/status_params.py
index 733edc1..ccd560e 100644
--- 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/status_params.py
+++ 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/status_params.py
@@ -18,7 +18,8 @@ limitations under the License.
 
 """
 
-from resource_management import *
+from resource_management.libraries.functions import format
+from resource_management.libraries.script.script import Script
 
 config = Script.get_config()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
index beed137..ec71506 100644
--- 
a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
@@ -16,7 +16,6 @@ See the License for the specific language governing 
permissions and
 limitations under the License.
 
 """
-
 from ambari_commons import OSCheck
 from resource_management.libraries.functions.default import default
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py
index 131946a..3a3f93e 100644
--- 
a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py
@@ -17,11 +17,19 @@ limitations under the License.
 
 """
 
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.get_kinit_path import 
get_kinit_path
 from resource_management.libraries.script import Script
 
+# a map of the Ambari role to the component name
+# for use with /usr/hdp/current/<component>
+SERVER_ROLE_DIRECTORY_MAP = {
+  'SQOOP' : 'sqoop-client'
+}
+
+component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "SQOOP")
+
 config = Script.get_config()
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 
@@ -33,23 +41,25 @@ hdp_stack_version = 
format_hdp_stack_version(stack_version_unformatted)
 # New Cluster Stack Version that is defined during the RESTART of a Rolling 
Upgrade
 version = default("/commandParams/version", None)
 
-#hadoop params
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
-  sqoop_conf_dir = '/etc/sqoop/conf'
+# default hadoop params
+sqoop_conf_dir = "/usr/lib/sqoop/conf"
+sqoop_lib = "/usr/lib/sqoop/lib"
+hadoop_home = '/usr/lib/hadoop'
+hbase_home = "/usr/lib/hbase"
+hive_home = "/usr/lib/hive"
+sqoop_bin_dir = "/usr/bin"
+zoo_conf_dir = "/etc/zookeeper"
+
+# HDP 2.2+ params
+if Script.is_hdp_stack_greater_or_equal("2.2"):
+  sqoop_conf_dir = '/usr/hdp/current/sqoop-client/conf'
   sqoop_lib = '/usr/hdp/current/sqoop-client/lib'
   hadoop_home = '/usr/hdp/current/hbase-client'
   hbase_home = '/usr/hdp/current/hbase-client'
   hive_home = '/usr/hdp/current/hive-client'
   sqoop_bin_dir = '/usr/hdp/current/sqoop-client/bin/'
-else:
-  sqoop_conf_dir = "/usr/lib/sqoop/conf"
-  sqoop_lib = "/usr/lib/sqoop/lib"
-  hadoop_home = '/usr/lib/hadoop'
-  hbase_home = "/usr/lib/hbase"
-  hive_home = "/usr/lib/hive"
-  sqoop_bin_dir = "/usr/bin"
+  zoo_conf_dir = "/usr/hdp/current/zookeeper-client/conf"
 
-zoo_conf_dir = "/etc/zookeeper"
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 smokeuser_principal = 
config['configurations']['cluster-env']['smokeuser_principal_name']

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
index fbfe623..8bcc84a 100644
--- 
a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
@@ -17,14 +17,16 @@ See the License for the specific language governing 
permissions and
 limitations under the License.
 
 """
+import re
+import json
+
+import status_params
+
 from ambari_commons.constants import AMBARI_SUDO_BINARY
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import default, format
-import status_params
-import re
-import json
 
 def get_bare_principal(normalized_principal_name):
   """
@@ -51,27 +53,29 @@ tmp_dir = Script.get_tmp_dir()
 sudo = AMBARI_SUDO_BINARY
 
 stack_name = default("/hostLevelParams/stack_name", None)
-
 version = default("/commandParams/version", None)
 
+conf_dir = status_params.conf_dir
+
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
-stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
+stack_is_hdp22_or_further = Script.is_hdp_stack_greater_or_equal("2.2")
+
+# default hadoop params
+rest_lib_dir = "/usr/lib/storm/contrib/storm-rest"
+storm_bin_dir = "/usr/bin"
+storm_lib_dir = "/usr/lib/storm/lib/"
 
-#hadoop params
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+# hadoop parameters for 2.2+
+if stack_is_hdp22_or_further:
   rest_lib_dir = '/usr/hdp/current/storm-client/contrib/storm-rest'
   storm_bin_dir = "/usr/hdp/current/storm-client/bin"
   storm_lib_dir = "/usr/hdp/current/storm-client/lib"
-else:
-  rest_lib_dir = "/usr/lib/storm/contrib/storm-rest"
-  storm_bin_dir = "/usr/bin"
-  storm_lib_dir = "/usr/lib/storm/lib/"
+
 
 storm_user = config['configurations']['storm-env']['storm_user']
 log_dir = config['configurations']['storm-env']['storm_log_dir']
 pid_dir = status_params.pid_dir
-conf_dir = "/etc/storm/conf"
 local_dir = config['configurations']['storm-site']['storm.local.dir']
 user_group = config['configurations']['cluster-env']['user_group']
 java64_home = config['hostLevelParams']['java_home']
@@ -102,7 +106,7 @@ if security_enabled:
   storm_jaas_principal = 
_storm_principal_name.replace('_HOST',_hostname_lowercase)
   storm_keytab_path = config['configurations']['storm-env']['storm_keytab']
 
-  if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+  if stack_is_hdp22_or_further:
     storm_ui_keytab_path = 
config['configurations']['storm-env']['storm_ui_keytab']
     _storm_ui_jaas_principal_name = 
config['configurations']['storm-env']['storm_ui_principal_name']
     storm_ui_jaas_principal = 
_storm_ui_jaas_principal_name.replace('_HOST',_hostname_lowercase)
@@ -167,7 +171,7 @@ if has_ranger_admin:
   elif xa_audit_db_flavor.lower() == 'oracle':
     jdbc_jar_name = "ojdbc6.jar"
     jdbc_symlink_name = "oracle-jdbc-driver.jar"
-  elif nxa_audit_db_flavor.lower() == 'postgres':
+  elif xa_audit_db_flavor.lower() == 'postgres':
     jdbc_jar_name = "postgresql.jar"
     jdbc_symlink_name = "postgres-jdbc-driver.jar"
   elif xa_audit_db_flavor.lower() == 'sqlserver':

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/status_params.py
 
b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/status_params.py
index a5fe494..99397ac 100644
--- 
a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/status_params.py
+++ 
b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/status_params.py
@@ -22,6 +22,18 @@ from resource_management.libraries.functions import 
get_kinit_path
 from resource_management.libraries.functions import default, format
 from ambari_commons import OSCheck
 
+# a map of the Ambari role to the component name
+# for use with /usr/hdp/current/<component>
+SERVER_ROLE_DIRECTORY_MAP = {
+  'NIMBUS' : 'storm-nimbus',
+  'SUPERVISOR' : 'storm-supervisor',
+  'STORM_UI_SERVER' : 'storm-client',
+  'DRPC_SERVER' : 'storm-client',
+  'STORM_SERVICE_CHECK' : 'storm-client'
+}
+
+component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "STORM_SERVICE_CHECK")
+
 config = Script.get_config()
 
 if OSCheck.is_windows_family():
@@ -36,19 +48,26 @@ else:
   pid_ui = format("{pid_dir}/ui.pid")
   pid_logviewer = format("{pid_dir}/logviewer.pid")
   pid_rest_api = format("{pid_dir}/restapi.pid")
-  pid_files = {"logviewer":pid_logviewer,
-               "ui": pid_ui,
-               "nimbus": pid_nimbus,
-               "supervisor": pid_supervisor,
-               "drpc": pid_drpc,
-               "rest_api": pid_rest_api}
+
+  pid_files = {
+    "logviewer":pid_logviewer,
+    "ui": pid_ui,
+    "nimbus": pid_nimbus,
+    "supervisor": pid_supervisor,
+    "drpc": pid_drpc,
+    "rest_api": pid_rest_api
+  }
 
   # Security related/required params
   hostname = config['hostname']
   security_enabled = 
config['configurations']['cluster-env']['security_enabled']
   kinit_path_local = 
get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', 
None))
   tmp_dir = Script.get_tmp_dir()
+
   conf_dir = "/etc/storm/conf"
+  if Script.is_hdp_stack_greater_or_equal("2.2"):
+    conf_dir = format("/usr/hdp/current/{component_directory}/conf")
+
   storm_user = config['configurations']['storm-env']['storm_user']
   storm_ui_principal = 
default('/configurations/storm-env/storm_ui_principal_name', None)
   storm_ui_keytab = default('/configurations/storm-env/storm_ui_keytab', None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
index f7985c7..5db3422 100644
--- 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
@@ -17,9 +17,14 @@ See the License for the specific language governing 
permissions and
 limitations under the License.
 
 """
+import os
 
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
-from resource_management import *
+from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_directory import 
HdfsDirectory
 
 # server configurations
 config = Script.get_config()
@@ -34,15 +39,27 @@ hdp_stack_version = 
format_hdp_stack_version(stack_version_unformatted)
 # New Cluster Stack Version that is defined during the RESTART of a Rolling 
Upgrade
 version = default("/commandParams/version", None)
 
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+# default hadoop parameters
+hadoop_home = '/usr'
+hadoop_bin_dir = "/usr/bin"
+hadoop_conf_dir = "/etc/hadoop/conf"
+tez_etc_dir = "/etc/tez"
+config_dir = "/etc/tez/conf"
+path_to_tez_examples_jar = "/usr/lib/tez/tez-mapreduce-examples*.jar"
+
+# hadoop parameters for 2.2+
+if Script.is_hdp_stack_greater_or_equal("2.2"):
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
+  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
   path_to_tez_examples_jar = "/usr/hdp/{hdp_version}/tez/tez-examples*.jar"
-else:
-  hadoop_bin_dir = "/usr/bin"
-  path_to_tez_examples_jar = "/usr/lib/tez/tez-mapreduce-examples*.jar"
-hadoop_conf_dir = "/etc/hadoop/conf"
 
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+# tez only started linking /usr/hdp/x.x.x.x/tez-client/conf in HDP 2.3+
+if Script.is_hdp_stack_greater_or_equal("2.3"):
+  # !!! use realpath for now since the symlink exists but is broken and a
+  # broken symlink messes with the DirectoryProvider class
+  config_dir = os.path.realpath("/usr/hdp/current/tez-client/conf")
+
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 smokeuser_principal = 
config['configurations']['cluster-env']['smokeuser_principal_name']
@@ -51,10 +68,6 @@ hdfs_user = 
config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = 
config['configurations']['hadoop-env']['hdfs_principal_name']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 
-config_dir_prefix = "/etc/tez"
-config_dir = format("{config_dir_prefix}/conf")
-
-hadoop_home = '/usr'
 java64_home = config['hostLevelParams']['java_home']
 
 tez_user = config['configurations']['tez-env']['tez_user']

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez.py
 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez.py
index ceafa56..0901120 100644
--- 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez.py
+++ 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez.py
@@ -27,14 +27,12 @@ from ambari_commons.os_family_impl import OsFamilyFuncImpl, 
OsFamilyImpl
 def tez():
   import params
 
-  Directory(params.config_dir_prefix,
-            mode=0755
-  )
+  Directory(params.tez_etc_dir, mode=0755)
+
   Directory(params.config_dir,
             owner = params.tez_user,
             group = params.user_group,
-            recursive = True
-  )
+            recursive = True)
 
   XmlConfig( "tez-site.xml",
              conf_dir = params.config_dir,
@@ -42,13 +40,11 @@ def tez():
              
configuration_attributes=params.config['configuration_attributes']['tez-site'],
              owner = params.tez_user,
              group = params.user_group,
-             mode = 0664
-  )
+             mode = 0664)
 
   File(format("{config_dir}/tez-env.sh"),
        owner=params.tez_user,
-       content=InlineTemplate(params.tez_env_sh_template)
-  )
+       content=InlineTemplate(params.tez_env_sh_template))
 
 
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index 96e6e30..004c786 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -19,11 +19,30 @@ Ambari Agent
 
 """
 import os
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
-from resource_management import *
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 import status_params
 
+# a map of the Ambari role to the component name
+# for use with /usr/hdp/current/<component>
+MAPR_SERVER_ROLE_DIRECTORY_MAP = {
+  'HISTORYSERVER' : 'hadoop-mapreduce-historyserver',
+  'MAPREDUCE2_CLIENT' : 'hadoop-mapreduce-client',
+}
+
+YARN_SERVER_ROLE_DIRECTORY_MAP = {
+  'APP_TIMELINE_SERVER' : 'hadoop-yarn-timelineserver',
+  'NODEMANAGER' : 'hadoop-yarn-nodemanager',
+  'RESOURCEMANAGER' : 'hadoop-yarn-resourcemanager',
+  'YARN_CLIENT' : 'hadoop-yarn-client'
+}
+
 # server configurations
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
@@ -39,46 +58,47 @@ version = default("/commandParams/version", None)
 
 hostname = config['hostname']
 
-#hadoop params
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
-  yarn_role_root = "hadoop-yarn-client"
-  mapred_role_root = "hadoop-mapreduce-client"
+# hadoop default parameters
+hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+hadoop_bin = "/usr/lib/hadoop/sbin"
+hadoop_bin_dir = "/usr/bin"
+hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_yarn_home = '/usr/lib/hadoop-yarn'
+hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
+mapred_bin = "/usr/lib/hadoop-mapreduce/sbin"
+yarn_bin = "/usr/lib/hadoop-yarn/sbin"
+yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
+
+# hadoop parameters for 2.2+
+if Script.is_hdp_stack_greater_or_equal("2.2"):
 
+  # MapR directory root
+  mapred_role_root = "hadoop-mapreduce-client"
   command_role = default("/role", "")
-  if command_role == "APP_TIMELINE_SERVER":
-    yarn_role_root = "hadoop-yarn-timelineserver"
-  elif command_role == "HISTORYSERVER":
-    mapred_role_root = "hadoop-mapreduce-historyserver"
-  elif command_role == "MAPREDUCE2_CLIENT":
-    mapred_role_root = "hadoop-mapreduce-client"
-  elif command_role == "NODEMANAGER":
-    yarn_role_root = "hadoop-yarn-nodemanager"
-  elif command_role == "RESOURCEMANAGER":
-    yarn_role_root = "hadoop-yarn-resourcemanager"
-  elif command_role == "YARN_CLIENT":
-    yarn_role_root = "hadoop-yarn-client"
-
-  hadoop_libexec_dir          = "/usr/hdp/current/hadoop-client/libexec"
-  hadoop_bin                  = "/usr/hdp/current/hadoop-client/sbin"
-  hadoop_bin_dir              = "/usr/hdp/current/hadoop-client/bin"
+  if command_role in MAPR_SERVER_ROLE_DIRECTORY_MAP:
+    mapred_role_root = MAPR_SERVER_ROLE_DIRECTORY_MAP[command_role]
+
+  # YARN directory root
+  yarn_role_root = "hadoop-yarn-client"
+  if command_role in YARN_SERVER_ROLE_DIRECTORY_MAP:
+    yarn_role_root = YARN_SERVER_ROLE_DIRECTORY_MAP[command_role]
+
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
+  hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
+  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
 
   hadoop_mapred2_jar_location = format("/usr/hdp/current/{mapred_role_root}")
-  mapred_bin                  = 
format("/usr/hdp/current/{mapred_role_root}/sbin")
+  mapred_bin = format("/usr/hdp/current/{mapred_role_root}/sbin")
+
+  hadoop_yarn_home = format("/usr/hdp/current/{yarn_role_root}")
+  yarn_bin = format("/usr/hdp/current/{yarn_role_root}/sbin")
+  yarn_container_bin = format("/usr/hdp/current/{yarn_role_root}/bin")
+
+  # the configuration direction for HDFS/YARN/MapR is the hadoop config
+  # directory, which is symlinked by hadoop-client only
+  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
 
-  hadoop_yarn_home            = format("/usr/hdp/current/{yarn_role_root}")
-  yarn_bin                    = 
format("/usr/hdp/current/{yarn_role_root}/sbin")
-  yarn_container_bin          = format("/usr/hdp/current/{yarn_role_root}/bin")
-else:
-  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-  hadoop_bin = "/usr/lib/hadoop/sbin"
-  hadoop_bin_dir = "/usr/bin"
-  hadoop_yarn_home = '/usr/lib/hadoop-yarn'
-  hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
-  mapred_bin = "/usr/lib/hadoop-mapreduce/sbin"
-  yarn_bin = "/usr/lib/hadoop-yarn/sbin"
-  yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
 
-hadoop_conf_dir = "/etc/hadoop/conf"
 limits_conf_dir = "/etc/security/limits.d"
 execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir + os.pathsep + 
yarn_container_bin
 
@@ -93,7 +113,7 @@ smokeuser_principal = 
config['configurations']['cluster-env']['smokeuser_princip
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 yarn_executor_container_group = 
config['configurations']['yarn-site']['yarn.nodemanager.linux-container-executor.group']
-kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 rm_hosts = config['clusterHostInfo']['rm_host']
 rm_host = rm_hosts[0]
 rm_port = 
config['configurations']['yarn-site']['yarn.resourcemanager.webapp.address'].split(':')[-1]

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
index 61402a0..6832da5 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
@@ -52,6 +52,12 @@ else:
 
   # Security related/required params
   hadoop_conf_dir = "/etc/hadoop/conf"
+  if Script.is_hdp_stack_greater_or_equal("2.2"):
+    # the configuration directory for HDFS/YARN/MapR is the hadoop config
+    # directory, which is symlinked by hadoop-client only
+    hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+
+
   hostname = config['hostname']
   kinit_path_local = 
functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths',
 None))
   security_enabled = 
config['configurations']['cluster-env']['security_enabled']
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params_linux.py
index cf555fa..3ef0a8c 100644
--- 
a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params_linux.py
@@ -18,11 +18,13 @@ limitations under the License.
 Ambari Agent
 
 """
+import status_params
 
-from resource_management.libraries.functions.version import 
format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.version import 
format_hdp_stack_version
 from resource_management.libraries.functions.default import default
-from resource_management import *
-import status_params
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
 
 # server configurations
 config = Script.get_config()
@@ -33,29 +35,26 @@ hdp_stack_version = 
format_hdp_stack_version(stack_version_unformatted)
 
 stack_name = default("/hostLevelParams/stack_name", None)
 current_version = default("/hostLevelParams/current_version", None)
+component_directory = status_params.component_directory
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling 
Upgrade
 version = default("/commandParams/version", None)
 
-#hadoop params
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
-  role_root = "zookeeper-client"
-  command_role = default("/role", "")
-
-  if command_role == "ZOOKEEPER_SERVER":
-    role_root = "zookeeper-server"
+# default parameters
+zk_home = "/usr"
+zk_bin = "/usr/lib/zookeeper/bin"
+zk_cli_shell = "/usr/lib/zookeeper/bin/zkCli.sh"
+config_dir = "/etc/zookeeper/conf"
 
-  zk_home = format("/usr/hdp/current/{role_root}")
-  zk_bin = format("/usr/hdp/current/{role_root}/bin")
-  zk_cli_shell = format("/usr/hdp/current/{role_root}/bin/zkCli.sh")
-else:
-  zk_home = "/usr"
-  zk_bin = "/usr/lib/zookeeper/bin"
-  zk_cli_shell = "/usr/lib/zookeeper/bin/zkCli.sh"
+# hadoop parameters for 2.2+
+if Script.is_hdp_stack_greater_or_equal("2.2"):
+  zk_home = format("/usr/hdp/current/{component_directory}")
+  zk_bin = format("/usr/hdp/current/{component_directory}/bin")
+  zk_cli_shell = format("/usr/hdp/current/{component_directory}/bin/zkCli.sh")
+  config_dir = status_params.config_dir
 
 
-config_dir = "/etc/zookeeper/conf"
-zk_user =  config['configurations']['zookeeper-env']['zk_user']
+zk_user = config['configurations']['zookeeper-env']['zk_user']
 hostname = config['hostname']
 user_group = config['configurations']['cluster-env']['user_group']
 zk_env_sh_template = config['configurations']['zookeeper-env']['content']
@@ -91,7 +90,7 @@ security_enabled = 
config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 smokeuser_principal = 
config['configurations']['cluster-env']['smokeuser_principal_name']
-kinit_path_local = 
functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths',
 None))
+kinit_path_local = 
get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', 
None))
 
 #log4j.properties
 if ('zookeeper-log4j' in config['configurations']) and ('content' in 
config['configurations']['zookeeper-log4j']):

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/status_params.py
 
b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/status_params.py
index 7cd99b6..ae2a1fd 100644
--- 
a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/status_params.py
+++ 
b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/status_params.py
@@ -17,9 +17,20 @@ See the License for the specific language governing 
permissions and
 limitations under the License.
 
 """
-
-from resource_management import *
 from ambari_commons import OSCheck
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.script.script import Script
+
+# a map of the Ambari role to the component name
+# for use with /usr/hdp/current/<component>
+SERVER_ROLE_DIRECTORY_MAP = {
+  'ZOOKEEPER_SERVER' : 'zookeeper-server',
+  'ZOOKEEPER_CLIENT' : 'zookeeper-client'
+}
+
+component_directory = 
Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "ZOOKEEPER_CLIENT")
 
 config = Script.get_config()
 
@@ -32,7 +43,10 @@ else:
   # Security related/required params
   hostname = config['hostname']
   security_enabled = 
config['configurations']['cluster-env']['security_enabled']
-  kinit_path_local = 
functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths',
 None))
+  kinit_path_local = 
get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', 
None))
   tmp_dir = Script.get_tmp_dir()
-  config_dir = "/etc/zookeeper/conf"
   zk_user =  config['configurations']['zookeeper-env']['zk_user']
+
+  config_dir = "/etc/zookeeper/conf"
+  if Script.is_hdp_stack_greater_or_equal("2.2"):
+    config_dir = format("/usr/hdp/current/{component_directory}/conf")

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
index 16f78b2..5c6c5bd 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
@@ -29,27 +29,35 @@ sudo = AMBARI_SUDO_BINARY
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
-#hadoop params
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+# default hadoop params
+mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+
+# HDP 2.2+ params
+if Script.is_hdp_stack_greater_or_equal("2.2"):
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
   hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
-else:
-  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+
+  # not supported in HDP 2.2+
+  hadoop_conf_empty_dir = None
 
-hadoop_conf_dir = "/etc/hadoop/conf"
-hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 versioned_hdp_root = '/usr/hdp/current'
+
 #security params
 security_enabled = config['configurations']['cluster-env']['security_enabled']
+
 #java params
 java_home = config['hostLevelParams']['java_home']
+
 #hadoop params
 hdfs_log_dir_prefix = 
config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
 hadoop_pid_dir_prefix = 
config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
 hadoop_root_logger = 
config['configurations']['hadoop-env']['hadoop_root_logger']
 
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.0') >= 0 
and compare_versions(hdp_stack_version, '2.1') < 0  and not 
OSCheck.is_suse_family():
+if Script.is_hdp_stack_greater_or_equal("2.0") and 
Script.is_hdp_stack_less_than("2.1") and not OSCheck.is_suse_family():
   # deprecated rhel jsvc_path
   jsvc_path = "/usr/libexec/bigtop-utils"
 else:

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
index 19e9717..170f72e 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -34,10 +34,11 @@ def setup_config():
   import params
   stackversion = params.stack_version_unformatted
   if params.has_namenode or stackversion.find('Gluster') >= 0:
+    # create core-site only if the hadoop config directory exists
     XmlConfig("core-site.xml",
               conf_dir=params.hadoop_conf_dir,
               configurations=params.config['configurations']['core-site'],
               
configuration_attributes=params.config['configuration_attributes']['core-site'],
               owner=params.hdfs_user,
-              group=params.user_group
-    )
+              group=params.user_group,
+              only_if=format("ls {hadoop_conf_dir}"))

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index 2262a20..f457438 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -69,11 +69,26 @@ def is_secure_port(port):
   else:
     return False
 
-#hadoop params
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+# hadoop default params
+mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+hadoop_home = "/usr/lib/hadoop"
+hadoop_secure_dn_user = hdfs_user
+hadoop_dir = "/etc/hadoop"
+versioned_hdp_root = '/usr/hdp/current'
+hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+
+# HDP 2.2+ params
+if Script.is_hdp_stack_greater_or_equal("2.2"):
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
   hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
   hadoop_home = "/usr/hdp/current/hadoop-client"
+  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+
+  # not supported in HDP 2.2+
+  hadoop_conf_empty_dir = None
+
   if not security_enabled:
     hadoop_secure_dn_user = '""'
   else:
@@ -91,16 +106,6 @@ if hdp_stack_version != "" and 
compare_versions(hdp_stack_version, '2.2') >= 0:
       hadoop_secure_dn_user = hdfs_user
     else:
       hadoop_secure_dn_user = '""'
-else:
-  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-  hadoop_home = "/usr/lib/hadoop"
-  hadoop_secure_dn_user = hdfs_user
-
-hadoop_dir = "/etc/hadoop"
-hadoop_conf_dir = "/etc/hadoop/conf"
-hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
-versioned_hdp_root = '/usr/hdp/current'
 
 #hadoop params
 hdfs_log_dir_prefix = 
config['configurations']['hadoop-env']['hdfs_log_dir_prefix']

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
index d4242f8..1e2df32 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
@@ -170,20 +170,19 @@ def setup_hadoop_env():
     else:
       tc_owner = params.hdfs_user
 
-    Directory(params.hadoop_dir,
-              mode=0755
-    )
-    Directory(params.hadoop_conf_empty_dir,
-              recursive=True,
-              owner="root",
-              group=params.user_group
-    )
-    Link(params.hadoop_conf_dir,
-         to=params.hadoop_conf_empty_dir,
-         not_if=format("ls {hadoop_conf_dir}")
-    )
-    File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
-         owner=tc_owner,
-         group=params.user_group,
-         content=InlineTemplate(params.hadoop_env_sh_template)
-    )
+    # create /etc/hadoop
+    Directory(params.hadoop_dir, mode=0755)
+
+    # HDP < 2.2 used a conf -> conf.empty symlink for /etc/hadoop/
+    if Script.is_hdp_stack_less_than("2.2"):
+      Directory(params.hadoop_conf_empty_dir, recursive=True, owner="root",
+        group=params.user_group )
+
+      Link(params.hadoop_conf_dir, to=params.hadoop_conf_empty_dir,
+         not_if=format("ls {hadoop_conf_dir}"))
+
+    # write out hadoop-env.sh, but only if the directory exists
+    if os.path.exists(params.hadoop_conf_dir):
+      File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'), 
owner=tc_owner,
+        group=params.user_group,
+        content=InlineTemplate(params.hadoop_env_sh_template))
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index e89fd63..d37ec82 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -28,24 +28,29 @@ config = Script.get_config()
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
-#hadoop params
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
+# hadoop default params
+mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+hadoop_lib_home = "/usr/lib/hadoop/lib"
+hadoop_bin = "/usr/lib/hadoop/sbin"
+hadoop_home = '/usr'
+create_lib_snappy_symlinks = True
+hadoop_conf_dir = "/etc/hadoop/conf"
+default_topology_script_file_path = "/etc/hadoop/conf/topology_script.py"
+
+# HDP 2.2+ params
+if Script.is_hdp_stack_greater_or_equal("2.2"):
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
   hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
   hadoop_lib_home = "/usr/hdp/current/hadoop-client/lib"
   hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   create_lib_snappy_symlinks = False
-else:
-  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-  hadoop_lib_home = "/usr/lib/hadoop/lib"
-  hadoop_bin = "/usr/lib/hadoop/sbin"
-  hadoop_home = '/usr'
-  create_lib_snappy_symlinks = True
+  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+  default_topology_script_file_path = 
"/usr/hdp/current/hadoop-client/conf/topology_script.py"
 
 current_service = config['serviceName']
-hadoop_conf_dir = "/etc/hadoop/conf"
+
 #security params
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 
@@ -183,7 +188,7 @@ all_ipv4_ips = default("/clusterHostInfo/all_ipv4_ips", [])
 slave_hosts = default("/clusterHostInfo/slave_hosts", [])
 
 #topology files
-net_topology_script_file_path = 
default("/configurations/core-site/net.topology.script.file.name","/etc/hadoop/conf/topology_script.py")
+net_topology_script_file_path = 
default("/configurations/core-site/net.topology.script.file.name",default_topology_script_file_path)
 net_topology_script_dir = os.path.dirname(net_topology_script_file_path)
 net_topology_mapping_data_file_name = 'topology_mappings.data'
 net_topology_mapping_data_file_path = os.path.join(net_topology_script_dir, 
net_topology_mapping_data_file_name)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py 
b/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
index 2317cf7..e7edaf7 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
@@ -102,7 +102,7 @@ class TestFlumeHandler(RMFTestCase):
   @patch("resource_management.libraries.script.Script.put_structured_out")
   @patch("sys.exit")
   def test_status_default(self, sys_exit_mock, structured_out_mock):
-    
+
     try:
       self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/flume_handler.py",
                        classname = "FlumeHandler",
@@ -113,7 +113,7 @@ class TestFlumeHandler(RMFTestCase):
     except:
       # expected since ComponentIsNotRunning gets raised
       pass
-    
+
     # test that the method was called with empty processes
     self.assertTrue(structured_out_mock.called)
     structured_out_mock.assert_called_with({'processes': []})
@@ -130,7 +130,7 @@ class TestFlumeHandler(RMFTestCase):
    script.load_structured_out()
 
    self.assertFalse("version" in script.structuredOut)
-    
+
 
   @patch("resource_management.libraries.script.Script.put_structured_out")
   @patch("glob.glob")
@@ -148,7 +148,7 @@ class TestFlumeHandler(RMFTestCase):
     except:
       # expected since ComponentIsNotRunning gets raised
       pass
-    
+
     self.assertTrue(structured_out_mock.called)
 
     # call_args[0] is a tuple, whose first element is the actual call argument
@@ -156,7 +156,7 @@ class TestFlumeHandler(RMFTestCase):
     self.assertTrue(struct_out.has_key('processes'))
 
     self.assertNoMoreResources()
-    
+
   @patch("resource_management.libraries.script.Script.put_structured_out")
   @patch("glob.glob")
   @patch("sys.exit")
@@ -173,13 +173,13 @@ class TestFlumeHandler(RMFTestCase):
     except:
       # expected since ComponentIsNotRunning gets raised
       pass
-      
+
     self.assertTrue(structured_out_mock.called)
 
     # call_args[0] is a tuple, whose first element is the actual call argument
     struct_out = structured_out_mock.call_args[0][0]
     self.assertTrue(struct_out.has_key('processes'))
-    self.assertNoMoreResources()    
+    self.assertNoMoreResources()
 
   def assert_configure_default(self):
 
@@ -400,24 +400,24 @@ class TestFlumeHandler(RMFTestCase):
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
 
-    self.assertResourceCalled('Directory', '/etc/flume/conf', recursive=True)
+    self.assertResourceCalled('Directory', 
'/usr/hdp/current/flume-server/conf', recursive=True)
 
     self.assertResourceCalled('Directory', '/var/log/flume', owner = 'flume')
 
-    self.assertResourceCalled('Directory', '/etc/flume/conf/a1')
+    self.assertResourceCalled('Directory', 
'/usr/hdp/current/flume-server/conf/a1')
 
-    self.assertResourceCalled('PropertiesFile', 
'/etc/flume/conf/a1/flume.conf',
+    self.assertResourceCalled('PropertiesFile', 
'/usr/hdp/current/flume-server/conf/a1/flume.conf',
       mode = 0644,
       properties = build_flume(
         self.getConfig()['configurations']['flume-conf']['content'])['a1'])
 
     self.assertResourceCalled('File',
-      '/etc/flume/conf/a1/log4j.properties',
+      '/usr/hdp/current/flume-server/conf/a1/log4j.properties',
       content = Template('log4j.properties.j2', agent_name = 'a1'),
       mode = 0644)
 
     self.assertResourceCalled('File',
-      '/etc/flume/conf/a1/ambari-meta.json',
+      '/usr/hdp/current/flume-server/conf/a1/ambari-meta.json',
       content='{"channels_count": 1, "sinks_count": 1, "sources_count": 1}',
       mode = 0644)
 
@@ -425,7 +425,7 @@ class TestFlumeHandler(RMFTestCase):
 
     
self.assertTrue(content.get_content().find('/usr/hdp/current/hive-metastore') > 
-1)
 
-    self.assertResourceCalled('File', "/etc/flume/conf/a1/flume-env.sh",
+    self.assertResourceCalled('File', 
"/usr/hdp/current/flume-server/conf/a1/flume-env.sh",
                               owner="flume",
                               content=content)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py 
b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
index 6f73dbe..b56d15a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
@@ -443,7 +443,7 @@ class TestHBaseMaster(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/hbase',
       mode = 0755)
 
-    self.assertResourceCalled('Directory', '/etc/hbase/conf',
+    self.assertResourceCalled('Directory', 
'/usr/hdp/current/hbase-master/conf',
       owner = 'hbase',
       group = 'hadoop',
       recursive = True)
@@ -469,45 +469,47 @@ class TestHBaseMaster(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
       owner = 'hbase',
       group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
+      conf_dir = '/usr/hdp/current/hbase-master/conf',
       configurations = self.getConfig()['configurations']['hbase-site'],
       configuration_attributes = 
self.getConfig()['configuration_attributes']['hbase-site'])
+
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
       owner = 'hbase',
       group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
+      conf_dir = '/usr/hdp/current/hbase-master/conf',
       configurations = self.getConfig()['configurations']['core-site'],
       configuration_attributes = 
self.getConfig()['configuration_attributes']['core-site'])
+
     self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
       owner = 'hbase',
       group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
+      conf_dir = '/usr/hdp/current/hbase-master/conf',
       configurations = self.getConfig()['configurations']['hdfs-site'],
       configuration_attributes = 
self.getConfig()['configuration_attributes']['hdfs-site'])
 
     self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
                               configurations = 
self.getConfig()['configurations']['hdfs-site'],
                               configuration_attributes = 
self.getConfig()['configuration_attributes']['hdfs-site'])
 
     self.assertResourceCalled('XmlConfig', 'hbase-policy.xml',
       owner = 'hbase',
       group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
+      conf_dir = '/usr/hdp/current/hbase-master/conf',
       configurations = self.getConfig()['configurations']['hbase-policy'],
       configuration_attributes = 
self.getConfig()['configuration_attributes']['hbase-policy'])
 
-    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
+    self.assertResourceCalled('File', 
'/usr/hdp/current/hbase-master/conf/hbase-env.sh',
       owner = 'hbase',
       content = 
InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']))
 
-    self.assertResourceCalled('TemplateConfig', 
'/etc/hbase/conf/hadoop-metrics2-hbase.properties',
+    self.assertResourceCalled('TemplateConfig', 
'/usr/hdp/current/hbase-master/conf/hadoop-metrics2-hbase.properties',
       owner = 'hbase',
       template_tag = 'GANGLIA-MASTER')
 
-    self.assertResourceCalled('TemplateConfig', 
'/etc/hbase/conf/regionservers',
+    self.assertResourceCalled('TemplateConfig', 
'/usr/hdp/current/hbase-master/conf/regionservers',
       owner = 'hbase',
       template_tag = None)
 
@@ -520,7 +522,7 @@ class TestHBaseMaster(RMFTestCase):
       recursive = True)
 
     self.assertResourceCalled('File',
-                              '/etc/hbase/conf/log4j.properties',
+                              
'/usr/hdp/current/hbase-master/conf/log4j.properties',
                               mode=0644,
                               group='hadoop',
                               owner='hbase',
@@ -529,7 +531,7 @@ class TestHBaseMaster(RMFTestCase):
     self.assertResourceCalled('HdfsDirectory', 'hdfs://nn1/apps/hbase/data',
                               security_enabled = False,
                               keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = "/usr/bin/kinit",
                               owner = 'hbase',
@@ -539,7 +541,7 @@ class TestHBaseMaster(RMFTestCase):
     self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
                               security_enabled = False,
                               keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = "/usr/bin/kinit",
                               mode = 0711,
@@ -550,13 +552,13 @@ class TestHBaseMaster(RMFTestCase):
     self.assertResourceCalled('HdfsDirectory', None,
                               security_enabled = False,
                               keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = "/usr/bin/kinit",
                               bin_dir = '/usr/hdp/current/hadoop-client/bin',
                               action = ['create'])
 
-    self.assertResourceCalled('Execute', 
'/usr/hdp/current/hbase-master/bin/hbase-daemon.sh --config /etc/hbase/conf 
start master',
+    self.assertResourceCalled('Execute', 
'/usr/hdp/current/hbase-master/bin/hbase-daemon.sh --config 
/usr/hdp/current/hbase-master/conf start master',
       not_if = 'ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps 
-p `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',
       user = 'hbase')
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py 
b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
index 0ab5fb7..ea8d9d4 100644
--- 
a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
+++ 
b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
@@ -370,7 +370,7 @@ class TestHbaseRegionServer(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/hbase',
       mode = 0755)
 
-    self.assertResourceCalled('Directory', '/etc/hbase/conf',
+    self.assertResourceCalled('Directory', 
'/usr/hdp/current/hbase-regionserver/conf',
       owner = 'hbase',
       group = 'hadoop',
       recursive = True)
@@ -396,46 +396,46 @@ class TestHbaseRegionServer(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
       owner = 'hbase',
       group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
+      conf_dir = '/usr/hdp/current/hbase-regionserver/conf',
       configurations = self.getConfig()['configurations']['hbase-site'],
       configuration_attributes = 
self.getConfig()['configuration_attributes']['hbase-site'])
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
                               owner = 'hbase',
                               group = 'hadoop',
-                              conf_dir = '/etc/hbase/conf',
+                              conf_dir = 
'/usr/hdp/current/hbase-regionserver/conf',
                               configurations = 
self.getConfig()['configurations']['core-site'],
                               configuration_attributes = 
self.getConfig()['configuration_attributes']['core-site']
     )
     self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
       owner = 'hbase',
       group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
+      conf_dir = '/usr/hdp/current/hbase-regionserver/conf',
       configurations = self.getConfig()['configurations']['hdfs-site'],
       configuration_attributes = 
self.getConfig()['configuration_attributes']['hdfs-site'])
 
     self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
                               configurations = 
self.getConfig()['configurations']['hdfs-site'],
                               configuration_attributes = 
self.getConfig()['configuration_attributes']['hdfs-site'])
 
     self.assertResourceCalled('XmlConfig', 'hbase-policy.xml',
       owner = 'hbase',
       group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
+      conf_dir = '/usr/hdp/current/hbase-regionserver/conf',
       configurations = self.getConfig()['configurations']['hbase-policy'],
       configuration_attributes = 
self.getConfig()['configuration_attributes']['hbase-policy'])
 
-    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
+    self.assertResourceCalled('File', 
'/usr/hdp/current/hbase-regionserver/conf/hbase-env.sh',
       owner = 'hbase',
       content = 
InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']))
 
-    self.assertResourceCalled('TemplateConfig', 
'/etc/hbase/conf/hadoop-metrics2-hbase.properties',
+    self.assertResourceCalled('TemplateConfig', 
'/usr/hdp/current/hbase-regionserver/conf/hadoop-metrics2-hbase.properties',
       owner = 'hbase',
       template_tag = 'GANGLIA-RS')
 
-    self.assertResourceCalled('TemplateConfig', 
'/etc/hbase/conf/regionservers',
+    self.assertResourceCalled('TemplateConfig', 
'/usr/hdp/current/hbase-regionserver/conf/regionservers',
       owner = 'hbase',
       template_tag = None)
 
@@ -448,7 +448,7 @@ class TestHbaseRegionServer(RMFTestCase):
       recursive = True)
 
     self.assertResourceCalled('File',
-                              '/etc/hbase/conf/log4j.properties',
+                              
'/usr/hdp/current/hbase-regionserver/conf/log4j.properties',
                               mode=0644,
                               group='hadoop',
                               owner='hbase',
@@ -457,7 +457,7 @@ class TestHbaseRegionServer(RMFTestCase):
     self.assertResourceCalled('HdfsDirectory', 'hdfs://nn1/apps/hbase/data',
                               security_enabled = False,
                               keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = "/usr/bin/kinit",
                               owner = 'hbase',
@@ -467,7 +467,7 @@ class TestHbaseRegionServer(RMFTestCase):
     self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
                               security_enabled = False,
                               keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = "/usr/bin/kinit",
                               mode = 0711,
@@ -478,13 +478,13 @@ class TestHbaseRegionServer(RMFTestCase):
     self.assertResourceCalled('HdfsDirectory', None,
                               security_enabled = False,
                               keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
                               hdfs_user = 'hdfs',
                               kinit_path_local = "/usr/bin/kinit",
                               bin_dir = '/usr/hdp/current/hadoop-client/bin',
                               action = ['create'])
 
-    self.assertResourceCalled('Execute', 
'/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config 
/etc/hbase/conf start regionserver',
+    self.assertResourceCalled('Execute', 
'/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config 
/usr/hdp/current/hbase-regionserver/conf start regionserver',
       not_if = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 
&& ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1',
       user = 'hbase')
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_service_check.py 
b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_service_check.py
index f08a31a..368aa58 100644
--- 
a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_service_check.py
+++ 
b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_service_check.py
@@ -114,13 +114,13 @@ class TestServiceCheck(RMFTestCase):
       content = Template('hbase-smoke.sh.j2'),
       mode = 0755,
     )
-    self.assertResourceCalled('Execute', ' 
/usr/hdp/current/hbase-client/bin/hbase --config /etc/hbase/conf shell 
/tmp/hbase-smoke.sh',
+    self.assertResourceCalled('Execute', ' 
/usr/hdp/current/hbase-client/bin/hbase --config 
/usr/hdp/current/hbase-client/conf shell /tmp/hbase-smoke.sh',
       logoutput = True,
       tries = 3,
       user = 'ambari-qa',
       try_sleep = 5,
     )
-    self.assertResourceCalled('Execute', ' /tmp/hbaseSmokeVerify.sh 
/etc/hbase/conf  /usr/hdp/current/hbase-client/bin/hbase',
+    self.assertResourceCalled('Execute', ' /tmp/hbaseSmokeVerify.sh 
/usr/hdp/current/hbase-client/conf  /usr/hdp/current/hbase-client/bin/hbase',
       logoutput = True,
       tries = 3,
       user = 'ambari-qa',

http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py 
b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
index 0427fe9..5f1d856 100644
--- 
a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
+++ 
b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
@@ -49,7 +49,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     )
     self.assert_configure_default()
     self.assertResourceCalled('Execute', 
'/usr/hdp/current/phoenix-server/bin/queryserver.py start',
-                            environment = {'JAVA_HOME': 
'/usr/jdk64/jdk1.8.0_40', 'HBASE_CONF_DIR': '/etc/hbase/conf'},
+                            environment = {'JAVA_HOME': 
'/usr/jdk64/jdk1.8.0_40', 'HBASE_CONF_DIR': 
'/usr/hdp/current/hbase-regionserver/conf'},
                             user = 'hbase'
     )
     self.assertNoMoreResources()
@@ -66,7 +66,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     self.assertResourceCalled('Execute', 
'/usr/hdp/current/phoenix-server/bin/queryserver.py stop',
         on_timeout = '! ( ls /var/run/hbase/phoenix-hbase-server.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hbase/phoenix-hbase-server.pid` 
>/dev/null 2>&1 ) || ambari-sudo.sh -H -E kill -9 `cat 
/var/run/hbase/phoenix-hbase-server.pid`',
         timeout = 30,
-        environment = {'JAVA_HOME': '/usr/jdk64/jdk1.8.0_40', 
'HBASE_CONF_DIR': '/etc/hbase/conf'},
+        environment = {'JAVA_HOME': '/usr/jdk64/jdk1.8.0_40', 
'HBASE_CONF_DIR': '/usr/hdp/current/hbase-regionserver/conf'},
         user = 'hbase'
     )
     
@@ -96,7 +96,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     )
     self.assert_configure_secured()
     self.assertResourceCalled('Execute', 
'/usr/hdp/current/phoenix-server/bin/queryserver.py start',
-                          environment = {'JAVA_HOME': 
'/usr/jdk64/jdk1.8.0_40', 'HBASE_CONF_DIR': '/etc/hbase/conf'},
+                          environment = {'JAVA_HOME': 
'/usr/jdk64/jdk1.8.0_40', 'HBASE_CONF_DIR': 
'/usr/hdp/current/hbase-regionserver/conf'},
                           user = 'hbase'
     )
     self.assertNoMoreResources()
@@ -113,7 +113,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     self.assertResourceCalled('Execute', 
'/usr/hdp/current/phoenix-server/bin/queryserver.py stop',
         on_timeout = '! ( ls /var/run/hbase/phoenix-hbase-server.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hbase/phoenix-hbase-server.pid` 
>/dev/null 2>&1 ) || ambari-sudo.sh -H -E kill -9 `cat 
/var/run/hbase/phoenix-hbase-server.pid`',
         timeout = 30,
-        environment = {'JAVA_HOME': '/usr/jdk64/jdk1.8.0_40', 
'HBASE_CONF_DIR': '/etc/hbase/conf'},
+        environment = {'JAVA_HOME': '/usr/jdk64/jdk1.8.0_40', 
'HBASE_CONF_DIR': '/usr/hdp/current/hbase-regionserver/conf'},
         user = 'hbase'
     )
     
@@ -133,7 +133,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/hbase',
       mode = 0755)
 
-    self.assertResourceCalled('Directory', '/etc/hbase/conf',
+    self.assertResourceCalled('Directory', 
'/usr/hdp/current/hbase-regionserver/conf',
       owner = 'hbase',
       group = 'hadoop',
       recursive = True)
@@ -141,17 +141,17 @@ class TestPhoenixQueryServer(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
       owner = 'hbase',
       group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
+      conf_dir = '/usr/hdp/current/hbase-regionserver/conf',
       configurations = self.getConfig()['configurations']['hbase-site'],
       configuration_attributes = 
self.getConfig()['configuration_attributes']['hbase-site'])
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
                               owner = 'hbase',
                               group = 'hadoop',
-                              conf_dir = '/etc/hbase/conf',
+                              conf_dir = 
'/usr/hdp/current/hbase-regionserver/conf',
                               configurations = 
self.getConfig()['configurations']['core-site'],
                               configuration_attributes = 
self.getConfig()['configuration_attributes']['core-site']
     )
-    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
+    self.assertResourceCalled('File', 
'/usr/hdp/current/hbase-regionserver/conf/hbase-env.sh',
       owner = 'hbase',
       content = 
InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']))
 
@@ -181,7 +181,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/hbase',
                               mode = 0755
     )
-    self.assertResourceCalled('Directory', '/etc/hbase/conf',
+    self.assertResourceCalled('Directory', 
'/usr/hdp/current/hbase-regionserver/conf',
                               owner = 'hbase',
                               group = 'hadoop',
                               recursive = True,
@@ -207,47 +207,47 @@ class TestPhoenixQueryServer(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
                               owner = 'hbase',
                               group = 'hadoop',
-                              conf_dir = '/etc/hbase/conf',
+                              conf_dir = 
'/usr/hdp/current/hbase-regionserver/conf',
                               configurations = 
self.getConfig()['configurations']['hbase-site'],
                               configuration_attributes = 
self.getConfig()['configuration_attributes']['hbase-site']
     )
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
                               owner = 'hbase',
                               group = 'hadoop',
-                              conf_dir = '/etc/hbase/conf',
+                              conf_dir = 
'/usr/hdp/current/hbase-regionserver/conf',
                               configurations = 
self.getConfig()['configurations']['core-site'],
                               configuration_attributes = 
self.getConfig()['configuration_attributes']['core-site']
     )
     self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                               owner = 'hbase',
                               group = 'hadoop',
-                              conf_dir = '/etc/hbase/conf',
+                              conf_dir = 
'/usr/hdp/current/hbase-regionserver/conf',
                               configurations = 
self.getConfig()['configurations']['hdfs-site'],
                               configuration_attributes = 
self.getConfig()['configuration_attributes']['hdfs-site']
     )
     self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
                               configurations = 
self.getConfig()['configurations']['hdfs-site'],
                               configuration_attributes = 
self.getConfig()['configuration_attributes']['hdfs-site']
     )
     self.assertResourceCalled('XmlConfig', 'hbase-policy.xml',
                               owner = 'hbase',
                               group = 'hadoop',
-                              conf_dir = '/etc/hbase/conf',
+                              conf_dir = 
'/usr/hdp/current/hbase-regionserver/conf',
                               configurations = 
self.getConfig()['configurations']['hbase-policy'],
                               configuration_attributes = 
self.getConfig()['configuration_attributes']['hbase-policy']
                               )
-    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
+    self.assertResourceCalled('File', 
'/usr/hdp/current/hbase-regionserver/conf/hbase-env.sh',
                               owner = 'hbase',
                               content = 
InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']),
                               )
-    self.assertResourceCalled('TemplateConfig', 
'/etc/hbase/conf/hadoop-metrics2-hbase.properties',
+    self.assertResourceCalled('TemplateConfig', 
'/usr/hdp/current/hbase-regionserver/conf/hadoop-metrics2-hbase.properties',
                               owner = 'hbase',
                               template_tag = 'GANGLIA-RS',
                               )
-    self.assertResourceCalled('TemplateConfig', 
'/etc/hbase/conf/regionservers',
+    self.assertResourceCalled('TemplateConfig', 
'/usr/hdp/current/hbase-regionserver/conf/regionservers',
                               owner = 'hbase',
                               template_tag = None,
                               )
@@ -260,7 +260,7 @@ class TestPhoenixQueryServer(RMFTestCase):
                               recursive = True,
                               )
     self.assertResourceCalled('File',
-                              '/etc/hbase/conf/log4j.properties',
+                              
'/usr/hdp/current/hbase-regionserver/conf/log4j.properties',
                               mode=0644,
                               group='hadoop',
                               owner='hbase',
@@ -271,7 +271,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/hbase',
                               mode = 0755
     )
-    self.assertResourceCalled('Directory', '/etc/hbase/conf',
+    self.assertResourceCalled('Directory', 
'/usr/hdp/current/hbase-regionserver/conf',
                               owner = 'hbase',
                               group = 'hadoop',
                               recursive = True,
@@ -297,51 +297,51 @@ class TestPhoenixQueryServer(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
                               owner = 'hbase',
                               group = 'hadoop',
-                              conf_dir = '/etc/hbase/conf',
+                              conf_dir = 
'/usr/hdp/current/hbase-regionserver/conf',
                               configurations = 
self.getConfig()['configurations']['hbase-site'],
                               configuration_attributes = 
self.getConfig()['configuration_attributes']['hbase-site']
     )
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
                               owner = 'hbase',
                               group = 'hadoop',
-                              conf_dir = '/etc/hbase/conf',
+                              conf_dir = 
'/usr/hdp/current/hbase-regionserver/conf',
                               configurations = 
self.getConfig()['configurations']['core-site'],
                               configuration_attributes = 
self.getConfig()['configuration_attributes']['core-site']
     )
     self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                               owner = 'hbase',
                               group = 'hadoop',
-                              conf_dir = '/etc/hbase/conf',
+                              conf_dir = 
'/usr/hdp/current/hbase-regionserver/conf',
                               configurations = 
self.getConfig()['configurations']['hdfs-site'],
                               configuration_attributes = 
self.getConfig()['configuration_attributes']['hdfs-site']
     )
     self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
                               configurations = 
self.getConfig()['configurations']['hdfs-site'],
                               configuration_attributes = 
self.getConfig()['configuration_attributes']['hdfs-site']
     )
     self.assertResourceCalled('XmlConfig', 'hbase-policy.xml',
                               owner = 'hbase',
                               group = 'hadoop',
-                              conf_dir = '/etc/hbase/conf',
+                              conf_dir = 
'/usr/hdp/current/hbase-regionserver/conf',
                               configurations = 
self.getConfig()['configurations']['hbase-policy'],
                               configuration_attributes = 
self.getConfig()['configuration_attributes']['hbase-policy']
     )
-    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
+    self.assertResourceCalled('File', 
'/usr/hdp/current/hbase-regionserver/conf/hbase-env.sh',
                               owner = 'hbase',
                               content = 
InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']),
                               )
-    self.assertResourceCalled('TemplateConfig', 
'/etc/hbase/conf/hadoop-metrics2-hbase.properties',
+    self.assertResourceCalled('TemplateConfig', 
'/usr/hdp/current/hbase-regionserver/conf/hadoop-metrics2-hbase.properties',
                               owner = 'hbase',
                               template_tag = 'GANGLIA-RS',
                               )
-    self.assertResourceCalled('TemplateConfig', 
'/etc/hbase/conf/regionservers',
+    self.assertResourceCalled('TemplateConfig', 
'/usr/hdp/current/hbase-regionserver/conf/regionservers',
                               owner = 'hbase',
                               template_tag = None,
                               )
-    self.assertResourceCalled('TemplateConfig', 
'/etc/hbase/conf/hbase_queryserver_jaas.conf',
+    self.assertResourceCalled('TemplateConfig', 
'/usr/hdp/current/hbase-regionserver/conf/hbase_queryserver_jaas.conf',
                               owner = 'hbase',
                               template_tag = None,
                               )
@@ -354,7 +354,7 @@ class TestPhoenixQueryServer(RMFTestCase):
                               recursive = True,
                               )
     self.assertResourceCalled('File',
-                              '/etc/hbase/conf/log4j.properties',
+                              
'/usr/hdp/current/hbase-regionserver/conf/log4j.properties',
                               mode=0644,
                               group='hadoop',
                               owner='hbase',

Reply via email to