AMBARI-7257 Use Versioned RPMs for HDP 2.2 stack and make it pluggable to be able to reuse the scripts for HDP 2.* (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7d9feb6a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7d9feb6a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7d9feb6a

Branch: refs/heads/branch-alerts-dev
Commit: 7d9feb6afa6cd310fb8b11221ab67f01f048dd38
Parents: 8778556
Author: Dmytro Sen <d...@hortonworks.com>
Authored: Thu Sep 11 22:26:27 2014 +0300
Committer: Dmytro Sen <d...@hortonworks.com>
Committed: Thu Sep 11 22:26:27 2014 +0300

----------------------------------------------------------------------
 .../libraries/providers/execute_hadoop.py       |  9 +-
 .../libraries/providers/hdfs_directory.py       | 16 +++-
 .../libraries/resources/execute_hadoop.py       |  1 +
 .../libraries/resources/hdfs_directory.py       |  1 +
 .../2.0.6/hooks/after-INSTALL/scripts/params.py | 20 +++--
 .../hooks/before-INSTALL/scripts/params.py      |  3 +-
 .../hooks/before-START/files/checkForFormat.sh  |  3 +
 .../2.0.6/hooks/before-START/scripts/params.py  | 25 ++++--
 .../services/FLUME/package/scripts/flume.py     |  2 +-
 .../FLUME/package/scripts/flume_check.py        |  2 +-
 .../services/FLUME/package/scripts/params.py    | 14 +++-
 .../HBASE/package/files/hbaseSmokeVerify.sh     |  3 +-
 .../services/HBASE/package/scripts/params.py    | 37 ++++++--
 .../HBASE/package/scripts/service_check.py      |  6 +-
 .../HDFS/package/files/checkForFormat.sh        |  4 +-
 .../HDFS/package/scripts/hdfs_namenode.py       | 15 ++--
 .../services/HDFS/package/scripts/namenode.py   |  2 +-
 .../services/HDFS/package/scripts/params.py     | 34 +++++---
 .../HDFS/package/scripts/service_check.py       | 27 ++++--
 .../2.0.6/services/HIVE/package/scripts/hcat.py |  6 ++
 .../HIVE/package/scripts/hcat_service_check.py  |  8 +-
 .../2.0.6/services/HIVE/package/scripts/hive.py |  2 +
 .../HIVE/package/scripts/hive_service.py        |  9 +-
 .../HIVE/package/scripts/install_jars.py        |  6 +-
 .../services/HIVE/package/scripts/params.py     | 73 ++++++++++------
 .../package/templates/startHiveserver2.sh.j2    |  2 +-
 .../services/OOZIE/configuration/oozie-env.xml  |  2 +-
 .../services/OOZIE/package/files/oozieSmoke2.sh |  8 +-
 .../OOZIE/package/scripts/oozie_service.py      |  4 +-
 .../services/OOZIE/package/scripts/params.py    | 24 ++++--
 .../services/PIG/package/scripts/params.py      | 20 ++++-
 .../PIG/package/scripts/service_check.py        | 10 ++-
 .../services/SQOOP/package/scripts/params.py    | 10 ++-
 .../WEBHCAT/configuration/webhcat-env.xml       |  2 +-
 .../services/WEBHCAT/package/scripts/params.py  | 41 ++++++---
 .../services/WEBHCAT/package/scripts/webhcat.py | 11 ++-
 .../services/YARN/package/scripts/params.py     | 45 ++++++----
 .../YARN/package/scripts/resourcemanager.py     |  5 +-
 .../services/YARN/package/scripts/service.py    |  2 +-
 .../YARN/package/scripts/service_check.py       |  3 +-
 .../2.0.6/services/YARN/package/scripts/yarn.py | 14 ++--
 .../ZOOKEEPER/package/scripts/params.py         | 17 +++-
 .../services/FALCON/package/scripts/params.py   | 15 +++-
 .../services/STORM/package/scripts/params.py    |  5 +-
 .../main/resources/stacks/HDP/2.2/metainfo.xml  | 23 +++++
 .../resources/stacks/HDP/2.2/repos/repoinfo.xml | 82 ++++++++++++++++++
 .../stacks/HDP/2.2/role_command_order.json      | 88 ++++++++++++++++++++
 .../stacks/HDP/2.2/services/FALCON/metainfo.xml | 28 +++++++
 .../stacks/HDP/2.2/services/FLUME/metainfo.xml  | 40 +++++++++
 .../stacks/HDP/2.2/services/HBASE/metainfo.xml  | 42 ++++++++++
 .../services/HDFS/configuration/hadoop-env.xml  | 29 +++++++
 .../services/HDFS/configuration/hdfs-site.xml   | 34 ++++++++
 .../stacks/HDP/2.2/services/HDFS/metainfo.xml   | 68 +++++++++++++++
 .../stacks/HDP/2.2/services/HIVE/metainfo.xml   | 44 ++++++++++
 .../services/OOZIE/configuration/oozie-site.xml | 38 +++++++++
 .../stacks/HDP/2.2/services/OOZIE/metainfo.xml  | 28 +++++++
 .../stacks/HDP/2.2/services/PIG/metainfo.xml    | 41 +++++++++
 .../stacks/HDP/2.2/services/SQOOP/metainfo.xml  | 29 +++++++
 .../services/STORM/configuration/storm-env.xml  | 29 +++++++
 .../services/STORM/configuration/storm-site.xml | 54 ++++++++++++
 .../stacks/HDP/2.2/services/STORM/metainfo.xml  | 29 +++++++
 .../stacks/HDP/2.2/services/TEZ/metainfo.xml    | 40 +++++++++
 .../WEBHCAT/configuration/webhcat-site.xml      | 59 +++++++++++++
 .../HDP/2.2/services/WEBHCAT/metainfo.xml       | 44 ++++++++++
 .../YARN/configuration-mapred/mapred-site.xml   | 36 ++++++++
 .../services/YARN/configuration/yarn-site.xml   | 35 ++++++++
 .../stacks/HDP/2.2/services/YARN/metainfo.xml   | 71 ++++++++++++++++
 .../HDP/2.2/services/ZOOKEEPER/metainfo.xml     | 40 +++++++++
 .../stacks/1.3.2/HDFS/test_service_check.py     | 18 +++-
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |  6 ++
 .../2.0.6/HBASE/test_hbase_regionserver.py      |  6 ++
 .../2.0.6/HBASE/test_hbase_service_check.py     | 10 +--
 .../python/stacks/2.0.6/HDFS/test_namenode.py   | 36 +++++---
 .../stacks/2.0.6/HDFS/test_service_check.py     | 18 +++-
 .../stacks/2.0.6/HIVE/test_hcat_client.py       | 10 ++-
 .../stacks/2.0.6/HIVE/test_hive_metastore.py    |  8 ++
 .../stacks/2.0.6/HIVE/test_hive_server.py       | 18 ++++
 .../2.0.6/HIVE/test_hive_service_check.py       |  7 ++
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     | 10 ++-
 .../stacks/2.0.6/PIG/test_pig_service_check.py  | 12 ++-
 .../stacks/2.0.6/WEBHCAT/test_webhcat_server.py | 12 +++
 .../stacks/2.0.6/YARN/test_historyserver.py     | 12 +++
 .../stacks/2.0.6/YARN/test_nodemanager.py       | 12 +++
 .../2.0.6/YARN/test_yarn_service_check.py       |  9 +-
 .../stacks/2.1/FALCON/test_falcon_server.py     |  2 +
 .../stacks/2.1/HIVE/test_hive_metastore.py      |  7 ++
 86 files changed, 1636 insertions(+), 196 deletions(-)
----------------------------------------------------------------------
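For reviewers: the pattern this patch repeats across the stack's params.py files is a single rpm_version switch between the versioned /usr/hdp/<version> layout and the legacy flat layout. A minimal standalone sketch of that selection (plain Python; the version string is illustrative, and the real scripts use Ambari's default()/format() helpers rather than this bare code):

def hadoop_params(rpm_version=None):
    # Versioned RPMs: everything lives under /usr/hdp/<version>.
    if rpm_version is not None:
        prefix = "/usr/hdp/" + rpm_version
        return {"hadoop_conf_dir": prefix + "/etc/hadoop/conf",
                "hadoop_bin_dir": prefix + "/hadoop/bin",
                "hadoop_libexec_dir": prefix + "/hadoop/libexec"}
    # Legacy RPMs: classic flat locations.
    return {"hadoop_conf_dir": "/etc/hadoop/conf",
            "hadoop_bin_dir": "/usr/bin",
            "hadoop_libexec_dir": "/usr/lib/hadoop/libexec"}

print(hadoop_params("2.2.0.0"))  # versioned layout (example version)
print(hadoop_params())           # unversioned fallback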


http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py b/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
index 8ab71ff..f367e99 100644
--- a/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
@@ -19,6 +19,7 @@ limitations under the License.
 Ambari Agent
 
 """
+import os
 
 from resource_management import *
 
@@ -27,6 +28,7 @@ class ExecuteHadoopProvider(Provider):
     kinit__path_local = self.resource.kinit_path_local
     keytab = self.resource.keytab
     conf_dir = self.resource.conf_dir
+    bin_dir = self.resource.bin_dir
     command = self.resource.command
     principal = self.resource.principal
     
@@ -39,10 +41,15 @@ class ExecuteHadoopProvider(Provider):
           path = ['/bin'],
           user = self.resource.user
         )
-    
+
+      path = os.environ['PATH']
+      if bin_dir is not None:
+        path += os.pathsep + bin_dir
+
       Execute (format("hadoop --config {conf_dir} {command}"),
         user        = self.resource.user,
         tries       = self.resource.tries,
         try_sleep   = self.resource.try_sleep,
         logoutput   = self.resource.logoutput,
+        environment = {'PATH' : path}
       )
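The PATH handling above keeps the provider backward compatible: bin_dir is only appended when a stack supplies one. In isolation (the version in the example path is illustrative):

import os

def build_path(bin_dir=None):
    # Start from the agent's inherited PATH; append bin_dir only when the
    # resource supplies one, so unversioned stacks behave exactly as before.
    path = os.environ['PATH']
    if bin_dir is not None:
        path += os.pathsep + bin_dir
    return path

print(build_path())                               # legacy: PATH unchanged
print(build_path('/usr/hdp/2.2.0.0/hadoop/bin'))  # versioned bin appended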

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
index 6a40b6d..33cc1be 100644
--- a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
@@ -19,6 +19,7 @@ limitations under the License.
 Ambari Agent
 
 """
+import os
 
 from resource_management import *
 directories_list = [] #directories list for mkdir
@@ -68,6 +69,7 @@ class HdfsDirectoryProvider(Provider):
     secured = self.resource.security_enabled
     keytab_file = self.resource.keytab
     kinit_path = self.resource.kinit_path_local
+    bin_dir = self.resource.bin_dir
 
     chmod_commands = []
     chown_commands = []
@@ -76,7 +78,7 @@ class HdfsDirectoryProvider(Provider):
       mode = chmod_key[0]
       recursive = chmod_key[1]
       chmod_dirs_str = ' '.join(chmod_dirs)
-      chmod_commands.append(format("hadoop fs -chmod {recursive} {mode} {chmod_dirs_str}"))
+      chmod_commands.append(format("hadoop --config {hdp_conf_dir} fs -chmod {recursive} {mode} {chmod_dirs_str}"))
 
     for chown_key, chown_dirs in chown_map.items():
       owner = chown_key[0]
@@ -87,7 +89,7 @@ class HdfsDirectoryProvider(Provider):
         chown = owner
         if group:
           chown = format("{owner}:{group}")
-        chown_commands.append(format("hadoop fs -chown {recursive} {chown} {chown_dirs_str}"))
+        chown_commands.append(format("hadoop --config {hdp_conf_dir} fs -chown {recursive} {chown} {chown_dirs_str}"))
 
     if secured:
         Execute(format("{kinit_path} -kt {keytab_file} {hdfs_principal_name}"),
@@ -97,11 +99,17 @@ class HdfsDirectoryProvider(Provider):
     #for hadoop 2 we need to specify -p to create directories recursively
     parent_flag = '`rpm -q hadoop | grep -q "hadoop-1" || echo "-p"`'
 
-    Execute(format('hadoop fs -mkdir {parent_flag} {dir_list_str} && {chmod_cmd} && {chown_cmd}',
+    path = os.environ['PATH']
+    if bin_dir is not None:
+      path += os.pathsep + bin_dir
+
+    Execute(format('hadoop --config {hdp_conf_dir} fs -mkdir {parent_flag} {dir_list_str} && {chmod_cmd} && {chown_cmd}',
                    chmod_cmd=' && '.join(chmod_commands),
                    chown_cmd=' && '.join(chown_commands)),
             user=hdp_hdfs_user,
-            not_if=format("su - {hdp_hdfs_user} -c 'hadoop fs -ls {dir_list_str}'")
+            environment = {'PATH' : path},
+            not_if=format("su - {hdp_hdfs_user} -c 'export PATH=$PATH:{bin_dir} ; "
+                          "hadoop --config {hdp_conf_dir} fs -ls {dir_list_str}'")
     )
 
     directories_list[:] = []
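The chmod/chown batching in this provider turns each pending directory operation into one "hadoop --config <conf> fs ..." string and chains them with " && ", so a single Execute covers mkdir, chmod, and chown. A standalone sketch of that joining (paths, modes, and the version are made-up examples):

conf_dir = "/usr/hdp/2.2.0.0/etc/hadoop/conf"  # illustrative path
dirs = [("/apps/hbase/staging", "711"), ("/user/ambari-qa", "770")]

chmod_commands = ["hadoop --config %s fs -chmod %s %s" % (conf_dir, mode, d)
                  for d, mode in dirs]
dir_list_str = " ".join(d for d, _ in dirs)
cmd = ("hadoop --config %s fs -mkdir -p %s && %s"
       % (conf_dir, dir_list_str, " && ".join(chmod_commands)))
print(cmd)  # one shell invocation instead of one Execute per directory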

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py b/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
index 94daf5b..149548d 100644
--- a/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
@@ -32,6 +32,7 @@ class ExecuteHadoop(Resource):
   user = ResourceArgument()
   logoutput = BooleanArgument(default=False)
   principal = ResourceArgument(default=lambda obj: obj.user)
+  bin_dir = ResourceArgument() # appended to $PATH
   
   conf_dir = ResourceArgument()
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
index 63d9cc2..7888cd8 100644
--- a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
@@ -38,6 +38,7 @@ class HdfsDirectory(Resource):
   keytab = ResourceArgument()
   kinit_path_local = ResourceArgument()
   hdfs_user = ResourceArgument()
+  bin_dir = ResourceArgument(default="")
 
  #action 'create' immediately creates all pending directory in efficient manner
   #action 'create_delayed' add directory to list of pending directories

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
index d537199..389d6ab 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
@@ -19,17 +19,29 @@ limitations under the License.
 
 from resource_management import *
 from resource_management.core.system import System
-import os
 
 config = Script.get_config()
 
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
+  hadoop_conf_empty_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf.empty")
+  mapreduce_libs_path = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/*")
+  hadoop_libexec_dir = format("/usr/hdp/{rpm_version}/hadoop/libexec")
+else:
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+
 #security params
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 #java params
 java_home = config['hostLevelParams']['java_home']
 #hadoop params
-hadoop_conf_dir = "/etc/hadoop/conf"
-hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
 hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
@@ -56,8 +68,6 @@ ttnode_heapsize = "1024m"
 
 dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
 mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
 
 #users and groups

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
index 01789a7..5700e28 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
@@ -19,7 +19,6 @@ limitations under the License.
 
 from resource_management import *
 from resource_management.core.system import System
-import os
 import json
 import collections
 
@@ -38,6 +37,8 @@ user_group = config['configurations']['cluster-env']['user_group']
 proxyuser_group = default("/configurations/hadoop-env/proxyuser_group","users")
 nagios_group = config['configurations']['nagios-env']['nagios_group']
 
+hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+
 #hosts
 hostname = config["hostname"]
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/files/checkForFormat.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/files/checkForFormat.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/files/checkForFormat.sh
index f92f613..9036ab2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/files/checkForFormat.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/files/checkForFormat.sh
@@ -24,6 +24,8 @@ export hdfs_user=$1
 shift
 export conf_dir=$1
 shift
+export bin_dir=$1
+shift
 export mark_dir=$1
 shift
 export name_dirs=$*
@@ -50,6 +52,7 @@ if [[ ! -d $mark_dir ]] ; then
   done
 
   if [[ $EXIT_CODE == 0 ]] ; then
+    export PATH=$PATH:$bin_dir
     su - ${hdfs_user} -c "yes Y | hadoop --config ${conf_dir} ${command}"
   else
     echo "ERROR: Namenode directory(s) is non empty. Will not format the 
namenode. List of non-empty namenode dirs ${list_of_non_empty_dirs}"

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index fc525a6..8fb2d90 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -23,6 +23,25 @@ import os
 
 config = Script.get_config()
 
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
+  mapreduce_libs_path = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/*")
+  hadoop_libexec_dir = format("/usr/hdp/{rpm_version}/hadoop/libexec")
+  hadoop_lib_home = format("/usr/hdp/{rpm_version}/hadoop/lib")
+  hadoop_bin = format("/usr/hdp/{rpm_version}/hadoop/sbin")
+  hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
+else:
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+  hadoop_lib_home = "/usr/lib/hadoop/lib"
+  hadoop_bin = "/usr/lib/hadoop/sbin"
+  hadoop_home = '/usr'
+
 #security params
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 
@@ -72,11 +91,7 @@ if has_ganglia_server:
 
 if has_namenode:
   hadoop_tmp_dir = format("/tmp/hadoop-{hdfs_user}")
-hadoop_lib_home = "/usr/lib/hadoop/lib"
-hadoop_conf_dir = "/etc/hadoop/conf"
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
-hadoop_home = "/usr"
-hadoop_bin = "/usr/lib/hadoop/sbin"
 
 task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")
 
@@ -127,8 +142,6 @@ ttnode_heapsize = "1024m"
 
 dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
 mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
 
 #log4j.properties

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
index 6109d3e..1404d27 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
@@ -63,7 +63,7 @@ def flume(action = None):
       _set_desired_state('STARTED')
       
     flume_base = format('su -s /bin/bash {flume_user} -c "export JAVA_HOME={java_home}; '
-      '/usr/bin/flume-ng agent --name {{0}} --conf {{1}} --conf-file {{2}} {{3}}"')
+      '{flume_bin} agent --name {{0}} --conf {{1}} --conf-file {{2}} {{3}}"')
 
     for agent in cmd_target_names():
       flume_agent_conf_dir = params.flume_conf_dir + os.sep + agent

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_check.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_check.py
index 3036e20..b93b8e8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_check.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume_check.py
@@ -31,7 +31,7 @@ class FlumeServiceCheck(Script):
       Execute(format("{kinit_path_local} -kt {http_keytab} 
{principal_replaced}"),
               user=params.smoke_user)
 
-    Execute(format('env JAVA_HOME={java_home} /usr/bin/flume-ng version'),
+    Execute(format('env JAVA_HOME={java_home} {flume_bin} version'),
             logoutput=True,
             tries = 3,
             try_sleep = 20)

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
index 128eed4..c1f8804 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
@@ -26,9 +26,19 @@ proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
 
 security_enabled = False
 
-java_home = config['hostLevelParams']['java_home']
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  flume_conf_dir = format('/usr/hdp/{rpm_version}/etc/flume/conf')
+  flume_bin = format('/usr/hdp/{rpm_version}/flume/bin/flume-ng')
 
-flume_conf_dir = '/etc/flume/conf'
+else:
+  flume_conf_dir = '/etc/flume/conf'
+  flume_bin = '/usr/bin/flume-ng'
+
+java_home = config['hostLevelParams']['java_home']
 flume_log_dir = '/var/log/flume'
 flume_run_dir = '/var/run/flume'
 flume_user = 'flume'

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/files/hbaseSmokeVerify.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/files/hbaseSmokeVerify.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/files/hbaseSmokeVerify.sh
index eedffd3..5c320c0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/files/hbaseSmokeVerify.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/files/hbaseSmokeVerify.sh
@@ -21,7 +21,8 @@
 #
 conf_dir=$1
 data=$2
-echo "scan 'ambarismoketest'" | hbase --config $conf_dir shell > 
/tmp/hbase_chk_verify
+hbase_cmd=$3
+echo "scan 'ambarismoketest'" | $hbase_cmd --config $conf_dir shell > 
/tmp/hbase_chk_verify
 cat /tmp/hbase_chk_verify
 echo "Looking for $data"
 grep -q $data /tmp/hbase_chk_verify

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
index 364649c..d07ebd1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
@@ -26,11 +26,32 @@ import status_params
 config = Script.get_config()
 exec_tmp_dir = Script.get_tmp_dir()
 
-hbase_conf_dir = "/etc/hbase/conf"
-daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
-region_mover = "/usr/lib/hbase/bin/region_mover.rb"
-region_drainer = "/usr/lib/hbase/bin/draining_servers.rb"
-hbase_cmd = "/usr/lib/hbase/bin/hbase"
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
+  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
+  hbase_conf_dir = format('/usr/hdp/{rpm_version}/etc/hbase/conf')
+  daemon_script = format('/usr/hdp/{rpm_version}/hbase/bin/hbase-daemon.sh')
+  region_mover = format('/usr/hdp/{rpm_version}/hbase/bin/region_mover.rb')
+  region_drainer = format('/usr/hdp/{rpm_version}/hbase/bin/draining_servers.rb')
+  hbase_cmd = format('/usr/hdp/{rpm_version}/hbase/bin/hbase')
+else:
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  hadoop_bin_dir = "/usr/bin"
+  hbase_conf_dir = "/etc/hbase/conf"
+  daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
+  region_mover = "/usr/lib/hbase/bin/region_mover.rb"
+  region_drainer = "/usr/lib/hbase/bin/draining_servers.rb"
+  hbase_cmd = "/usr/lib/hbase/bin/hbase"
+
 hbase_excluded_hosts = config['commandParams']['excluded_hosts']
 hbase_drain_only = config['commandParams']['mark_draining_only']
 hbase_included_hosts = config['commandParams']['included_hosts']
@@ -72,7 +93,7 @@ if 'slave_hosts' in config['clusterHostInfo']:
   rs_hosts = default('/clusterHostInfo/hbase_rs_hosts', '/clusterHostInfo/slave_hosts') #if hbase_rs_hosts not given it is assumed that region servers on same nodes as slaves
 else:
   rs_hosts = default('/clusterHostInfo/hbase_rs_hosts', '/clusterHostInfo/all_hosts') 
-  
+
 smoke_test_user = config['configurations']['cluster-env']['smokeuser']
 smokeuser_permissions = "RWXCA"
 service_check_data = functions.get_unique_id_and_date()
@@ -105,7 +126,6 @@ hbase_hdfs_root_dir = config['configurations']['hbase-site']['hbase.rootdir']
 hbase_staging_dir = "/apps/hbase/staging"
 #for create_hdfs_directory
 hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
@@ -119,5 +139,6 @@ HdfsDirectory = functools.partial(
   hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
 )
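The HdfsDirectory partial above now pre-binds bin_dir along with the other shared arguments, so individual call sites stay unchanged. The idiom in isolation (HdfsDirectory is Ambari's resource; a plain function stands in for it here, and the paths are illustrative):

import functools

def hdfs_directory(path, conf_dir, hdfs_user, bin_dir):
    # Stand-in for Ambari's HdfsDirectory resource.
    print("create %s (conf=%s user=%s bin=%s)" % (path, conf_dir, hdfs_user, bin_dir))

HdfsDirectory = functools.partial(
    hdfs_directory,
    conf_dir="/usr/hdp/2.2.0.0/etc/hadoop/conf",  # assumed example values
    hdfs_user="hdfs",
    bin_dir="/usr/hdp/2.2.0.0/hadoop/bin",
)

HdfsDirectory("/apps/hbase/data")     # only the varying argument is passed
HdfsDirectory("/apps/hbase/staging")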

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/service_check.py
index 8fb38f7..15a306b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/service_check.py
@@ -44,7 +44,7 @@ class HbaseServiceCheck(Script):
     
     if params.security_enabled:    
       hbase_grant_premissions_file = format("{exec_tmp_dir}/hbase_grant_permissions.sh")
-      grantprivelegecmd = format("{kinit_cmd} hbase shell {hbase_grant_premissions_file}")
+      grantprivelegecmd = format("{kinit_cmd} {hbase_cmd} shell {hbase_grant_premissions_file}")
   
       File( hbase_grant_premissions_file,
         owner   = params.hbase_user,
@@ -57,8 +57,8 @@ class HbaseServiceCheck(Script):
         user = params.hbase_user,
       )
 
-    servicecheckcmd = format("{smokeuser_kinit_cmd} hbase --config {hbase_conf_dir} shell {hbase_servicecheck_file}")
-    smokeverifycmd = format("{smokeuser_kinit_cmd} {exec_tmp_dir}/hbaseSmokeVerify.sh {hbase_conf_dir} {service_check_data}")
+    servicecheckcmd = format("{smokeuser_kinit_cmd} {hbase_cmd} --config {hbase_conf_dir} shell {hbase_servicecheck_file}")
+    smokeverifycmd = format("{smokeuser_kinit_cmd} {exec_tmp_dir}/hbaseSmokeVerify.sh {hbase_conf_dir} {service_check_data} {hbase_cmd}")
   
     Execute( servicecheckcmd,
       tries     = 3,

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/files/checkForFormat.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/files/checkForFormat.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/files/checkForFormat.sh
index d22d901..c9a3828 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/files/checkForFormat.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/files/checkForFormat.sh
@@ -24,6 +24,8 @@ export hdfs_user=$1
 shift
 export conf_dir=$1
 shift
+export bin_dir=$1
+shift
 export old_mark_dir=$1
 shift
 export mark_dir=$1
@@ -56,7 +58,7 @@ if [[ ! -d $mark_dir ]] ; then
   done
 
   if [[ $EXIT_CODE == 0 ]] ; then
-    su - ${hdfs_user} -c "yes Y | hadoop --config ${conf_dir} ${command}"
+    su - ${hdfs_user} -c "export PATH=$PATH:${bin_dir} ; yes Y | hadoop --config ${conf_dir} ${command}"
   else
     echo "ERROR: Namenode directory(s) is non empty. Will not format the 
namenode. List of non-empty namenode dirs ${list_of_non_empty_dirs}"
   fi

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
index c4b48c6..68cf4fd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
@@ -45,11 +45,11 @@ def namenode(action=None, do_format=True):
       create_log_dir=True
     )
     if params.dfs_ha_enabled:
-      dfs_check_nn_status_cmd = format("su - {hdfs_user} -c 'hdfs haadmin -getServiceState {namenode_id} | grep active > /dev/null'")
+      dfs_check_nn_status_cmd = format("su - {hdfs_user} -c 'export PATH=$PATH:{hadoop_bin_dir} ; hdfs --config {hadoop_conf_dir} haadmin -getServiceState {namenode_id} | grep active > /dev/null'")
     else:
       dfs_check_nn_status_cmd = None
 
-    namenode_safe_mode_off = format("su - {hdfs_user} -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'")
+    namenode_safe_mode_off = format("su - {hdfs_user} -c 'export PATH=$PATH:{hadoop_bin_dir} ; hadoop --config {hadoop_conf_dir} dfsadmin -safemode get' | grep 'Safe mode is OFF'")
 
     if params.security_enabled:
       Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} 
{hdfs_principal_name}"),
@@ -110,14 +110,16 @@ def format_namenode(force=None):
   if not params.dfs_ha_enabled:
     if force:
       ExecuteHadoop('namenode -format',
-                    kinit_override=True)
+                    kinit_override=True,
+                    bin_dir=params.hadoop_bin_dir,
+                    conf_dir=params.hadoop_conf_dir)
     else:
       File(format("{tmp_dir}/checkForFormat.sh"),
            content=StaticFile("checkForFormat.sh"),
            mode=0755)
       Execute(format(
-        "{tmp_dir}/checkForFormat.sh {hdfs_user} {hadoop_conf_dir} 
{old_mark_dir} "
-        "{mark_dir} {dfs_name_dir}"),
+        "{tmp_dir}/checkForFormat.sh {hdfs_user} {hadoop_conf_dir} "
+        "{hadoop_bin_dir} {old_mark_dir} {mark_dir} {dfs_name_dir}"),
               not_if=format("test -d {old_mark_dir} || test -d {mark_dir}"),
               path="/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin"
       )
@@ -154,4 +156,5 @@ def decommission():
   ExecuteHadoop(nn_refresh_cmd,
                 user=hdfs_user,
                 conf_dir=conf_dir,
-                kinit_override=True)
+                kinit_override=True,
+                bin_dir=params.hadoop_bin_dir)

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
index 8dae3eb..a0b07aa 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
@@ -88,7 +88,7 @@ class NameNode(Script):
     
     
     def startRebalancingProcess(threshold):
-      rebalanceCommand = format('hadoop --config {hadoop_conf_dir} balancer -threshold {threshold}')
+      rebalanceCommand = format('export PATH=$PATH:{hadoop_bin_dir} ; hadoop --config {hadoop_conf_dir} balancer -threshold {threshold}')
       return ['su','-',params.hdfs_user,'-c', rebalanceCommand]
     
     command = startRebalancingProcess(threshold)

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
index 47ee8ca..60198c7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
@@ -24,6 +24,28 @@ import os
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
+  hadoop_conf_empty_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf.empty")
+  mapreduce_libs_path = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/*")
+  hadoop_libexec_dir = format("/usr/hdp/{rpm_version}/hadoop/libexec")
+  hadoop_bin = format("/usr/hdp/{rpm_version}/hadoop/sbin")
+  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
+  limits_conf_dir = format("/usr/hdp/{rpm_version}/etc/security/limits.d")
+else:
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+  hadoop_bin = "/usr/lib/hadoop/sbin"
+  hadoop_bin_dir = "/usr/bin"
+  limits_conf_dir = "/etc/security/limits.d"
+
+execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
 ulimit_cmd = "ulimit -c unlimited; "
 
 #security params
@@ -100,9 +122,7 @@ proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
 nagios_group = config['configurations']['nagios-env']['nagios_group']
 
 #hadoop params
-hadoop_conf_dir = "/etc/hadoop/conf"
 hadoop_pid_dir_prefix = status_params.hadoop_pid_dir_prefix
-hadoop_bin = "/usr/lib/hadoop/sbin"
 
 hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
 hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
@@ -110,8 +130,6 @@ hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger'
 dfs_domain_socket_path = config['configurations']['hdfs-site']['dfs.domain.socket.path']
 dfs_domain_socket_dir = os.path.dirname(dfs_domain_socket_path)
 
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-
 jn_edits_dir = config['configurations']['hdfs-site']['dfs.journalnode.edits.dir']
 
 dfs_name_dir = config['configurations']['hdfs-site']['dfs.namenode.name.dir']
@@ -171,11 +189,10 @@ HdfsDirectory = functools.partial(
   hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
 )
 
-limits_conf_dir = "/etc/security/limits.d"
-
 io_compression_codecs = config['configurations']['core-site']['io.compression.codecs']
 if not "com.hadoop.compression.lzo" in io_compression_codecs:
   exclude_packages = ["lzo", "hadoop-lzo", "hadoop-lzo-native", "liblzo2-2"]
@@ -184,8 +201,6 @@ else:
 name_node_params = default("/commandParams/namenode", None)
 
 #hadoop params
-hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
-
 hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
 
 #hadoop-env.sh
@@ -209,5 +224,4 @@ ttnode_heapsize = "1024m"
 
 dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
 mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py
index 66f2ae1..18f58bd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py
@@ -31,13 +31,14 @@ class HdfsServiceCheck(Script):
 
     safemode_command = "dfsadmin -safemode get | grep OFF"
 
-    create_dir_cmd = format("fs -mkdir {dir} ; hadoop fs -chmod 777 {dir}")
-    test_dir_exists = format("hadoop fs -test -e {dir}")
+    create_dir_cmd = format("fs -mkdir {dir}")
+    chmod_command = format("fs -chmod 777 {dir}")
+    test_dir_exists = format("hadoop --config {hadoop_conf_dir} fs -test -e {dir}")
     cleanup_cmd = format("fs -rm {tmp_file}")
     #cleanup put below to handle retries; if retrying there will be a stale file
     #that needs cleanup; exit code is fn of second command
     create_file_cmd = format(
-      "{cleanup_cmd}; hadoop fs -put /etc/passwd {tmp_file}")
+      "{cleanup_cmd}; hadoop --config {hadoop_conf_dir} fs -put /etc/passwd 
{tmp_file}")
     test_cmd = format("fs -test -e {tmp_file}")
     if params.security_enabled:
       Execute(format(
@@ -48,7 +49,8 @@ class HdfsServiceCheck(Script):
                   logoutput=True,
                   conf_dir=params.hadoop_conf_dir,
                   try_sleep=3,
-                  tries=20
+                  tries=20,
+                  bin_dir=params.hadoop_bin_dir
     )
     ExecuteHadoop(create_dir_cmd,
                   user=params.smoke_user,
@@ -56,21 +58,32 @@ class HdfsServiceCheck(Script):
                   not_if=test_dir_exists,
                   conf_dir=params.hadoop_conf_dir,
                   try_sleep=3,
-                  tries=5
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
+    )
+    ExecuteHadoop(chmod_command,
+                  user=params.smoke_user,
+                  logoutput=True,
+                  conf_dir=params.hadoop_conf_dir,
+                  try_sleep=3,
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
     )
     ExecuteHadoop(create_file_cmd,
                   user=params.smoke_user,
                   logoutput=True,
                   conf_dir=params.hadoop_conf_dir,
                   try_sleep=3,
-                  tries=5
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
     )
     ExecuteHadoop(test_cmd,
                   user=params.smoke_user,
                   logoutput=True,
                   conf_dir=params.hadoop_conf_dir,
                   try_sleep=3,
-                  tries=5
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
     )
     if params.has_journalnode_hosts:
       journalnode_port = params.journalnode_port

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py
index 53a62ce..eb05481 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py
@@ -25,6 +25,12 @@ import sys
 def hcat():
   import params
 
+  Directory(params.hive_conf_dir,
+            owner=params.hcat_user,
+            group=params.user_group,
+  )
+
+
   Directory(params.hcat_conf_dir,
             owner=params.hcat_user,
             group=params.user_group,

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
index ec8faa9..ede7e27 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
@@ -45,6 +45,7 @@ def hcat_service_check():
             user=params.smokeuser,
             try_sleep=5,
             path=['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+            environment = {'PATH' : params.execute_path},
             logoutput=True)
 
     if params.security_enabled:
@@ -55,7 +56,8 @@ def hcat_service_check():
                     security_enabled=params.security_enabled,
                     kinit_path_local=params.kinit_path_local,
                     keytab=params.hdfs_user_keytab,
-                    principal=params.hdfs_principal_name
+                    principal=params.hdfs_principal_name,
+                    bin_dir=params.hive_bin
       )
     else:
       ExecuteHadoop(test_cmd,
@@ -64,7 +66,8 @@ def hcat_service_check():
                     conf_dir=params.hadoop_conf_dir,
                     security_enabled=params.security_enabled,
                     kinit_path_local=params.kinit_path_local,
-                    keytab=params.hdfs_user_keytab
+                    keytab=params.hdfs_user_keytab,
+                    bin_dir=params.hive_bin
       )
 
     cleanup_cmd = format("{kinit_cmd} {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} cleanup")
@@ -72,6 +75,7 @@ def hcat_service_check():
     Execute(cleanup_cmd,
             tries=3,
             user=params.smokeuser,
+            environment = {'PATH' : params.execute_path },
             try_sleep=5,
             path=['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
             logoutput=True

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
index 0b7fcb4..e6e5eb8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
@@ -188,6 +188,7 @@ def jdbc_connector():
     Execute(cmd,
             not_if=format("test -f {target}"),
             creates=params.target,
+            environment= {'PATH' : params.execute_path },
             path=["/bin", "/usr/bin/"])
   elif params.hive_jdbc_driver == "org.postgresql.Driver":
     cmd = format("hive mkdir -p {artifact_dir} ; cp 
/usr/share/java/{jdbc_jar_name} {target}")
@@ -195,6 +196,7 @@ def jdbc_connector():
     Execute(cmd,
             not_if=format("test -f {target}"),
             creates=params.target,
+            environment= {'PATH' : params.execute_path },
             path=["/bin", "usr/bin/"])
 
   elif params.hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_service.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_service.py
index 8507816..d88d0b0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_service.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_service.py
@@ -49,6 +49,7 @@ def hive_service(
     
     Execute(demon_cmd,
             user=params.hive_user,
+            environment= {'PATH' : params.execute_path, 'HADOOP_HOME' : params.hadoop_home },
             not_if=process_id_exists
     )
 
@@ -103,8 +104,10 @@ def hive_service(
 def check_fs_root():
   import params  
   fs_root_url = format("{fs_root}{hive_apps_whs_dir}")
-  cmd = "/usr/lib/hive/bin/metatool -listFSRoot 2>/dev/null | grep hdfs://"
+  cmd = format("metatool -listFSRoot 2>/dev/null | grep hdfs://")
   code, out = call(cmd, user=params.hive_user)
   if code == 0 and fs_root_url.strip() != out.strip():
-    cmd = format("/usr/lib/hive/bin/metatool -updateLocation 
{fs_root}{hive_apps_whs_dir} {out}")
-    Execute(cmd, user=params.hive_user)
\ No newline at end of file
+    cmd = format("metatool -updateLocation {fs_root}{hive_apps_whs_dir} {out}")
+    Execute(cmd,
+            environment= {'PATH' : params.execute_path },
+            user=params.hive_user)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
index b6d542d..3548de7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
@@ -69,7 +69,8 @@ def install_tez_jars():
                     owner=params.tez_user,
                     dest_dir=app_dir_path,
                     kinnit_if_needed=kinit_if_needed,
-                    hdfs_user=params.hdfs_user
+                    hdfs_user=params.hdfs_user,
+                    hadoop_conf_dir=params.hadoop_conf_dir
       )
     pass
 
@@ -79,7 +80,8 @@ def install_tez_jars():
                     owner=params.tez_user,
                     dest_dir=lib_dir_path,
                     kinnit_if_needed=kinit_if_needed,
-                    hdfs_user=params.hdfs_user
+                    hdfs_user=params.hdfs_user,
+                    hadoop_conf_dir=params.hadoop_conf_dir
       )
     pass
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
index b1a4a49..a38c12a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
@@ -26,6 +26,53 @@ import os
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+hdp_stack_version = config['hostLevelParams']['stack_version']
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
+  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
+  hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
+  hive_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf')
+  hive_client_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf')
+  hive_server_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf.server')
+  hive_bin = format('/usr/hdp/{rpm_version}/hive/bin')
+  hive_lib = format('/usr/hdp/{rpm_version}/hive/lib')
+  tez_local_api_jars = format('/usr/hdp/{rpm_version}/tez/tez*.jar')
+  tez_local_lib_jars = format('/usr/hdp/{rpm_version}/tez/lib/*.jar')
+
+  if str(hdp_stack_version).startswith('2.0'):
+    hcat_conf_dir = format('/usr/hdp/{rpm_version}/etc/hcatalog/conf')
+    hcat_lib = format('/usr/hdp/{rpm_version}/hive/hcatalog/share/hcatalog')
+  # for newer versions
+  else:
+    hcat_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive-hcatalog/conf')
+    hcat_lib = format('/usr/hdp/{rpm_version}/hive/hive-hcatalog/share/hcatalog')
+
+else:
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  hadoop_bin_dir = "/usr/bin"
+  hadoop_home = '/usr'
+  hive_conf_dir = "/etc/hive/conf"
+  hive_bin = '/usr/lib/hive/bin'
+  hive_lib = '/usr/lib/hive/lib/'
+  hive_client_conf_dir = "/etc/hive/conf"
+  hive_server_conf_dir = '/etc/hive/conf.server'
+  tez_local_api_jars = '/usr/lib/tez/tez*.jar'
+  tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
+
+  if str(hdp_stack_version).startswith('2.0'):
+    hcat_conf_dir = '/etc/hcatalog/conf'
+    hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
+  # for newer versions
+  else:
+    hcat_conf_dir = '/etc/hive-hcatalog/conf'
+    hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
+
+execute_path = os.environ['PATH'] + os.pathsep + hive_bin
 hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
 hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
 
@@ -34,7 +81,6 @@ hive_metastore_db_type = config['configurations']['hive-env']['hive_database_typ
 
 #users
 hive_user = config['configurations']['hive-env']['hive_user']
-hive_lib = '/usr/lib/hive/lib/'
 #JDBC driver jar name
 hive_jdbc_driver = config['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName']
 if hive_jdbc_driver == "com.mysql.jdbc.Driver":
@@ -51,11 +97,9 @@ check_db_connection_jar_name = "DBConnectionVerification.jar"
 check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
 
 #common
-hdp_stack_version = config['hostLevelParams']['stack_version']
 hive_metastore_port = get_port_from_url(config['configurations']['hive-site']['hive.metastore.uris']) #"9083"
 hive_var_lib = '/var/lib/hive'
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
-hive_bin = '/usr/lib/hive/bin'
 hive_server_host = config['clusterHostInfo']['hive_server_host'][0]
 hive_server_port = default('/configurations/hive-site/hive.server2.thrift.port',"10000")
 hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}")
@@ -77,8 +121,6 @@ hive_log_dir = config['configurations']['hive-env']['hive_log_dir']
 hive_pid_dir = status_params.hive_pid_dir
 hive_pid = status_params.hive_pid
 #Default conf dir for client
-hive_client_conf_dir = "/etc/hive/conf"
-hive_server_conf_dir = "/etc/hive/conf.server"
 hive_conf_dirs_list = [hive_server_conf_dir, hive_client_conf_dir]
 
 if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
@@ -92,8 +134,6 @@ hive_database_name = config['configurations']['hive-env']['hive_database_name']
 #Starting hiveserver2
 start_hiveserver2_script = 'startHiveserver2.sh.j2'
 
-hadoop_home = '/usr'
-
 ##Starting metastore
 start_metastore_script = 'startMetastore.sh'
 hive_metastore_pid = status_params.hive_metastore_pid
@@ -133,15 +173,6 @@ else:
 
 ########## HCAT
 
-if str(hdp_stack_version).startswith('2.0'):
-  hcat_conf_dir = '/etc/hcatalog/conf'
-  hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
-# for newer versions
-else:
-  hcat_conf_dir = '/etc/hive-hcatalog/conf'
-  hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
-
-
 hcat_dbroot = hcat_lib
 
 hcat_user = config['configurations']['hive-env']['hcat_user']
@@ -150,8 +181,6 @@ webhcat_user = config['configurations']['hive-env']['webhcat_user']
 hcat_pid_dir = status_params.hcat_pid_dir
 hcat_log_dir = config['configurations']['hive-env']['hcat_log_dir']
 
-hadoop_conf_dir = '/etc/hadoop/conf'
-
 #hive-log4j.properties.template
 if (('hive-log4j' in config['configurations']) and ('content' in config['configurations']['hive-log4j'])):
   log4j_props = config['configurations']['hive-log4j']['content']
@@ -172,7 +201,6 @@ hive_hdfs_user_mode = 0700
 hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]
 #for create_hdfs_directory
 hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
@@ -180,8 +208,6 @@ kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/
 
 # Tez libraries
 tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
-tez_local_api_jars = '/usr/lib/tez/tez*.jar'
-tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
 tez_user = config['configurations']['tez-env']['tez_user']
 
 if System.get_instance().os_family == "ubuntu":
@@ -205,13 +231,12 @@ else:
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
 HdfsDirectory = functools.partial(
   HdfsDirectory,
   conf_dir=hadoop_conf_dir,
   hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
 )
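
The hunk above extends the shared HdfsDirectory partial so that every call
site also receives the versioned bin_dir. A minimal sketch of the pattern,
with a simplified stand-in for the real resource and hypothetical paths:

    import functools

    def hdfs_directory(path, conf_dir=None, hdfs_user=None, bin_dir=None):
        # Stand-in for the real resource: show which pre-bound values a call gets.
        print("%s/hadoop --config %s fs -mkdir %s (as %s)"
              % (bin_dir, conf_dir, path, hdfs_user))

    HdfsDirectory = functools.partial(
        hdfs_directory,
        conf_dir="/usr/hdp/2.2.0.0/etc/hadoop/conf",  # hypothetical versioned path
        hdfs_user="hdfs",
        bin_dir="/usr/hdp/2.2.0.0/hadoop/bin",        # hypothetical versioned path
    )

    HdfsDirectory("/apps/hive/warehouse")  # only the path varies per call site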

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/templates/startHiveserver2.sh.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/templates/startHiveserver2.sh.j2 b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/templates/startHiveserver2.sh.j2
index a8fe21c..3ddf50f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/templates/startHiveserver2.sh.j2
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/templates/startHiveserver2.sh.j2
@@ -25,5 +25,5 @@ HIVE_SERVER2_OPTS=" -hiveconf hive.log.file=hiveserver2.log -hiveconf hive.log.d
 HIVE_SERVER2_OPTS="${HIVE_SERVER2_OPTS} -hiveconf hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator -hiveconf hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory "
 {% endif %}
 
-HIVE_CONF_DIR=$4 /usr/lib/hive/bin/hiveserver2 -hiveconf hive.metastore.uris=" " ${HIVE_SERVER2_OPTS} > $1 2> $2 &
+HIVE_CONF_DIR=$4 {{hive_bin}}/hiveserver2 -hiveconf hive.metastore.uris=" " ${HIVE_SERVER2_OPTS} > $1 2> $2 &
 echo $!|cat>$3
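
The template now takes the hiveserver2 location from a {{hive_bin}}
placeholder instead of the hard-coded /usr/lib/hive/bin, so the rendered
script follows whichever layout params.py selected. A small sketch of how
such a placeholder is filled, using plain jinja2 and a hypothetical
versioned path:

    from jinja2 import Template

    tmpl = Template("HIVE_CONF_DIR=$4 {{hive_bin}}/hiveserver2 ${HIVE_SERVER2_OPTS} &")
    # Shell syntax like ${...} and $4 is left untouched; only {{hive_bin}} is rendered.
    print(tmpl.render(hive_bin="/usr/hdp/2.2.0.0/hive/bin"))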

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/configuration/oozie-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/configuration/oozie-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/configuration/oozie-env.xml
index fc47a70..9631f0d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/configuration/oozie-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/configuration/oozie-env.xml
@@ -122,7 +122,7 @@ export OOZIE_ADMIN_PORT={{oozie_server_admin_port}}
 # The base URL for callback URLs to Oozie
 #
 # export OOZIE_BASE_URL="http://${OOZIE_HTTP_HOSTNAME}:${OOZIE_HTTP_PORT}/oozie";
-export JAVA_LIBRARY_PATH=/usr/lib/hadoop/lib/native/Linux-amd64-64
+export JAVA_LIBRARY_PATH={{hadoop_lib_home}}/native/Linux-amd64-64
     </value>
   </property>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/files/oozieSmoke2.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/files/oozieSmoke2.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/files/oozieSmoke2.sh
index 0a80d0f..6d43880 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/files/oozieSmoke2.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/files/oozieSmoke2.sh
@@ -93,10 +93,10 @@ else
   kinitcmd=""
 fi
 
-su - ${smoke_test_user} -c "hdfs dfs -rm -r examples"
-su - ${smoke_test_user} -c "hdfs dfs -rm -r input-data"
-su - ${smoke_test_user} -c "hdfs dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
-su - ${smoke_test_user} -c "hdfs dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data input-data"
+su - ${smoke_test_user} -c "hdfs --config ${hadoop_conf_dir} dfs -rm -r examples"
+su - ${smoke_test_user} -c "hdfs --config ${hadoop_conf_dir} dfs -rm -r input-data"
+su - ${smoke_test_user} -c "hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
+su - ${smoke_test_user} -c "hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data input-data"
 
 cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; /usr/bin/oozie 
-Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config 
$OOZIE_EXAMPLES_DIR/examples/apps/map-reduce/job.properties  -run"
 echo $cmd

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_service.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_service.py
index 78661b0..bbbedbe 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_service.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie_service.py
@@ -37,7 +37,7 @@ def oozie_service(action = 'start'): # 'start' or 'stop'
       db_connection_check_command = None
       
     cmd1 =  format("cd {oozie_tmp_dir} && /usr/lib/oozie/bin/ooziedb.sh create -sqlfile oozie.sql -run")
-    cmd2 =  format("{kinit_if_needed} {put_shared_lib_to_hdfs_cmd} ; hadoop dfs -chmod -R 755 {oozie_hdfs_user_dir}/share")
+    cmd2 =  format("{kinit_if_needed} {put_shared_lib_to_hdfs_cmd} ; hadoop --config {hadoop_conf_dir} dfs -chmod -R 755 {oozie_hdfs_user_dir}/share")
 
     if not os.path.isfile(params.jdbc_driver_jar) and params.jdbc_driver_name == "org.postgresql.Driver":
       print "ERROR: jdbc file " + params.jdbc_driver_jar + " is unavailable. Please, follow next steps:\n" \
@@ -58,7 +58,7 @@ def oozie_service(action = 'start'): # 'start' or 'stop'
     
     Execute( cmd2,
       user = params.oozie_user,       
-      not_if = format("{kinit_if_needed} hadoop dfs -ls /user/oozie/share | awk 'BEGIN {{count=0;}} /share/ {{count++}} END {{if (count > 0) {{exit 0}} else {{exit 1}}}}'")
+      not_if = format("{kinit_if_needed} hadoop --config {hadoop_conf_dir} dfs -ls /user/oozie/share | awk 'BEGIN {{count=0;}} /share/ {{count++}} END {{if (count > 0) {{exit 0}} else {{exit 1}}}}'")
     )
     
     Execute( start_cmd,
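
Both the sharelib upload command and its not_if guard now pass --config
{hadoop_conf_dir}, so the idempotence check and the action consult the same
(possibly versioned) configuration. A rough sketch of the guard pattern,
with subprocess standing in for resource_management's Execute and a trivial
hypothetical guard command:

    import subprocess

    def execute(cmd, not_if=None):
        # Skip cmd when the guard exits 0; this is what keeps the upload idempotent.
        if not_if and subprocess.call(not_if, shell=True) == 0:
            return
        subprocess.call(cmd, shell=True)

    execute("echo uploading sharelib",
            not_if="test -e /tmp/sharelib.uploaded")  # hypothetical guard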

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
index a484c0e..ac26ede 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
@@ -25,15 +25,28 @@ import status_params
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
+  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
+  hadoop_lib_home = format("/usr/hdp/{rpm_version}/hadoop/lib")
+  mapreduce_libs_path = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/*")
+else:
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  hadoop_bin_dir = "/usr/bin"
+  hadoop_lib_home = "/usr/lib/hadoop/lib"
+  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+
 oozie_user = config['configurations']['oozie-env']['oozie_user']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 conf_dir = "/etc/oozie/conf"
-hadoop_conf_dir = "/etc/hadoop/conf"
 user_group = config['configurations']['cluster-env']['user_group']
 jdk_location = config['hostLevelParams']['jdk_location']
 check_db_connection_jar_name = "DBConnectionVerification.jar"
 check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
-hadoop_prefix = "/usr"
 oozie_tmp_dir = "/var/tmp/oozie"
 oozie_hdfs_user_dir = format("/user/{oozie_user}")
 oozie_pid_dir = status_params.oozie_pid_dir
@@ -53,7 +66,6 @@ oozie_keytab = config['configurations']['oozie-env']['oozie_keytab']
 oozie_env_sh_template = config['configurations']['oozie-env']['content']
 
 oracle_driver_jar_name = "ojdbc6.jar"
-java_share_dir = "/usr/share/java"
 
 java_home = config['hostLevelParams']['java_home']
 oozie_metastore_user_name = config['configurations']['oozie-site']['oozie.service.JPAService.jdbc.username']
@@ -71,7 +83,7 @@ oozie_shared_lib = "/usr/lib/oozie/share"
 fs_root = config['configurations']['core-site']['fs.defaultFS']
 
 if str(hdp_stack_version).startswith('2.0') or str(hdp_stack_version).startswith('2.1'):
-  put_shared_lib_to_hdfs_cmd = format("hadoop dfs -put {oozie_shared_lib} {oozie_hdfs_user_dir}")
+  put_shared_lib_to_hdfs_cmd = format("hadoop --config {hadoop_conf_dir} dfs -put {oozie_shared_lib} {oozie_hdfs_user_dir}")
 # for newer
 else:
   put_shared_lib_to_hdfs_cmd = format("{oozie_setup_sh} sharelib create -fs {fs_root} -locallib {oozie_shared_lib}")
@@ -103,7 +115,6 @@ oozie_hdfs_user_dir = format("/user/{oozie_user}")
 oozie_hdfs_user_mode = 0775
 #for create_hdfs_directory
 hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
@@ -117,5 +128,6 @@ HdfsDirectory = functools.partial(
   hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
 )
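
This rpm_version gate is the core pattern of the change, repeated in each
service's params.py: when hadoop-env supplies rpm_version, every path is
rooted under /usr/hdp/<version>; otherwise the legacy flat layout is kept.
A self-contained sketch, where default() is a stand-in for the
resource_management helper and the version string is hypothetical:

    def default(path, fallback,
                _config={"/configurations/hadoop-env/rpm_version": "2.2.0.0"}):
        # Stand-in for resource_management's default(): config lookup with fallback.
        return _config.get(path, fallback)

    rpm_version = default("/configurations/hadoop-env/rpm_version", None)

    if rpm_version is not None:
        hadoop_bin_dir = "/usr/hdp/%s/hadoop/bin" % rpm_version  # versioned RPMs
    else:
        hadoop_bin_dir = "/usr/bin"                              # legacy layout

    print(hadoop_bin_dir)  # -> /usr/hdp/2.2.0.0/hadoop/bin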

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
index 1b522b8..d1f8b75 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
@@ -25,8 +25,23 @@ from resource_management import *
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-pig_conf_dir = "/etc/pig/conf"
-hadoop_conf_dir = "/etc/hadoop/conf"
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
+  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
+  hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
+  pig_conf_dir = format('/usr/hdp/{rpm_version}/etc/pig/conf')
+  pig_bin_dir = format('/usr/hdp/{rpm_version}/pig/bin')
+else:
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  hadoop_bin_dir = "/usr/bin"
+  hadoop_home = '/usr'
+  pig_conf_dir = "/etc/pig/conf"
+  pig_bin_dir = ""
+
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
@@ -38,7 +53,6 @@ pig_env_sh_template = config['configurations']['pig-env']['content']
 
 # not supporting 32 bit jdk.
 java64_home = config['hostLevelParams']['java_home']
-hadoop_home = "/usr"
 
 pig_properties = config['configurations']['pig-properties']['content']
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/service_check.py
index 8431b6d..7619bd6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/service_check.py
@@ -31,7 +31,7 @@ class PigServiceCheck(Script):
 
     cleanup_cmd = format("dfs -rmr {output_file} {input_file}")
     #cleanup put below to handle retries; if retrying there wil be a stale file that needs cleanup; exit code is fn of second command
-    create_file_cmd = format("{cleanup_cmd}; hadoop dfs -put /etc/passwd {input_file} ") #TODO: inconsistent that second command needs hadoop
+    create_file_cmd = format("{cleanup_cmd}; hadoop --config {hadoop_conf_dir} dfs -put /etc/passwd {input_file} ") #TODO: inconsistent that second command needs hadoop
     test_cmd = format("fs -test -e {output_file}")
 
     ExecuteHadoop( create_file_cmd,
@@ -42,7 +42,8 @@ class PigServiceCheck(Script):
       # for kinit run
       keytab = params.smoke_user_keytab,
       security_enabled = params.security_enabled,
-      kinit_path_local = params.kinit_path_local
+      kinit_path_local = params.kinit_path_local,
+      bin_dir = params.hadoop_bin_dir
     )
 
     File( format("{tmp_dir}/pigSmoke.sh"),
@@ -53,13 +54,14 @@ class PigServiceCheck(Script):
     Execute( format("pig {tmp_dir}/pigSmoke.sh"),
       tries     = 3,
       try_sleep = 5,
-      path      = '/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
+      path      = format('{pig_bin_dir}:/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'),
       user      = params.smokeuser
     )
 
     ExecuteHadoop( test_cmd,
       user      = params.smokeuser,
-      conf_dir = params.hadoop_conf_dir
+      conf_dir = params.hadoop_conf_dir,
+      bin_dir = params.hadoop_bin_dir
     )
 
 if __name__ == "__main__":
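
Since pig may not be on the default PATH under the versioned layout, the
smoke test prepends pig_bin_dir to the search path rather than hard-coding
/usr/bin. The same composition in plain Python, with a hypothetical
versioned directory:

    import os

    pig_bin_dir = "/usr/hdp/2.2.0.0/pig/bin"  # hypothetical versioned dir
    search_path = pig_bin_dir + os.pathsep + "/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin"
    # A command run with PATH=search_path resolves `pig` to the versioned binary first.
    print(search_path)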

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
index 144a587..9170fdc 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
@@ -21,6 +21,15 @@ from resource_management import *
 
 config = Script.get_config()
 
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  zoo_conf_dir = format('/usr/hdp/{rpm_version}/etc/zookeeper')
+else:
+  zoo_conf_dir = "/etc/zookeeper"
+
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 user_group = config['configurations']['cluster-env']['user_group']
@@ -29,7 +38,6 @@ sqoop_env_sh_template = config['configurations']['sqoop-env']['content']
 sqoop_conf_dir = "/usr/lib/sqoop/conf"
 hbase_home = "/usr"
 hive_home = "/usr"
-zoo_conf_dir = "/etc/zookeeper"
 sqoop_lib = "/usr/lib/sqoop/lib"
 sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-env.xml
index 304bbb7..1dba691 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-env.xml
@@ -47,7 +47,7 @@ CONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log
 #HCAT_PREFIX=hive_prefix
 
 # Set HADOOP_HOME to point to a specific hadoop install directory
-export HADOOP_HOME=/usr/lib/hadoop
+export HADOOP_HOME={{hadoop_home}}
     </value>
   </property>
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
index a7959f0..f37ac27 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
@@ -26,16 +26,36 @@ import status_params
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-hcat_user = config['configurations']['hive-env']['hcat_user']
-webhcat_user = config['configurations']['hive-env']['webhcat_user']
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
 
-if str(config['hostLevelParams']['stack_version']).startswith('2.0'):
-  config_dir = '/etc/hcatalog/conf'
-  webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
-# for newer versions
+#hadoop params
+hdp_stack_version = config['hostLevelParams']['stack_version']
+if rpm_version is not None:
+  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
+  hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
+  hadoop_streeming_jars = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/hadoop-streaming-*.jar")
+  if str(hdp_stack_version).startswith('2.0'):
+    config_dir = format('/usr/hdp/{rpm_version}/etc/hcatalog/conf')
+    webhcat_bin_dir = format('/usr/hdp/{rpm_version}/hive/hcatalog/sbin')
+  # for newer versions
+  else:
+    config_dir = format('/usr/hdp/{rpm_version}/etc/hive-webhcat/conf')
+    webhcat_bin_dir = format('/usr/hdp/{rpm_version}/hive/hive-hcatalog/sbin')
 else:
-  config_dir = '/etc/hive-webhcat/conf'
-  webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
+  hadoop_bin_dir = "/usr/bin"
+  hadoop_home = '/usr'
+  hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
+  if str(hdp_stack_version).startswith('2.0'):
+    config_dir = '/etc/hcatalog/conf'
+    webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
+  # for newer versions
+  else:
+    config_dir = '/etc/hive-webhcat/conf'
+    webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
+
+hcat_user = config['configurations']['hive-env']['hcat_user']
+webhcat_user = config['configurations']['hive-env']['webhcat_user']
 
 webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
 templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
@@ -46,7 +66,6 @@ pid_file = status_params.pid_file
 hadoop_conf_dir = config['configurations']['webhcat-site']['templeton.hadoop.conf.dir']
 templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
 
-hadoop_home = '/usr'
 user_group = config['configurations']['cluster-env']['user_group']
 
 webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
@@ -64,7 +83,6 @@ webhcat_hdfs_user_mode = 0755
 webhcat_apps_dir = "/apps/webhcat"
 #for create_hdfs_directory
 hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
 security_param = "true" if security_enabled else "false"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
@@ -79,5 +97,6 @@ HdfsDirectory = functools.partial(
   hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
 )
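
WebHCat needs a two-level decision: first the packaging layout
(rpm_version), then the stack line (2.0 versus newer), because the
hcatalog packages were renamed between releases. A compact sketch of the
nesting, with hypothetical inputs:

    rpm_version = "2.2.0.0"  # hypothetical; None would mean legacy packages
    hdp_stack_version = "2.2"

    if rpm_version is not None:
        root = "/usr/hdp/%s" % rpm_version
        if str(hdp_stack_version).startswith('2.0'):
            config_dir = root + "/etc/hcatalog/conf"      # old package name
        else:
            config_dir = root + "/etc/hive-webhcat/conf"  # renamed in newer stacks
    else:
        config_dir = ('/etc/hcatalog/conf'
                      if str(hdp_stack_version).startswith('2.0')
                      else '/etc/hive-webhcat/conf')

    print(config_dir)  # -> /usr/hdp/2.2.0.0/etc/hive-webhcat/conf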

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
index 3092735..c56ae5f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
@@ -84,12 +84,13 @@ def webhcat():
             path='/bin'
     )
 
-  CopyFromLocal('/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar',
+  CopyFromLocal(params.hadoop_streeming_jars,
                 owner=params.webhcat_user,
                 mode=0755,
                 dest_dir=params.webhcat_apps_dir,
                 kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user
+                hdfs_user=params.hdfs_user,
+                hadoop_conf_dir=params.hadoop_conf_dir
   )
 
   CopyFromLocal('/usr/share/HDP-webhcat/pig.tar.gz',
@@ -97,7 +98,8 @@ def webhcat():
                 mode=0755,
                 dest_dir=params.webhcat_apps_dir,
                 kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user
+                hdfs_user=params.hdfs_user,
+                hadoop_conf_dir=params.hadoop_conf_dir
   )
 
   CopyFromLocal('/usr/share/HDP-webhcat/hive.tar.gz',
@@ -105,5 +107,6 @@ def webhcat():
                 mode=0755,
                 dest_dir=params.webhcat_apps_dir,
                 kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user
+                hdfs_user=params.hdfs_user,
+                hadoop_conf_dir=params.hadoop_conf_dir
   )
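
Each CopyFromLocal now also receives hadoop_conf_dir, so the underlying
copy runs against the selected configuration instead of whatever
/etc/hadoop/conf happens to hold. A sketch of the kind of command such a
resource would plausibly build (all paths hypothetical):

    hadoop_conf_dir = "/usr/hdp/2.2.0.0/etc/hadoop/conf"
    src = "/usr/hdp/2.2.0.0/hadoop-mapreduce/hadoop-streaming-*.jar"
    dest_dir = "/apps/webhcat"

    # Threading the conf dir through keeps the fs commands on the chosen stack.
    copy_cmd = "hadoop --config %s fs -copyFromLocal %s %s" % (hadoop_conf_dir, src, dest_dir)
    print(copy_cmd)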

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
index 313ed94..f8d670e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
@@ -18,6 +18,7 @@ limitations under the License.
 Ambari Agent
 
 """
+import os
 
 from resource_management import *
 import status_params
@@ -26,7 +27,34 @@ import status_params
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-config_dir = "/etc/hadoop/conf"
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
+  hadoop_libexec_dir = format("/usr/hdp/{rpm_version}/hadoop/libexec")
+  hadoop_bin = format("/usr/hdp/{rpm_version}/hadoop/sbin")
+  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
+  limits_conf_dir = format("/usr/hdp/{rpm_version}/etc/security/limits.d")
+  hadoop_yarn_home = format('/usr/hdp/{rpm_version}/hadoop-yarn')
+  hadoop_mapred2_jar_location = format('/usr/hdp/{rpm_version}/hadoop-mapreduce')
+  mapred_bin = format('/usr/hdp/{rpm_version}/hadoop-mapreduce/sbin')
+  yarn_bin = format('/usr/hdp/{rpm_version}/hadoop-yarn/sbin')
+  yarn_container_bin = format('/usr/hdp/{rpm_version}/hadoop-yarn/bin')
+else:
+  hadoop_conf_dir = "/etc/hadoop/conf"
+  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+  hadoop_bin = "/usr/lib/hadoop/sbin"
+  hadoop_bin_dir = "/usr/bin"
+  limits_conf_dir = "/etc/security/limits.d"
+  hadoop_yarn_home = '/usr/lib/hadoop-yarn'
+  hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
+  mapred_bin = "/usr/lib/hadoop-mapreduce/sbin"
+  yarn_bin = "/usr/lib/hadoop-yarn/sbin"
+  yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
+
+execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
 
 ulimit_cmd = "ulimit -c unlimited;"
 
@@ -49,8 +77,6 @@ rm_nodes_exclude_path = default("/configurations/yarn-site/yarn.resourcemanager.
 java64_home = config['hostLevelParams']['java_home']
 hadoop_ssl_enabled = default("/configurations/core-site/hadoop.ssl.enabled", False)
 
-hadoop_libexec_dir = '/usr/lib/hadoop/libexec'
-hadoop_yarn_home = '/usr/lib/hadoop-yarn'
 yarn_heapsize = config['configurations']['yarn-env']['yarn_heapsize']
 resourcemanager_heapsize = config['configurations']['yarn-env']['resourcemanager_heapsize']
 nodemanager_heapsize = config['configurations']['yarn-env']['nodemanager_heapsize']
@@ -77,8 +103,6 @@ hs_webui_address = config['configurations']['mapred-site']['mapreduce.jobhistory
 nm_local_dirs = config['configurations']['yarn-site']['yarn.nodemanager.local-dirs']
 nm_log_dirs = config['configurations']['yarn-site']['yarn.nodemanager.log-dirs']
 
-
-hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
 distrAppJarName = "hadoop-yarn-applications-distributedshell-2.*.jar"
 hadoopMapredExamplesJarName = "hadoop-mapreduce-examples-2.*.jar"
 
@@ -90,13 +114,7 @@ yarn_log_dir = format("{yarn_log_dir_prefix}/{yarn_user}")
 mapred_job_summary_log = format("{mapred_log_dir_prefix}/{mapred_user}/hadoop-mapreduce.jobsummary.log")
 yarn_job_summary_log = format("{yarn_log_dir_prefix}/{yarn_user}/hadoop-mapreduce.jobsummary.log")
 
-mapred_bin = "/usr/lib/hadoop-mapreduce/sbin"
-yarn_bin = "/usr/lib/hadoop-yarn/sbin"
-
 user_group = config['configurations']['cluster-env']['user_group']
-limits_conf_dir = "/etc/security/limits.d"
-hadoop_conf_dir = "/etc/hadoop/conf"
-yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
 
 #exclude file
 exclude_hosts = default("/clusterHostInfo/decom_nm_hosts", [])
@@ -128,7 +146,6 @@ jobhistory_heapsize = default("/configurations/mapred-env/jobhistory_heapsize",
 
 #for create_hdfs_directory
 hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
@@ -142,11 +159,11 @@ HdfsDirectory = functools.partial(
   hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
 )
 update_exclude_file_only = config['commandParams']['update_exclude_file_only']
 
-hadoop_bin = "/usr/lib/hadoop/sbin"
 mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
 
 #taskcontroller.cfg
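
execute_path is built once in params.py by appending hadoop_bin_dir to the
agent's PATH; YARN commands then run with that environment instead of
spelling out absolute binary paths. A minimal sketch with a hypothetical
versioned directory:

    import os

    hadoop_bin_dir = "/usr/hdp/2.2.0.0/hadoop/bin"  # hypothetical versioned dir
    execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
    # A command run with {'PATH': execute_path} resolves hadoop/yarn/mapred to
    # the versioned binaries while still finding the usual system tools.
    print(execute_path)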

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/resourcemanager.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/resourcemanager.py
index af678d0..4d40d68 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/resourcemanager.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/resourcemanager.py
@@ -78,10 +78,10 @@ class Resourcemanager(Script):
     env.set_params(params)
     rm_kinit_cmd = params.rm_kinit_cmd
     yarn_user = params.yarn_user
-    conf_dir = params.config_dir
+    conf_dir = params.hadoop_conf_dir
     user_group = params.user_group
 
-    yarn_refresh_cmd = format("{rm_kinit_cmd} /usr/bin/yarn --config {conf_dir} rmadmin -refreshNodes")
+    yarn_refresh_cmd = format("{rm_kinit_cmd} yarn --config {conf_dir} rmadmin -refreshNodes")
 
     File(params.exclude_file_path,
          content=Template("exclude_hosts_list.j2"),
@@ -91,6 +91,7 @@ class Resourcemanager(Script):
 
     if params.update_exclude_file_only == False:
       Execute(yarn_refresh_cmd,
+            environment= {'PATH' : params.execute_path },
             user=yarn_user)
       pass
     pass
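
The refreshNodes call shows the two changes made together throughout the
commit: drop the /usr/bin/ prefix from the command and supply PATH via the
Execute environment. A stand-in for that call shape, with subprocess
replacing resource_management's Execute and echo replacing the real yarn
binary:

    import os, subprocess

    execute_path = os.environ['PATH'] + os.pathsep + "/usr/hdp/2.2.0.0/hadoop/bin"
    env = dict(os.environ, PATH=execute_path)
    # `yarn` would be found through the adjusted PATH, not hard-coded as /usr/bin/yarn.
    subprocess.call("echo yarn --config /etc/hadoop/conf rmadmin -refreshNodes",
                    shell=True, env=env)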

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service.py
index 42a7138..466f637 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service.py
@@ -35,7 +35,7 @@ def service(componentName, action='start', serviceName='yarn'):
     pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-{componentName}.pid")
     usr = params.yarn_user
 
-  cmd = format("export HADOOP_LIBEXEC_DIR={hadoop_libexec_dir} && {daemon} --config {config_dir}")
+  cmd = format("export HADOOP_LIBEXEC_DIR={hadoop_libexec_dir} && {daemon} --config {hadoop_conf_dir}")
 
   if action == 'start':
     daemon_cmd = format("{ulimit_cmd} {cmd} start {componentName}")

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d9feb6a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py
index 2ed67ab..7e535a5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py
@@ -27,7 +27,7 @@ class ServiceCheck(Script):
     import params
     env.set_params(params)
 
-    run_yarn_check_cmd = "/usr/bin/yarn node -list"
+    run_yarn_check_cmd = format("yarn --config {hadoop_conf_dir} node -list")
 
     component_type = 'rm'
     if params.hadoop_ssl_enabled:
@@ -60,6 +60,7 @@ class ServiceCheck(Script):
     )
 
     Execute(run_yarn_check_cmd,
+            environment= {'PATH' : params.execute_path },
             user=params.smokeuser
     )
 
