AMBARI-7407 Move service configs to /etc/ for versioned RPMs (dsen)

Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d7ed4316
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d7ed4316
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d7ed4316

Branch: refs/heads/branch-alerts-dev
Commit: d7ed4316202b6af7e6157ec97f9f137b43ad4eb0
Parents: 5da5019
Author: Dmytro Sen <d...@hortonworks.com>
Authored: Fri Sep 19 19:12:06 2014 +0300
Committer: Dmytro Sen <d...@hortonworks.com>
Committed: Fri Sep 19 19:12:06 2014 +0300

----------------------------------------------------------------------
 .../resource_management/TestCopyFromLocal.py    |  8 +--
 .../TestExecuteHadoopResource.py                | 12 ++--
 .../resource_management/TestExecuteResource.py  |  3 +-
 .../core/providers/system.py                    | 12 ++--
 .../python/resource_management/core/shell.py    | 24 ++++---
 .../libraries/providers/copy_from_local.py      |  4 ++
 .../libraries/providers/execute_hadoop.py       |  6 +-
 .../libraries/providers/hdfs_directory.py       |  6 +-
 .../libraries/resources/copy_from_local.py      |  1 +
 .../2.0.6/hooks/after-INSTALL/scripts/params.py | 11 ++-
 .../scripts/shared_initialization.py            |  4 ++
 .../2.0.6/hooks/before-START/scripts/params.py  | 13 ++--
 .../services/FLUME/package/scripts/flume.py     |  2 +-
 .../services/FLUME/package/scripts/params.py    |  6 +-
 .../services/HBASE/package/scripts/params.py    | 16 ++---
 .../services/HDFS/package/scripts/params.py     | 18 +++--
 .../HDFS/package/scripts/service_check.py       |  2 +-
 .../2.0.6/services/HIVE/package/scripts/hcat.py |  2 +
 .../HIVE/package/scripts/hcat_service_check.py  | 10 ++-
 .../HIVE/package/scripts/install_jars.py        |  2 +
 .../services/HIVE/package/scripts/params.py     | 70 ++++++++------------
 .../services/HIVE/package/scripts/webhcat.py    |  4 ++
 .../services/PIG/package/scripts/params.py      | 12 ++--
 .../2.0.6/services/PIG/package/scripts/pig.py   |  1 +
 .../services/SQOOP/package/scripts/params.py    | 11 ++-
 .../package/scripts/mapred_service_check.py     |  4 ++
 .../services/YARN/package/scripts/params.py     | 24 +++----
 .../YARN/package/scripts/service_check.py       |  2 +-
 .../ZOOKEEPER/package/scripts/params.py         |  7 +-
 .../services/FALCON/package/scripts/params.py   | 12 ++--
 .../services/STORM/package/scripts/params.py    | 11 ++-
 .../stacks/HDP/2.2/services/FALCON/metainfo.xml |  2 +-
 .../stacks/HDP/2.2/services/FLUME/metainfo.xml  |  2 +-
 .../stacks/HDP/2.2/services/HBASE/metainfo.xml  |  2 +-
 .../services/HDFS/configuration/hadoop-env.xml  |  2 +-
 .../services/HDFS/configuration/hdfs-site.xml   | 34 ----------
 .../stacks/HDP/2.2/services/HDFS/metainfo.xml   |  4 +-
 .../HIVE/configuration/webhcat-site.xml         | 14 ++--
 .../stacks/HDP/2.2/services/HIVE/metainfo.xml   |  6 +-
 .../services/OOZIE/configuration/oozie-site.xml | 13 ----
 .../stacks/HDP/2.2/services/OOZIE/metainfo.xml  |  6 +-
 .../stacks/HDP/2.2/services/PIG/metainfo.xml    |  2 +-
 .../services/SLIDER/package/scripts/params.py   |  4 +-
 .../stacks/HDP/2.2/services/SQOOP/metainfo.xml  |  2 +-
 .../services/STORM/configuration/storm-env.xml  | 29 --------
 .../services/STORM/configuration/storm-site.xml |  6 +-
 .../stacks/HDP/2.2/services/STORM/metainfo.xml  |  2 +-
 .../stacks/HDP/2.2/services/TEZ/metainfo.xml    |  2 +-
 .../HDP/2.2/services/WEBHCAT/metainfo.xml       | 43 ------------
 .../YARN/configuration-mapred/mapred-site.xml   | 11 ++-
 .../services/YARN/configuration/yarn-site.xml   | 11 +--
 .../stacks/HDP/2.2/services/YARN/metainfo.xml   |  6 +-
 .../HDP/2.2/services/ZOOKEEPER/metainfo.xml     |  2 +-
 .../stacks/1.3.2/HDFS/test_service_check.py     |  2 +-
 .../python/stacks/2.0.6/FLUME/test_flume.py     |  4 +-
 .../stacks/2.0.6/HDFS/test_service_check.py     |  2 +-
 .../stacks/2.0.6/HIVE/test_hcat_client.py       |  4 ++
 .../stacks/2.0.6/HIVE/test_hive_metastore.py    |  9 ++-
 .../stacks/2.0.6/HIVE/test_hive_server.py       | 12 ++--
 .../2.0.6/HIVE/test_hive_service_check.py       | 17 ++---
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    |  8 +++
 .../python/stacks/2.0.6/PIG/test_pig_client.py  |  2 +
 .../2.0.6/YARN/test_mapreduce2_service_check.py |  8 +++
 .../2.0.6/YARN/test_yarn_service_check.py       |  4 +-
 .../stacks/2.1/HIVE/test_hive_metastore.py      |  8 +--
 65 files changed, 249 insertions(+), 356 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
----------------------------------------------------------------------
diff --git 
a/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py 
b/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
index 6862db3..3991bae 100644
--- a/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
+++ b/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
@@ -36,10 +36,10 @@ class TestCopyFromLocal(TestCase):
       call_arg_list = execute_hadoop_mock.call_args_list
       self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
                        call_arg_list[0][0][0].command)
-      self.assertEquals({'not_if': "su - user1 -c ' hadoop fs -ls 
/apps/test//*.files' >/dev/null 2>&1", 'user': 'user1', 'conf_dir': 
'/etc/hadoop/conf'},
+      self.assertEquals({'not_if': "su - user1 -c ' hadoop fs -ls 
/apps/test//*.files' >/dev/null 2>&1", 'user': 'user1', 'bin_dir': '/usr/bin', 
'conf_dir': '/etc/hadoop/conf'},
                         call_arg_list[0][0][0].arguments)
       self.assertEquals('fs -chown user1 /apps/test//*.files', 
call_arg_list[1][0][0].command)
-      self.assertEquals({'user': 'hdfs', 'conf_dir': '/etc/hadoop/conf'}, 
call_arg_list[1][0][0].arguments)
+      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': 
'/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
 
 
   
@patch("resource_management.libraries.providers.execute_hadoop.ExecuteHadoopProvider")
@@ -57,9 +57,9 @@ class TestCopyFromLocal(TestCase):
       call_arg_list = execute_hadoop_mock.call_args_list
       self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
                        call_arg_list[0][0][0].command)
-      self.assertEquals({'not_if': "su - user1 -c ' hadoop fs -ls 
/apps/test//*.files' >/dev/null 2>&1", 'user': 'user1', 'conf_dir': 
'/etc/hadoop/conf'},
+      self.assertEquals({'not_if': "su - user1 -c ' hadoop fs -ls 
/apps/test//*.files' >/dev/null 2>&1", 'user': 'user1', 'bin_dir': '/usr/bin', 
'conf_dir': '/etc/hadoop/conf'},
                         call_arg_list[0][0][0].arguments)
       self.assertEquals('fs -chown user1:hdfs /apps/test//*.files', 
call_arg_list[1][0][0].command)
-      self.assertEquals({'user': 'hdfs', 'conf_dir': '/etc/hadoop/conf'}, 
call_arg_list[1][0][0].arguments)
+      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': 
'/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
----------------------------------------------------------------------
diff --git 
a/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py 
b/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
index 28453a5..3354146 100644
--- 
a/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
+++ 
b/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
@@ -43,7 +43,7 @@ class TestExecuteHadoopResource(TestCase):
                         'tries': 1,
                         'user': 'user',
                         'try_sleep': 0,
-                        'environment': {'PATH': os.environ['PATH']}})
+                        'path': [None]})
 
 
   @patch("resource_management.core.providers.system.ExecuteProvider")
@@ -67,7 +67,7 @@ class TestExecuteHadoopResource(TestCase):
                         'tries': 1,
                         'user': 'user',
                         'try_sleep': 0,
-                        'environment': {'PATH': os.environ['PATH']}})
+                        'path': [None]})
 
 
   @patch("resource_management.core.providers.system.ExecuteProvider")
@@ -96,7 +96,7 @@ class TestExecuteHadoopResource(TestCase):
                         'tries': 2,
                         'user': 'user',
                         'try_sleep': 2,
-                        'environment': {'PATH': os.environ['PATH']}})
+                        'path': [None]})
 
 
   @patch("resource_management.core.providers.system.ExecuteProvider")
@@ -122,13 +122,13 @@ class TestExecuteHadoopResource(TestCase):
                         'tries': 1,
                         'user': 'user',
                         'try_sleep': 0,
-                        'environment': {'PATH': os.environ['PATH']}})
+                        'path': [None]})
       self.assertEqual(execute_mock.call_args_list[1][0][0].arguments,
                        {'logoutput': False,
                         'tries': 1,
                         'user': 'user',
                         'try_sleep': 0,
-                        'environment': {'PATH': os.environ['PATH']}})
+                        'path': [None]})
 
 
   @patch("resource_management.core.providers.system.ExecuteProvider")
@@ -181,7 +181,7 @@ class TestExecuteHadoopResource(TestCase):
                         'tries': 1,
                         'user': 'user',
                         'try_sleep': 0,
-                        'environment': {'PATH': os.environ['PATH']}})
+                        'path': [None]})
 
 
   @patch("resource_management.core.providers.system.ExecuteProvider")

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-agent/src/test/python/resource_management/TestExecuteResource.py
----------------------------------------------------------------------
diff --git 
a/ambari-agent/src/test/python/resource_management/TestExecuteResource.py 
b/ambari-agent/src/test/python/resource_management/TestExecuteResource.py
index f0a4539..93d7064 100644
--- a/ambari-agent/src/test/python/resource_management/TestExecuteResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestExecuteResource.py
@@ -90,7 +90,8 @@ class TestExecuteResource(TestCase):
       execute_resource = Execute('echo "1"',
                                  path=["/test/one", "test/two"]
       )
-    self.assertEqual(execute_resource.environment["PATH"], 
'/test/one:test/two')
+    expected_command = 'export PATH=$PATH:/test/one:test/two ; echo "1"'
+    self.assertEqual(popen_mock.call_args_list[0][0][0][3], expected_command)
 
   @patch('time.sleep')
   @patch.object(subprocess, "Popen")

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-common/src/main/python/resource_management/core/providers/system.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/core/providers/system.py 
b/ambari-common/src/main/python/resource_management/core/providers/system.py
index c297c83..61194bb 100644
--- a/ambari-common/src/main/python/resource_management/core/providers/system.py
+++ b/ambari-common/src/main/python/resource_management/core/providers/system.py
@@ -222,19 +222,15 @@ class ExecuteProvider(Provider):
         return
 
     Logger.debug("Executing %s" % self.resource)
-    
-    if self.resource.path != []:
-      if not self.resource.environment:
-        self.resource.environment = {}
-      
-      self.resource.environment['PATH'] = os.pathsep.join(self.resource.path) 
-    
+
     for i in range (0, self.resource.tries):
       try:
         shell.checked_call(self.resource.command, 
logoutput=self.resource.logoutput,
                             cwd=self.resource.cwd, 
env=self.resource.environment,
                             preexec_fn=_preexec_fn(self.resource), 
user=self.resource.user,
-                            wait_for_finish=self.resource.wait_for_finish, 
timeout=self.resource.timeout)
+                            wait_for_finish=self.resource.wait_for_finish,
+                            timeout=self.resource.timeout,
+                            path=self.resource.path)
         break
       except Fail as ex:
         if i == self.resource.tries-1: # last try

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-common/src/main/python/resource_management/core/shell.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/shell.py 
b/ambari-common/src/main/python/resource_management/core/shell.py
index 80e2a38..a2e3af3 100644
--- a/ambari-common/src/main/python/resource_management/core/shell.py
+++ b/ambari-common/src/main/python/resource_management/core/shell.py
@@ -19,6 +19,7 @@ limitations under the License.
 Ambari Agent
 
 """
+import os
 
 __all__ = ["checked_call", "call", "quote_bash_args"]
 
@@ -31,15 +32,15 @@ from exceptions import ExecuteTimeoutException
 from resource_management.core.logger import Logger
 
 def checked_call(command, logoutput=False, 
-         cwd=None, env=None, preexec_fn=None, user=None, wait_for_finish=True, 
timeout=None):
-  return _call(command, logoutput, True, cwd, env, preexec_fn, user, 
wait_for_finish, timeout)
+         cwd=None, env=None, preexec_fn=None, user=None, wait_for_finish=True, 
timeout=None, path=None):
+  return _call(command, logoutput, True, cwd, env, preexec_fn, user, 
wait_for_finish, timeout, path)
 
 def call(command, logoutput=False, 
-         cwd=None, env=None, preexec_fn=None, user=None, wait_for_finish=True, 
timeout=None):
-  return _call(command, logoutput, False, cwd, env, preexec_fn, user, 
wait_for_finish, timeout)
+         cwd=None, env=None, preexec_fn=None, user=None, wait_for_finish=True, 
timeout=None, path=None):
+  return _call(command, logoutput, False, cwd, env, preexec_fn, user, 
wait_for_finish, timeout, path)
             
 def _call(command, logoutput=False, throw_on_failure=True, 
-         cwd=None, env=None, preexec_fn=None, user=None, wait_for_finish=True, 
timeout=None):
+         cwd=None, env=None, preexec_fn=None, user=None, wait_for_finish=True, 
timeout=None, path=None):
   """
   Execute shell command
   
@@ -54,12 +55,17 @@ def _call(command, logoutput=False, throw_on_failure=True,
   if isinstance(command, (list, tuple)):
     command = ' '.join(quote_bash_args(x) for x in command)
 
+  if path:
+    export_path_command = "export PATH=$PATH" + os.pathsep + 
os.pathsep.join(path) + " ; "
+  else:
+    export_path_command = ""
+
   if user:
-    command = ["su", "-", user, "-c", command]
+    subprocess_command = ["su", "-", user, "-c", export_path_command + command]
   else:
-    command = ["/bin/bash","--login","-c", command]
+    subprocess_command = ["/bin/bash","--login","-c", export_path_command + 
command]
 
-  proc = subprocess.Popen(command, stdout=subprocess.PIPE, 
stderr=subprocess.STDOUT,
+  proc = subprocess.Popen(subprocess_command, stdout=subprocess.PIPE, 
stderr=subprocess.STDOUT,
                           cwd=cwd, env=env, shell=False,
                           preexec_fn=preexec_fn)
 
@@ -86,7 +92,7 @@ def _call(command, logoutput=False, throw_on_failure=True,
     Logger.info(out)
   
   if throw_on_failure and code:
-    err_msg = Logger.get_protected_text(("Execution of '%s' returned %d. %s") 
% (command[-1], code, out))
+    err_msg = Logger.get_protected_text(("Execution of '%s' returned %d. %s") 
% (command, code, out))
     raise Fail(err_msg)
   
   return code, out

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
 
b/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
index 79d3e35..ff16e16 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
@@ -34,6 +34,7 @@ class CopyFromLocalProvider(Provider):
     mode = self.resource.mode
     hdfs_usr=self.resource.hdfs_user
     hadoop_conf_path = self.resource.hadoop_conf_dir
+    bin_dir = self.resource.hadoop_bin_dir
 
 
     if dest_file:
@@ -50,6 +51,7 @@ class CopyFromLocalProvider(Provider):
     ExecuteHadoop(copy_cmd,
                   not_if=unless_cmd,
                   user=owner,
+                  bin_dir=bin_dir,
                   conf_dir=hadoop_conf_path
                   )
 
@@ -66,6 +68,7 @@ class CopyFromLocalProvider(Provider):
 
       ExecuteHadoop(chown_cmd,
                     user=hdfs_usr,
+                    bin_dir=bin_dir,
                     conf_dir=hadoop_conf_path)
     pass
 
@@ -75,5 +78,6 @@ class CopyFromLocalProvider(Provider):
 
       ExecuteHadoop(chmod_cmd,
                     user=hdfs_usr,
+                    bin_dir=bin_dir,
                     conf_dir=hadoop_conf_path)
     pass

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
 
b/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
index f367e99..efba0a0 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
@@ -28,7 +28,6 @@ class ExecuteHadoopProvider(Provider):
     kinit__path_local = self.resource.kinit_path_local
     keytab = self.resource.keytab
     conf_dir = self.resource.conf_dir
-    bin_dir = self.resource.bin_dir
     command = self.resource.command
     principal = self.resource.principal
     
@@ -42,14 +41,11 @@ class ExecuteHadoopProvider(Provider):
           user = self.resource.user
         )
 
-      path = os.environ['PATH']
-      if bin_dir is not None:
-        path += os.pathsep + bin_dir
 
       Execute (format("hadoop --config {conf_dir} {command}"),
         user        = self.resource.user,
         tries       = self.resource.tries,
         try_sleep   = self.resource.try_sleep,
         logoutput   = self.resource.logoutput,
-        environment = {'PATH' : path}
+        path        = self.resource.bin_dir
       )

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
 
b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
index 33cc1be..d14968e 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
@@ -99,15 +99,11 @@ class HdfsDirectoryProvider(Provider):
     #for hadoop 2 we need to specify -p to create directories recursively
     parent_flag = '`rpm -q hadoop | grep -q "hadoop-1" || echo "-p"`'
 
-    path = os.environ['PATH']
-    if bin_dir is not None:
-      path += os.pathsep + bin_dir
-
     Execute(format('hadoop --config {hdp_conf_dir} fs -mkdir {parent_flag} 
{dir_list_str} && {chmod_cmd} && {chown_cmd}',
                    chmod_cmd=' && '.join(chmod_commands),
                    chown_cmd=' && '.join(chown_commands)),
             user=hdp_hdfs_user,
-            environment = {'PATH' : path},
+            path=bin_dir,
             not_if=format("su - {hdp_hdfs_user} -c 'export 
PATH=$PATH:{bin_dir} ; "
                           "hadoop --config {hdp_conf_dir} fs -ls 
{dir_list_str}'")
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
 
b/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
index eaaeab5..54d003e 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
@@ -35,5 +35,6 @@ class CopyFromLocal(Resource):
   kinnit_if_needed = ResourceArgument(default='')
   hadoop_conf_dir = ResourceArgument(default='/etc/hadoop/conf')
   hdfs_user = ResourceArgument(default='hdfs')
+  hadoop_bin_dir = ResourceArgument(default='/usr/bin')
 
   actions = Resource.actions + ["run"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
index 389d6ab..7b94817 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
@@ -27,16 +27,15 @@ rpm_version = 
default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
-  hadoop_conf_empty_dir = 
format("/usr/hdp/{rpm_version}/etc/hadoop/conf.empty")
-  mapreduce_libs_path = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/*")
-  hadoop_libexec_dir = format("/usr/hdp/{rpm_version}/hadoop/libexec")
+  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
 else:
-  hadoop_conf_dir = "/etc/hadoop/conf"
-  hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
   mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
   hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 
+hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+versioned_hdp_root = '/usr/hdp/current'
 #security params
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 #java params

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
index 1ba0ae7..3aba886 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -39,6 +39,10 @@ def setup_hadoop_env():
          owner=tc_owner,
          content=InlineTemplate(params.hadoop_env_sh_template)
     )
+    if params.rpm_version is not None:
+      Execute(format('ln -s /usr/hdp/{rpm_version}* {versioned_hdp_root}'),
+              not_if=format('ls {versioned_hdp_root}')
+      )
 
 def setup_config():
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index 8fb2d90..d4ff0d3 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -28,20 +28,19 @@ rpm_version = 
default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
-  mapreduce_libs_path = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/*")
-  hadoop_libexec_dir = format("/usr/hdp/{rpm_version}/hadoop/libexec")
-  hadoop_lib_home = format("/usr/hdp/{rpm_version}/hadoop/lib")
-  hadoop_bin = format("/usr/hdp/{rpm_version}/hadoop/sbin")
-  hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
+  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
+  hadoop_lib_home = "/usr/hdp/current/hadoop/lib"
+  hadoop_bin = "/usr/hdp/current/hadoop/sbin"
+  hadoop_home = '/usr/hdp/current/hadoop'
 else:
-  hadoop_conf_dir = "/etc/hadoop/conf"
   mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
   hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
   hadoop_lib_home = "/usr/lib/hadoop/lib"
   hadoop_bin = "/usr/lib/hadoop/sbin"
   hadoop_home = '/usr'
 
+hadoop_conf_dir = "/etc/hadoop/conf"
 #security params
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
index 1404d27..45d0c8f 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/flume.py
@@ -30,7 +30,7 @@ def flume(action = None):
     for n in find_expected_agent_names():
       os.unlink(os.path.join(params.flume_conf_dir, n, 'ambari-meta.json'))
 
-    Directory(params.flume_conf_dir)
+    Directory(params.flume_conf_dir, recursive=True)
     Directory(params.flume_log_dir, owner=params.flume_user)
 
     flume_agents = {}

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
index c1f8804..10389cd 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
@@ -31,13 +31,11 @@ rpm_version = 
default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  flume_conf_dir = format('/usr/hdp/{rpm_version}/etc/flume/conf')
-  flume_bin = format('/usr/hdp/{rpm_version}/flume/bin/flume-ng')
-
+  flume_bin = '/usr/hdp/current/flume/bin/flume-ng'
 else:
-  flume_conf_dir = '/etc/flume/conf'
   flume_bin = '/usr/bin/flume-ng'
 
+flume_conf_dir = '/etc/flume/conf'
 java_home = config['hostLevelParams']['java_home']
 flume_log_dir = '/var/log/flume'
 flume_run_dir = '/var/run/flume'

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
index d07ebd1..c31caf2 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
@@ -36,22 +36,20 @@ if rpm_version is not None:
 
 #hadoop params
 if rpm_version is not None:
-  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
-  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
-  hbase_conf_dir = format('/usr/hdp/{rpm_version}/etc/hbase/conf')
-  daemon_script = format('/usr/hdp/{rpm_version}/hbase/bin/hbase-daemon.sh')
-  region_mover = format('/usr/hdp/{rpm_version}/hbase/bin/region_mover.rb')
-  region_drainer = 
format('/usr/hdp/{rpm_version}hbase/bin/draining_servers.rb')
-  hbase_cmd = format('/usr/hdp/{rpm_version}/hbase/bin/hbase')
+  hadoop_bin_dir = format("/usr/hdp/current/hadoop/bin")
+  daemon_script = format('/usr/hdp/current/hbase/bin/hbase-daemon.sh')
+  region_mover = format('/usr/hdp/current/hbase/bin/region_mover.rb')
+  region_drainer = format('/usr/hdp/currenthbase/bin/draining_servers.rb')
+  hbase_cmd = format('/usr/hdp/current/hbase/bin/hbase')
 else:
-  hadoop_conf_dir = "/etc/hadoop/conf"
   hadoop_bin_dir = "/usr/bin"
-  hbase_conf_dir = "/etc/hbase/conf"
   daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
   region_mover = "/usr/lib/hbase/bin/region_mover.rb"
   region_drainer = "/usr/lib/hbase/bin/draining_servers.rb"
   hbase_cmd = "/usr/lib/hbase/bin/hbase"
 
+hadoop_conf_dir = "/etc/hadoop/conf"
+hbase_conf_dir = "/etc/hbase/conf"
 hbase_excluded_hosts = config['commandParams']['excluded_hosts']
 hbase_drain_only = config['commandParams']['mark_draining_only']
 hbase_included_hosts = config['commandParams']['included_hosts']

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
index 60198c7..c56fac4 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
@@ -29,21 +29,19 @@ rpm_version = 
default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
-  hadoop_conf_empty_dir = 
format("/usr/hdp/{rpm_version}/etc/hadoop/conf.empty")
-  mapreduce_libs_path = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/*")
-  hadoop_libexec_dir = format("/usr/hdp/{rpm_version}/hadoop/libexec")
-  hadoop_bin = format("/usr/hdp/{rpm_version}/hadoop/sbin")
-  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
-  limits_conf_dir = format("/usr/hdp/{rpm_version}/etc/security/limits.d")
+  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
+  hadoop_bin = "/usr/hdp/current/hadoop/sbin"
+  hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
 else:
-  hadoop_conf_dir = "/etc/hadoop/conf"
-  hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
   mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
   hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
   hadoop_bin = "/usr/lib/hadoop/sbin"
   hadoop_bin_dir = "/usr/bin"
-  limits_conf_dir = "/etc/security/limits.d"
+
+hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+limits_conf_dir = "/etc/security/limits.d"
 
 execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
 ulimit_cmd = "ulimit -c unlimited; "

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py
index ef9a361..f30a2c5 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/service_check.py
@@ -33,7 +33,7 @@ class HdfsServiceCheck(Script):
 
     create_dir_cmd = format("fs -mkdir {dir}")
     chmod_command = format("fs -chmod 777 {dir}")
-    test_dir_exists = format("su - {smoke_user} -c 'hadoop --config 
{hadoop_conf_dir} fs -test -e {dir}'")
+    test_dir_exists = format("su - {smoke_user} -c '{hadoop_bin_dir}/hadoop 
--config {hadoop_conf_dir} fs -test -e {dir}'")
     cleanup_cmd = format("fs -rm {tmp_file}")
     #cleanup put below to handle retries; if retrying there wil be a stale file
     #that needs cleanup; exit code is fn of second command

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py
index 583ca42..31c1673 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py
@@ -26,12 +26,14 @@ def hcat():
   import params
 
   Directory(params.hive_conf_dir,
+            recursive=True,
             owner=params.hcat_user,
             group=params.user_group,
   )
 
 
   Directory(params.hcat_conf_dir,
+            recursive=True,
             owner=params.hcat_user,
             group=params.user_group,
   )

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
index ede7e27..081352a 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
@@ -44,8 +44,7 @@ def hcat_service_check():
             tries=3,
             user=params.smokeuser,
             try_sleep=5,
-            path=['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
-            environment = {'PATH' : params.execute_path},
+            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', 
params.execute_path],
             logoutput=True)
 
     if params.security_enabled:
@@ -57,7 +56,7 @@ def hcat_service_check():
                     kinit_path_local=params.kinit_path_local,
                     keytab=params.hdfs_user_keytab,
                     principal=params.hdfs_principal_name,
-                    bin_dir=params.hive_bin
+                    bin_dir=params.execute_path
       )
     else:
       ExecuteHadoop(test_cmd,
@@ -67,7 +66,7 @@ def hcat_service_check():
                     security_enabled=params.security_enabled,
                     kinit_path_local=params.kinit_path_local,
                     keytab=params.hdfs_user_keytab,
-                    bin_dir=params.hive_bin
+                    bin_dir=params.execute_path
       )
 
     cleanup_cmd = format("{kinit_cmd} {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} 
cleanup")
@@ -75,8 +74,7 @@ def hcat_service_check():
     Execute(cleanup_cmd,
             tries=3,
             user=params.smokeuser,
-            environment = {'PATH' : params.execute_path },
             try_sleep=5,
-            path=['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', 
params.execute_path],
             logoutput=True
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
index f09794c..67d97f5 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
@@ -72,6 +72,7 @@ def install_tez_jars():
                       dest_file=dest_file,
                       kinnit_if_needed=kinit_if_needed,
                       hdfs_user=params.hdfs_user,
+                      hadoop_bin_dir=params.hadoop_bin_dir,
                       hadoop_conf_dir=params.hadoop_conf_dir
         )
 
@@ -82,6 +83,7 @@ def install_tez_jars():
                     dest_dir=lib_dir_path,
                     kinnit_if_needed=kinit_if_needed,
                     hdfs_user=params.hdfs_user,
+                    hadoop_bin_dir=params.hadoop_bin_dir,
                     hadoop_conf_dir=params.hadoop_conf_dir
       )
     pass

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
index dd6e3c3..cda202d 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
@@ -33,56 +33,52 @@ hdp_stack_version = 
config['hostLevelParams']['stack_version']
 
 #hadoop params
 if rpm_version is not None:
-  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
-  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
-  hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
-  hadoop_streeming_jars = 
format("/usr/hdp/{rpm_version}/hadoop-mapreduce/hadoop-streaming-*.jar")
-  hive_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf')
-  hive_client_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf')
-  hive_server_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf.server')
-  hive_bin = format('/usr/hdp/{rpm_version}/hive/bin')
-  hive_lib = format('/usr/hdp/{rpm_version}/hive/lib')
-  tez_local_api_jars = format('/usr/hdp/{rpm_version}/tez/tez*.jar')
-  tez_local_lib_jars = format('/usr/hdp/{rpm_version}/tez/lib/*.jar')
-
-  if str(hdp_stack_version).startswith('2.0'):
-    hcat_conf_dir = format('/usr/hdp/{rpm_version}/etc/hcatalog/conf')
-    config_dir = format('/usr/hdp/{rpm_version}/etc/hcatalog/conf')
-    hcat_lib = format('/usr/hdp/{rpm_version}/hive/hcatalog/share/hcatalog')
-    webhcat_bin_dir = format('/usr/hdp/{rpm_version}/hive/hcatalog/sbin')
-  # for newer versions
-  else:
-    hcat_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive-hcatalog/conf')
-    config_dir = format('/usr/hdp/{rpm_version}/etc/hive-webhcat/conf')
-    hcat_lib = 
format('/usr/hdp/{rpm_version}/hive/hive-hcatalog/share/hcatalog')
-    webhcat_bin_dir = format('/usr/hdp/{rpm_version}/hive/hive-hcatalog/sbin')
+  hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
+  hadoop_home = '/usr/hdp/current/hadoop'
+  hadoop_streeming_jars = 
"/usr/hdp/current/hadoop-mapreduce/hadoop-streaming-*.jar"
+  hive_bin = '/usr/hdp/current/hive/bin'
+  hive_lib = '/usr/hdp/current/hive/lib'
+  tez_local_api_jars = '/usr/hdp/current/tez/tez*.jar'
+  tez_local_lib_jars = '/usr/hdp/current/tez/lib/*.jar'
+  tez_tar_file = "/usr/hdp/current/tez/lib/tez*.tar.gz"
+
+  hcat_lib = '/usr/hdp/current/hive/hive-hcatalog/share/hcatalog'
+  webhcat_bin_dir = '/usr/hdp/current/hive/hive-hcatalog/sbin'
 
 else:
-  hadoop_conf_dir = "/etc/hadoop/conf"
   hadoop_bin_dir = "/usr/bin"
   hadoop_home = '/usr'
   hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
-  hive_conf_dir = "/etc/hive/conf"
   hive_bin = '/usr/lib/hive/bin'
   hive_lib = '/usr/lib/hive/lib/'
-  hive_client_conf_dir = "/etc/hive/conf"
-  hive_server_conf_dir = '/etc/hive/conf.server'
   tez_local_api_jars = '/usr/lib/tez/tez*.jar'
   tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
+  tez_tar_file = "/usr/lib/tez/tez*.tar.gz"
 
   if str(hdp_stack_version).startswith('2.0'):
-    hcat_conf_dir = '/etc/hcatalog/conf'
-    config_dir = '/etc/hcatalog/conf'
     hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
     webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
   # for newer versions
   else:
-    hcat_conf_dir = '/etc/hive-hcatalog/conf'
-    config_dir = '/etc/hive-webhcat/conf'
     hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
     webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
 
-execute_path = os.environ['PATH'] + os.pathsep + hive_bin
+hadoop_conf_dir = "/etc/hadoop/conf"
+hive_conf_dir = "/etc/hive/conf"
+hive_client_conf_dir = "/etc/hive/conf"
+hive_server_conf_dir = '/etc/hive/conf.server'
+
+
+
+if str(hdp_stack_version).startswith('2.0'):
+  hcat_conf_dir = '/etc/hcatalog/conf'
+  config_dir = '/etc/hcatalog/conf'
+# for newer versions
+else:
+  hcat_conf_dir = '/etc/hive-hcatalog/conf'
+  config_dir = '/etc/hive-webhcat/conf'
+
+execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + 
hadoop_bin_dir
 hive_metastore_user_name = 
config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
 hive_jdbc_connection_url = 
config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
 
@@ -224,8 +220,6 @@ if System.get_instance().os_family == "ubuntu":
 else:
   mysql_configname = '/etc/my.cnf'
 
-tez_tar_file = "/usr/lib/tez/tez*.tar.gz"
-
 # Hive security
 hive_authorization_enabled = 
config['configurations']['hive-site']['hive.security.authorization.enabled']
 
@@ -242,14 +236,6 @@ else:
 ########### WebHCat related params #####################
 ########################################################
 
-if str(config['hostLevelParams']['stack_version']).startswith('2.0'):
-  config_dir = '/etc/hcatalog/conf'
-  webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
-# for newer versions
-else:
-  config_dir = '/etc/hive-webhcat/conf'
-  webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
-
 webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
 templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
 templeton_pid_dir = status_params.hcat_pid_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py
index c56ae5f..287049b 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py
@@ -56,6 +56,7 @@ def webhcat():
             recursive=True)
 
   Directory(params.config_dir,
+            recursive=True,
             owner=params.webhcat_user,
             group=params.user_group)
 
@@ -90,6 +91,7 @@ def webhcat():
                 dest_dir=params.webhcat_apps_dir,
                 kinnit_if_needed=kinit_if_needed,
                 hdfs_user=params.hdfs_user,
+                hadoop_bin_dir=params.hadoop_bin_dir,
                 hadoop_conf_dir=params.hadoop_conf_dir
   )
 
@@ -99,6 +101,7 @@ def webhcat():
                 dest_dir=params.webhcat_apps_dir,
                 kinnit_if_needed=kinit_if_needed,
                 hdfs_user=params.hdfs_user,
+                hadoop_bin_dir=params.hadoop_bin_dir,
                 hadoop_conf_dir=params.hadoop_conf_dir
   )
 
@@ -108,5 +111,6 @@ def webhcat():
                 dest_dir=params.webhcat_apps_dir,
                 kinnit_if_needed=kinit_if_needed,
                 hdfs_user=params.hdfs_user,
+                hadoop_bin_dir=params.hadoop_bin_dir,
                 hadoop_conf_dir=params.hadoop_conf_dir
   )

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
index d1f8b75..5d7ae16 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
@@ -30,18 +30,16 @@ rpm_version = 
default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
-  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
-  hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
-  pig_conf_dir = format('/usr/hdp/{rpm_version}/etc/pig/conf')
-  pig_bin_dir = format('/usr/hdp/{rpm_version}/pig/bin')
+  hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
+  hadoop_home = '/usr/hdp/current/hadoop'
+  pig_bin_dir = '/usr/hdp/current/pig/bin'
 else:
-  hadoop_conf_dir = "/etc/hadoop/conf"
   hadoop_bin_dir = "/usr/bin"
   hadoop_home = '/usr'
-  pig_conf_dir = "/etc/pig/conf"
   pig_bin_dir = ""
 
+hadoop_conf_dir = "/etc/hadoop/conf"
+pig_conf_dir = "/etc/pig/conf"
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = 
config['configurations']['hadoop-env']['hdfs_principal_name']
 smokeuser = config['configurations']['cluster-env']['smokeuser']

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/pig.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/pig.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/pig.py
index 8326262..afdba8d 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/pig.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/pig.py
@@ -26,6 +26,7 @@ def pig():
   import params
 
   Directory( params.pig_conf_dir,
+    recursive = True,
     owner = params.hdfs_user,
     group = params.user_group
   )

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
index 5784f9d..e97bb9f 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
@@ -26,18 +26,17 @@ rpm_version = 
default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  zoo_conf_dir = format('/usr/hdp/{rpm_version}/etc/zookeeper')
-  sqoop_conf_dir = format('/usr/hdp/{rpm_version}/sqoop/conf')
-  sqoop_lib = format('/usr/hdp/{rpm_version}/sqoop/lib')
-  hbase_home = format('/usr/hdp/{rpm_version}/hbase')
-  hive_home = format('/usr/hdp/{rpm_version}/hive')
+  sqoop_conf_dir = '/usr/hdp/current/etc/sqoop/conf'
+  sqoop_lib = '/usr/hdp/current/sqoop/lib'
+  hbase_home = '/usr/hdp/current/hbase'
+  hive_home = '/usr/hdp/current/hive'
 else:
-  zoo_conf_dir = "/etc/zookeeper"
   sqoop_conf_dir = "/usr/lib/sqoop/conf"
   sqoop_lib = "/usr/lib/sqoop/lib"
   hbase_home = "/usr"
   hive_home = "/usr"
 
+zoo_conf_dir = "/etc/zookeeper"
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 user_group = config['configurations']['cluster-env']['user_group']

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/mapred_service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/mapred_service_check.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/mapred_service_check.py
index 4685716..a9f4367 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/mapred_service_check.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/mapred_service_check.py
@@ -46,6 +46,7 @@ class MapReduce2ServiceCheck(Script):
                   tries=1,
                   try_sleep=5,
                   user=params.smokeuser,
+                  bin_dir=params.execute_path,
                   conf_dir=params.hadoop_conf_dir
     )
 
@@ -53,6 +54,7 @@ class MapReduce2ServiceCheck(Script):
                   tries=1,
                   try_sleep=5,
                   user=params.smokeuser,
+                  bin_dir=params.execute_path,
                   conf_dir=params.hadoop_conf_dir
     )
 
@@ -60,12 +62,14 @@ class MapReduce2ServiceCheck(Script):
                   tries=1,
                   try_sleep=5,
                   user=params.smokeuser,
+                  bin_dir=params.execute_path,
                   conf_dir=params.hadoop_conf_dir,
                   logoutput=True
     )
 
     ExecuteHadoop(test_cmd,
                   user=params.smokeuser,
+                  bin_dir=params.execute_path,
                   conf_dir=params.hadoop_conf_dir
     )
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
index f8d670e..86eee78 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
@@ -32,29 +32,27 @@ rpm_version = 
default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
-  hadoop_libexec_dir = format("/usr/hdp/{rpm_version}/hadoop/libexec")
-  hadoop_bin = format("/usr/hdp/{rpm_version}/hadoop/sbin")
-  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
-  limits_conf_dir = format("/usr/hdp/{rpm_version}/etc/security/limits.d")
-  hadoop_yarn_home = format('/usr/hdp/{rpm_version}/hadoop-yarn')
-  hadoop_mapred2_jar_location = 
format('/usr/hdp/{rpm_version}/hadoop-mapreduce')
-  mapred_bin = format('/usr/hdp/{rpm_version}/hadoop-mapreduce/sbin')
-  yarn_bin = format('/usr/hdp/{rpm_version}/hadoop-yarn/sbin')
-  yarn_container_bin = format('/usr/hdp/{rpm_version}/hadoop-yarn/bin')
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
+  hadoop_bin = "/usr/hdp/current/hadoop/sbin"
+  hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
+  hadoop_yarn_home = '/usr/hdp/current/hadoop-yarn'
+  hadoop_mapred2_jar_location = '/usr/hdp/current/hadoop-mapreduce'
+  mapred_bin = '/usr/hdp/current/hadoop-mapreduce/sbin'
+  yarn_bin = '/usr/hdp/current/hadoop-yarn/sbin'
+  yarn_container_bin = '/usr/hdp/current/hadoop-yarn/bin'
 else:
-  hadoop_conf_dir = "/etc/hadoop/conf"
   hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
   hadoop_bin = "/usr/lib/hadoop/sbin"
   hadoop_bin_dir = "/usr/bin"
-  limits_conf_dir = "/etc/security/limits.d"
   hadoop_yarn_home = '/usr/lib/hadoop-yarn'
   hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
   mapred_bin = "/usr/lib/hadoop-mapreduce/sbin"
   yarn_bin = "/usr/lib/hadoop-yarn/sbin"
   yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
 
-execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
+hadoop_conf_dir = "/etc/hadoop/conf"
+limits_conf_dir = "/etc/security/limits.d"
+execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir + os.pathsep + 
yarn_container_bin
 
 ulimit_cmd = "ulimit -c unlimited;"
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py
index 7e535a5..7189664 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/service_check.py
@@ -60,7 +60,7 @@ class ServiceCheck(Script):
     )
 
     Execute(run_yarn_check_cmd,
-            environment= {'PATH' : params.execute_path },
+            path=params.execute_path,
             user=params.smokeuser
     )
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
index 6016b99..b691c3f 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
@@ -31,14 +31,13 @@ rpm_version = 
default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  config_dir = format('/usr/hdp/{rpm_version}/etc/zookeeper/conf')
-  zk_bin = format('/usr/hdp/{rpm_version}/zookeeper/bin')
-  smoke_script = format('/usr/hdp/{rpm_version}/zookeeper/bin/zkCli.sh')
+  zk_bin = '/usr/hdp/current/zookeeper/bin'
+  smoke_script = '/usr/hdp/current/zookeeper/bin/zkCli.sh'
 else:
-  config_dir = "/etc/zookeeper/conf"
   zk_bin = '/usr/lib/zookeeper/bin'
   smoke_script = "/usr/lib/zookeeper/bin/zkCli.sh"
 
+config_dir = "/etc/zookeeper/conf"
 zk_user =  config['configurations']['zookeeper-env']['zk_user']
 hostname = config['hostname']
 user_group = config['configurations']['cluster-env']['user_group']

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
index 734faf2..30548f1 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
@@ -28,18 +28,16 @@ rpm_version = 
default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
-  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
-  falcon_webapp_dir = format("/usr/hdp/{rpm_version}/falcon/webapp")
-  falcon_home = format("/usr/hdp/{rpm_version}/falcon")
-  falcon_conf_dir = format("/usr/hdp/{rpm_version}/falcon/conf")
+  hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
+  falcon_webapp_dir = "/usr/hdp/current/falcon/webapp"
+  falcon_home = "/usr/hdp/current/falcon"
 else:
-  hadoop_conf_dir = "/etc/hadoop/conf"
   hadoop_bin_dir = "/usr/bin"
   falcon_webapp_dir = '/var/lib/falcon/webapp'
   falcon_home = '/usr/lib/falcon'
-  falcon_conf_dir = '/etc/falcon/conf'
 
+hadoop_conf_dir = "/etc/hadoop/conf"
+falcon_conf_dir = '/etc/falcon/conf'
 oozie_user = config['configurations']['oozie-env']['oozie_user']
 falcon_user = config['configurations']['falcon-env']['falcon_user']
 smoke_user =  config['configurations']['cluster-env']['smokeuser']

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py
index 7115de4..84e2174 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py
@@ -24,6 +24,16 @@ import status_params
 # server configurations
 config = Script.get_config()
 
+#RPM versioning support
+rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+
+#hadoop params
+if rpm_version is not None:
+  rest_lib_dir = format('/usr/hdp/current/storm/contrib/storm-rest')
+
+else:
+  rest_lib_dir = "/usr/lib/storm/contrib/storm-rest"
+
 storm_user = config['configurations']['storm-env']['storm_user']
 log_dir = config['configurations']['storm-env']['storm_log_dir']
 pid_dir = status_params.pid_dir
@@ -37,7 +47,6 @@ nimbus_host = 
config['configurations']['storm-site']['nimbus.host']
 rest_api_port = "8745"
 rest_api_admin_port = "8746"
 rest_api_conf_file = format("{conf_dir}/config.yaml")
-rest_lib_dir = 
default("/configurations/storm-env/rest_lib_dir","/usr/lib/storm/contrib/storm-rest")
 storm_env_sh_template = config['configurations']['storm-env']['content']
 
 if 'ganglia_server_host' in config['clusterHostInfo'] and \

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml
index 2164b98..197c9dc 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml
@@ -27,7 +27,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>falcon_2_9_9_9_117</name>
+              <name>falcon_2_9_9_9_*</name>
             </package>
           </packages>
         </osSpecific>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml
index 9cc6909..808e2af 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml
@@ -28,7 +28,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>flume_2_9_9_9_117</name>
+              <name>flume_2_9_9_9_*</name>
             </package>
           </packages>
         </osSpecific>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
index 76cfb3b..5c189f4 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
@@ -28,7 +28,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>hbase_2_9_9_9_117</name>
+              <name>hbase_2_9_9_9_*</name>
             </package>
           </packages>
         </osSpecific>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
index 460e946..8907098 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
@@ -23,7 +23,7 @@
 <configuration>
   <property>
     <name>rpm_version</name>
-    <value>2.9.9.9-117</value>
+    <value>2.9.9.9</value>
     <description>Hadoop RPM version</description>
   </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-site.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-site.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-site.xml
deleted file mode 100644
index af0064c..0000000
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-site.xml
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<!-- Put site-specific property overrides in this file. -->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>dfs.hosts.exclude</name>
-    <value>/usr/hdp/2.9.9.9-117/etc/hadoop/conf/dfs.exclude</value>
-    <description>Names a file that contains a list of hosts that are
-      not permitted to connect to the namenode.  The full pathname of the
-      file must be specified.  If the value is empty, no hosts are
-      excluded.</description>
-  </property>
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
index 0a129b9..e680155 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
@@ -28,7 +28,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>hadoop_2_9_9_9_117</name>
+              <name>hadoop_2_9_9_9_*</name>
             </package>
             <package>
               <name>hadoop-lzo</name>
@@ -52,7 +52,7 @@
               <name>hadoop-lzo-native</name>
             </package>
             <package>
-              <name>hadoop_2_9_9_9_117-libhdfs</name>
+              <name>hadoop_2_9_9_9_*-libhdfs</name>
             </package>
             <package>
               <name>ambari-log4j</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
index ce9f2c5..bdecb67 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
@@ -24,34 +24,28 @@ limitations under the License.
 <configuration supports_final="true">
 
   <property>
-    <name>templeton.hadoop.conf.dir</name>
-    <value>/usr/hdp/2.9.9.9-117/etc/hadoop/conf</value>
-    <description>The path to the Hadoop configuration.</description>
-  </property>
-
-  <property>
     <name>templeton.jar</name>
-    <value>/usr/hdp/2.9.9.9-117/hcatalog/share/webhcat/svr/webhcat.jar</value>
+    <value>/usr/hdp/current/hcatalog/share/webhcat/svr/webhcat.jar</value>
     <description>The path to the Templeton jar file.</description>
   </property>
 
   <property>
     <name>templeton.libjars</name>
-    <value>/usr/hdp/2.9.9.9-117/zookeeper/zookeeper.jar</value>
+    <value>/usr/hdp/current/zookeeper/zookeeper.jar</value>
    <description>Jars to add to the classpath.</description>
   </property>
 
 
   <property>
     <name>templeton.hadoop</name>
-    <value>/usr/hdp/2.9.9.9-117/hadoop/bin/hadoop</value>
+    <value>/usr/hdp/current/hadoop/bin/hadoop</value>
     <description>The path to the Hadoop executable.</description>
   </property>
 
 
   <property>
     <name>templeton.hcat</name>
-    <value>/usr/hdp/2.9.9.9-117/hive/bin/hcat</value>
+    <value>/usr/hdp/current/hive/bin/hcat</value>
     <description>The path to the hcatalog executable.</description>
   </property>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
index 29835b1..90765d6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
@@ -26,16 +26,16 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>hive_2_9_9_9_117</name>
+              <name>hive_2_9_9_9_*</name>
             </package>
             <package>
               <name>mysql-connector-java</name>
             </package>
             <package>
-              <name>hive_2_9_9_9_117-hcatalog</name>
+              <name>hive_2_9_9_9_*-hcatalog</name>
             </package>
             <package>
-              <name>hive_2_9_9_9_117-webhcat</name>
+              <name>hive_2_9_9_9_*-webhcat</name>
             </package>
             <package>
               <name>webhcat-tar-hive</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml
index b725341..dc56d8b 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml
@@ -20,19 +20,6 @@
 <configuration supports_final="true">
 
   <property>
-    <name>oozie.service.HadoopAccessorService.hadoop.configurations</name>
-    <value>*=/usr/hdp/2.9.9.9-117/etc/hadoop/conf</value>
-    <description>
-      Comma separated AUTHORITY=HADOOP_CONF_DIR, where AUTHORITY is the 
HOST:PORT of
-      the Hadoop service (JobTracker, HDFS). The wildcard '*' configuration is
-      used when there is no exact match for an authority. The HADOOP_CONF_DIR 
contains
-      the relevant Hadoop *-site.xml files. If the path is relative is looked 
within
-      the Oozie configuration directory; though the path can be absolute (i.e. 
to point
-      to Hadoop client conf/ directories in the local filesystem.
-    </description>
-  </property>
-
-  <property>
     <name>oozie.service.coord.check.maximum.frequency</name>
     <value>false</value>
     <description>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
index b888d9f..6a1ca49 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
@@ -26,13 +26,13 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>oozie_2_9_9_9_117</name>
+              <name>oozie_2_9_9_9_*</name>
             </package>
             <package>
-              <name>oozie_2_9_9_9_117-client</name>
+              <name>oozie_2_9_9_9_*-client</name>
             </package>
             <package>
-              <name>falcon_2_9_9_9_117</name>
+              <name>falcon_2_9_9_9_*</name>
             </package>
             <package>
               <name>zip</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml
index 1895470..a35aec4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml
@@ -27,7 +27,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>pig_2_9_9_9_117</name>
+              <name>pig_2_9_9_9_*</name>
             </package>
           </packages>
         </osSpecific>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/SLIDER/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/SLIDER/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/SLIDER/package/scripts/params.py
index af6939c..062e5bd 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/SLIDER/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/SLIDER/package/scripts/params.py
@@ -28,8 +28,8 @@ rpm_version = 
default("/configurations/hadoop-env/rpm_version", None)
 
 #hadoop params
 if rpm_version is not None:
-  slider_conf_dir = format('/usr/lib/{rpm_version}/slider/conf')
-  slider_bin_dir = format('/usr/lib/{rpm_version}/slider/bin')
+  slider_conf_dir = '/usr/lib/current/slider/conf'
+  slider_bin_dir = '/usr/lib/current/slider/bin'
 else:
   slider_conf_dir = "/usr/lib/slider/conf"
   slider_bin_dir = "/usr/lib/slider/bin"

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml
index 02859e4..f72cfb2 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml
@@ -26,7 +26,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>sqoop_2_9_9_9_117</name>
+              <name>sqoop_2_9_9_9_*</name>
             </package>
             <package>
               <name>mysql-connector-java</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-env.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-env.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-env.xml
deleted file mode 100644
index 6b2b550..0000000
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-env.xml
+++ /dev/null
@@ -1,29 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration>
-  <property>
-    <name>rest_lib_dir</name>
-    <value>/usr/lib/storm/external/storm-rest</value>
-    <description></description>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-site.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-site.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-site.xml
index 396af4a..57a4889 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-site.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/configuration/storm-site.xml
@@ -25,13 +25,13 @@
 
   <property>
     <name>nimbus.childopts</name>
-    <value>-Xmx1024m 
-Djava.security.auth.login.config=/etc/storm/conf/storm_jaas.conf 
-javaagent:/usr/lib/storm/external/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8649,wireformat31x=true,mode=multicast,config=/usr/lib/storm/external/storm-jmxetric/conf/jmxetric-conf.xml,process=Nimbus_JVM</value>
+    <value>-Xmx1024m 
-Djava.security.auth.login.config=/etc/storm/conf/storm_jaas.conf 
-javaagent:/usr/hdp/current/storm/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8649,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Nimbus_JVM</value>
     <description>This parameter is used by the storm-deploy project to 
configure the jvm options for the nimbus daemon.</description>
   </property>
 
   <property>
     <name>worker.childopts</name>
-    <value>-Xmx768m 
-javaagent:/usr/lib/storm/external/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/lib/storm/external/storm-jmxetric/conf/jmxetric-conf.xml,process=Worker_%ID%_JVM</value>
+    <value>-Xmx768m 
-javaagent:/usr/hdp/current/storm/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Worker_%ID%_JVM</value>
     <description>The jvm opts provided to workers launched by this supervisor. 
All \"%ID%\" substrings are replaced with an identifier for this 
worker.</description>
   </property>
 
@@ -45,7 +45,7 @@
 
   <property>
     <name>supervisor.childopts</name>
-    <value>-Xmx256m 
-Djava.security.auth.login.config=/etc/storm/conf/storm_jaas.conf 
-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false 
-Dcom.sun.management.jmxremote.authenticate=false 
-Dcom.sun.management.jmxremote.port=56431 
-javaagent:/usr/lib/storm/external/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/lib/storm/external/storm-jmxetric/conf/jmxetric-conf.xml,process=Supervisor_JVM</value>
+    <value>-Xmx256m 
-Djava.security.auth.login.config=/etc/storm/conf/storm_jaas.conf 
-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false 
-Dcom.sun.management.jmxremote.authenticate=false 
-Dcom.sun.management.jmxremote.port=56431 
-javaagent:/usr/hdp/current/storm/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=localhost,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Supervisor_JVM</value>
     <description>This parameter is used by the storm-deploy project to 
configure the jvm options for the supervisor daemon.</description>
   </property>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml
index b187fd1..c5a9e20 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml
@@ -28,7 +28,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>storm_2_9_9_9_117</name>
+              <name>storm_2_9_9_9_*</name>
             </package>
           </packages>
         </osSpecific>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml
index 08160f1..5e0dac4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml
@@ -28,7 +28,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>tez_2_9_9_9_117</name>
+              <name>tez_2_9_9_9_*</name>
             </package>
           </packages>
         </osSpecific>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/metainfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/metainfo.xml
deleted file mode 100644
index 80d2351..0000000
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/metainfo.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <version>0.14.0.2.9.9.9</version>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hive_2_9_9_9_117-webhcat</name>
-            </package>
-            <package>
-              <name>webhcat-tar-hive</name>
-            </package>
-            <package>
-              <name>webhcat-tar-pig</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
index 91c9e09..8458357 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
@@ -24,7 +24,7 @@
 
   <property>
     <name>mapreduce.admin.user.env</name>
-    
<value>LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/hdp/2.9.9.9-117/hadoop/lib/native/Linux-amd64-64</value>
+    
<value>LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/hdp/current/hadoop/lib/native/Linux-amd64-64</value>
     <description>
       Additional execution environment entries for map and reduce task 
processes.
       This is not an additive property. You must preserve the original value if
@@ -32,5 +32,14 @@
     </description>
   </property>
 
+  <property>
+    <name>mapreduce.application.classpath</name>
+    
<value>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*,/usr/hdp/current/hadoop-mapreduce/,/usr/hdp/current/hadoop-mapreduce/lib,/usr/hdp/current/hadoop/</value>
+    <description>
+      CLASSPATH for MR applications. A comma-separated list of CLASSPATH
+      entries.
+    </description>
+  </property>
+
 
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
index 6bcf82e..3433864 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
@@ -22,14 +22,9 @@
 <configuration supports_final="true" 
 xmlns:xi="http://www.w3.org/2001/XInclude">
 
   <property>
-    <name>yarn.resourcemanager.nodes.exclude-path</name>
-    <value>/usr/hdp/2.9.9.9-117/etc/hadoop/conf/yarn.exclude</value>
-    <description>
-      Names a file that contains a list of hosts that are
-      not permitted to connect to the resource manager.  The full pathname of 
the
-      file must be specified.  If the value is empty, no hosts are
-      excluded.
-    </description>
+    <name>yarn.application.classpath</name>
+    
<value>/etc/hadoop/conf,/usr/hdp/current/hadoop/*,/usr/hdp/current/hadoop/lib/*,/usr/hdp/current/hadoop-hdfs/*,/usr/hdp/current/hadoop-hdfs/lib/*,/usr/hdp/current/hadoop-yarn/*,/usr/hdp/current/hadoop-yarn/lib/*,/usr/hdp/current/hadoop-mapreduce/*,/usr/hdp/current/hadoop-mapreduce/lib/*</value>
+    <description>Classpath for typical applications.</description>
   </property>
 
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml
index d8fd7d3..b1da331 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml
@@ -37,10 +37,10 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>hadoop_2_9_9_9_117-yarn</name>
+              <name>hadoop_2_9_9_9_*-yarn</name>
             </package>
             <package>
-              <name>hadoop_2_9_9_9_117-mapreduce</name>
+              <name>hadoop_2_9_9_9_*-mapreduce</name>
             </package>
           </packages>
         </osSpecific>
@@ -56,7 +56,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>hadoop_2_9_9_9_117-mapreduce</name>
+              <name>hadoop_2_9_9_9_*-mapreduce</name>
             </package>
           </packages>
         </osSpecific>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml
index 33efb36..a9cfefb 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml
@@ -28,7 +28,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>zookeeper_2_9_9_9_117</name>
+              <name>zookeeper_2_9_9_9_*</name>
             </package>
           </packages>
         </osSpecific>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d7ed4316/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_service_check.py 
b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_service_check.py
index 629406f..b6a6e67 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_service_check.py
@@ -57,7 +57,7 @@ class TestServiceCheck(RMFTestCase):
         conf_dir = '/etc/hadoop/conf',
         bin_dir = '/usr/bin',
         logoutput = True,
-        not_if = 'su - ambari-qa -c \'hadoop --config /etc/hadoop/conf fs 
-test -e /tmp\'',
+        not_if = 'su - ambari-qa -c \'/usr/bin/hadoop --config 
/etc/hadoop/conf fs -test -e /tmp\'',
         try_sleep = 3,
         tries = 5,
         user = 'ambari-qa',

Reply via email to