AMBARI-9785. Root user has a SPNEGO (HTTP) Kerberos ticket set after Kerberos is enabled; root should have no ticket. (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4c222ce6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4c222ce6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4c222ce6

Branch: refs/heads/trunk
Commit: 4c222ce6fbc51ed4e42e9f2ad0946c84ff17ea48
Parents: e8ea0a5
Author: Robert Levas <rle...@hortonworks.com>
Authored: Fri Feb 27 20:39:29 2015 -0500
Committer: Robert Levas <rle...@hortonworks.com>
Committed: Fri Feb 27 20:41:33 2015 -0500

----------------------------------------------------------------------
 .../ambari_agent/AlertSchedulerHandler.py       |  2 +-
 .../python/ambari_agent/alerts/web_alert.py     | 49 ++++++++++++++++++--
 .../libraries/functions/__init__.py             |  2 +
 .../libraries/functions/get_kdestroy_path.py    | 24 ++++------
 .../libraries/functions/get_kinit_path.py       | 24 ++++------
 .../package/alerts/alert_webhcat_server.py      | 30 +++++++++---
 .../package/alerts/alert_check_oozie_server.py  | 31 ++++++++++---
 .../4.0.0.2.0/package/scripts/oozie_service.py  |  3 +-
 .../services/HBASE/package/scripts/params.py    |  3 +-
 .../0.8/services/HDFS/package/scripts/params.py |  2 +-
 .../0.8/services/HIVE/package/scripts/params.py |  2 +-
 .../package/files/alert_check_oozie_server.py   | 31 ++++++++++---
 .../services/OOZIE/package/scripts/params.py    |  6 +--
 .../0.8/services/PIG/package/scripts/params.py  |  2 +-
 .../package/files/alert_webhcat_server.py       | 30 +++++++++---
 .../0.8/services/YARN/package/scripts/params.py |  4 +-
 .../ZOOKEEPER/package/scripts/params.py         |  2 +-
 .../services/YARN/package/scripts/params.py     |  5 +-
 .../services/FALCON/package/scripts/params.py   |  2 +-
 .../services/YARN/package/scripts/params.py     |  5 +-
 .../services/KNOX/package/scripts/params.py     |  2 +-
 .../services/SLIDER/package/scripts/params.py   |  2 +-
 .../2.1/services/YARN/package/scripts/params.py |  2 +-
 .../services/HBASE/package/scripts/params.py    |  3 +-
 .../services/HDFS/package/scripts/params.py     |  2 +-
 .../services/HIVE/package/scripts/params.py     |  2 +-
 .../services/NAGIOS/package/scripts/params.py   |  2 +-
 .../services/OOZIE/package/scripts/params.py    |  5 +-
 .../services/PIG/package/scripts/params.py      |  2 +-
 .../services/YARN/package/scripts/params.py     |  4 +-
 .../ZOOKEEPER/package/scripts/params.py         |  2 +-
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |  6 +--
 .../0.11.0.2.0.5.0/package/scripts/params.py    |  2 +-
 .../dummy_stack/HIVE/package/scripts/params.py  |  2 +-
 .../services/HIVE/package/scripts/params.py     |  2 +-
 35 files changed, 192 insertions(+), 107 deletions(-)
----------------------------------------------------------------------
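The essence of the change, ahead of the per-file diffs: each alert check now
kinits into its own credentials cache (ccache) and points curl at it through
the KRB5CCNAME environment variable, so kinit never populates the invoking
user's (typically root's) default ccache. A minimal standalone sketch of the
pattern, assuming MIT Kerberos klist/kinit under /usr/bin; the helper name is
illustrative and not part of the commit:

# Illustrative sketch only (not part of this commit): isolate alert kinits
# in a dedicated ccache so the caller's default ticket cache stays empty.
import hashlib
import os
import subprocess
import tempfile

def kerberos_env_for_alert(principal, keytab):
    # Hash principal|keytab so the same identity always reuses one cache file.
    digest = hashlib.md5("{0}|{1}".format(principal, keytab).encode("utf-8")).hexdigest()
    ccache = os.path.join(tempfile.gettempdir(), "web_alert_cc_" + digest)
    # klist -s is silent and exits non-zero when the cache is missing or all
    # of its tickets are expired.
    if subprocess.call(["/usr/bin/klist", "-s", ccache]) != 0:
        # -l 5m keeps tickets short-lived so regenerated keytabs take effect
        # quickly; -c directs the new ticket into the dedicated cache.
        subprocess.call(["/usr/bin/kinit", "-l", "5m", "-c", ccache,
                         "-kt", keytab, principal])
    return {"KRB5CCNAME": ccache}

The returned mapping is then passed as the env of the curl subprocess, so the
GSSAPI negotiation reads tickets from the alert-specific cache.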


http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-agent/src/main/python/ambari_agent/AlertSchedulerHandler.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/AlertSchedulerHandler.py b/ambari-agent/src/main/python/ambari_agent/AlertSchedulerHandler.py
index a53201e..a2ea8ef 100644
--- a/ambari-agent/src/main/python/ambari_agent/AlertSchedulerHandler.py
+++ b/ambari-agent/src/main/python/ambari_agent/AlertSchedulerHandler.py
@@ -268,7 +268,7 @@ class AlertSchedulerHandler():
       source['host_scripts_directory'] = self.host_scripts_dir
       alert = ScriptAlert(json_definition, source, self.config)
     elif source_type == AlertSchedulerHandler.TYPE_WEB:
-      alert = WebAlert(json_definition, source)
+      alert = WebAlert(json_definition, source, self.config)
 
     if alert is not None:
       alert.set_cluster(clusterName, hostName)
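(The agent configuration is now handed to WebAlert so the alert can read the
agent's tmp_dir when choosing where to place its dedicated Kerberos ccache;
see the web_alert.py hunk below.)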

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py b/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
index 5052ca0..d7a833e 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
@@ -22,20 +22,33 @@ import logging
 import time
 import subprocess
 import os
-
+from tempfile import gettempdir
 from alerts.base_alert import BaseAlert
 from collections import namedtuple
 from resource_management.libraries.functions.get_port_from_url import get_port_from_url
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.functions import get_klist_path
 from ambari_commons import OSCheck
 from ambari_commons.inet_utils import resolve_address
 
+# hashlib is supplied as of Python 2.5 as the replacement interface for md5
+# and other secure hashes.  In 2.6, md5 is deprecated.  Import hashlib if
+# available, avoiding a deprecation warning under 2.6.  Import md5 otherwise,
+# preserving 2.4 compatibility.
+try:
+  import hashlib
+  _md5 = hashlib.md5
+except ImportError:
+  import md5
+  _md5 = md5.new
+
 logger = logging.getLogger()
 
 CURL_CONNECTION_TIMEOUT = '20'
 
 class WebAlert(BaseAlert):
   
-  def __init__(self, alert_meta, alert_source_meta):
+  def __init__(self, alert_meta, alert_source_meta, config):
     super(WebAlert, self).__init__(alert_meta, alert_source_meta)
     
     # extract any lookup keys from the URI structure
@@ -44,7 +57,9 @@ class WebAlert(BaseAlert):
       uri = alert_source_meta['uri']
       self.uri_property_keys = self._lookup_uri_property_keys(uri)
 
-      
+    self.config = config
+
+
   def _collect(self):
     if self.uri_property_keys is None:
       raise Exception("Could not determine result. URL(s) were not defined.")
@@ -131,14 +146,38 @@ class WebAlert(BaseAlert):
        kerberos_keytab = self._get_configuration_value(self.uri_property_keys.kerberos_keytab)

      if kerberos_principal is not None and kerberos_keytab is not None:
-        os.system("kinit -kt {0} {1} > /dev/null".format(kerberos_keytab, kerberos_principal))
+        # Create the kerberos credentials cache (ccache) file and set it in the environment to use
+        # when executing curl. Use the md5 hash of the combination of the principal and keytab file
+        # to generate a (relatively) unique cache filename so that we can use it as needed.
+        tmp_dir = self.config.get('agent', 'tmp_dir')
+        if tmp_dir is None:
+          tmp_dir = gettempdir()
+
+        ccache_file_name = _md5("{0}|{1}".format(kerberos_principal, kerberos_keytab)).hexdigest()
+        ccache_file_path = "{0}{1}web_alert_cc_{2}".format(tmp_dir, os.sep, ccache_file_name)
+        kerberos_env = {'KRB5CCNAME': ccache_file_path}
+
+        # If there are no tickets in the cache or they are expired, perform a kinit, else use what
+        # is in the cache
+        klist_path_local = get_klist_path()
+
+        if os.system("{0} -s {1}".format(klist_path_local, ccache_file_path)) != 0:
+          kinit_path_local = get_kinit_path()
+          logger.debug("[Alert][{0}] Enabling Kerberos authentication via GSSAPI using ccache at {1}."
+                       .format(self.get_name(), ccache_file_path))
+          os.system("{0} -l 5m -c {1} -kt {2} {3} > /dev/null".format(kinit_path_local, ccache_file_path, kerberos_keytab, kerberos_principal))
+        else:
+          logger.debug("[Alert][{0}] Kerberos authentication via GSSAPI already enabled using ccache at {1}."
+                       .format(self.get_name(), ccache_file_path))
+      else:
+        kerberos_env = None
 
       # substitute 0.0.0.0 in url with actual fqdn
       url = url.replace('0.0.0.0', self.host_name)
       start_time = time.time()
       curl = subprocess.Popen(['curl', '--negotiate', '-u', ':', '-sL', '-w',
         '%{http_code}', url, '--connect-timeout', CURL_CONNECTION_TIMEOUT,
-        '-o', '/dev/null'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        '-o', '/dev/null'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=kerberos_env)
 
       out, err = curl.communicate()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
index 6bcd7cd..f6db722 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
@@ -24,8 +24,10 @@ import platform
 
 from resource_management.libraries.functions.default import *
 from resource_management.libraries.functions.format import *
+from resource_management.libraries.functions.find_path import *
 from resource_management.libraries.functions.get_kinit_path import *
 from resource_management.libraries.functions.get_kdestroy_path import *
+from resource_management.libraries.functions.get_klist_path import *
 from resource_management.libraries.functions.get_unique_id_and_date import *
 from resource_management.libraries.functions.check_process_status import *
 from resource_management.libraries.functions.is_empty import *

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-common/src/main/python/resource_management/libraries/functions/get_kdestroy_path.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/get_kdestroy_path.py b/ambari-common/src/main/python/resource_management/libraries/functions/get_kdestroy_path.py
index f6d7f78..085ba6a 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/get_kdestroy_path.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/get_kdestroy_path.py
@@ -21,20 +21,14 @@ Ambari Agent
 """
 
 __all__ = ["get_kdestroy_path"]
-import os
+from find_path import find_path
 
-def get_kdestroy_path():
-
-  kdestroy_path = ""
-
-  for x in ["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"]:
-    if not x:
-      continue
 
-    path = os.path.join(x,"kdestroy")
-
-    if os.path.isfile(path):
-      kdestroy_path = path
-      break
-
-  return kdestroy_path
+def get_kdestroy_path():
+  """
+  Searches for the kdestroy executable using a default set of paths to search:
+    /usr/bin
+    /usr/kerberos/bin
+    /usr/sbin
+  """
+  return find_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"], "kdestroy")
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-common/src/main/python/resource_management/libraries/functions/get_kinit_path.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/get_kinit_path.py b/ambari-common/src/main/python/resource_management/libraries/functions/get_kinit_path.py
index 98434a2..7904a0f 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/get_kinit_path.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/get_kinit_path.py
@@ -21,20 +21,14 @@ Ambari Agent
 """
 
 __all__ = ["get_kinit_path"]
-import os
+from find_path import find_path
 
-def get_kinit_path():
-
-  kinit_path = ""
-  
-  for x in ["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"]:
-    if not x:
-      continue
-    
-    path = os.path.join(x,"kinit")
 
-    if os.path.isfile(path):
-      kinit_path = path
-      break
-    
-  return kinit_path
+def get_kinit_path():
+  """
+  Searches for the kinit executable using a default set of paths to search:
+    /usr/bin
+    /usr/kerberos/bin
+    /usr/sbin
+  """
+  return find_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"], "kinit")
\ No newline at end of file
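
Both helpers above now delegate to a new find_path module whose contents are
not included in this message. Inferred from the call sites (a list of search
directories plus an executable name, with the old loops returning "" when
nothing was found), a plausible sketch of find_path.py might be:

__all__ = ["find_path"]
import os

# Assumed implementation; the actual find_path.py is not shown in this diff.
def find_path(search_directories, filename):
  """
  Returns the first existing file formed by joining filename onto each
  directory in search_directories, or "" if no candidate exists.
  """
  for directory in search_directories:
    path = os.path.join(directory, filename)
    if os.path.isfile(path):
      return path
  return ""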

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_webhcat_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_webhcat_server.py
index 9773ec3..0b7535c 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_webhcat_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_webhcat_server.py
@@ -24,9 +24,13 @@ import socket
 import time
 import urllib2
 
+from resource_management.core.environment import Environment
 from resource_management.core.resources import Execute
+from resource_management.core.shell import call
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.functions import get_klist_path
+from os import getpid, sep
 
 RESULT_CODE_OK = "OK"
 RESULT_CODE_CRITICAL = "CRITICAL"
@@ -101,16 +105,30 @@ def execute(parameters=None, host_name=None):
       # substitute _HOST in kerberos principal with actual fqdn
       webhcat_principal = webhcat_principal.replace('_HOST', host_name)
 
-      kinit_path_local = get_kinit_path()
-      kinit_command = format("{kinit_path_local} -kt {webhcat_keytab} {webhcat_principal}; ")
+      # Create the kerberos credentials cache (ccache) file and set it in the environment to use
+      # when executing curl
+      env = Environment.get_instance()
+      ccache_file = "{0}{1}webhcat_alert_cc_{2}".format(env.tmp_dir, sep, getpid())
+      kerberos_env = {'KRB5CCNAME': ccache_file}
 
-      # kinit so that curl will work with --negotiate
-      Execute(kinit_command)
+      klist_path_local = get_klist_path()
+      klist_command = format("{klist_path_local} -s {ccache_file}")
+
+      # Determine if we need to kinit by testing to see if the relevant cache exists and has
+      # non-expired tickets.  Tickets are marked to expire after 5 minutes to help reduce the number
+      # of kinits we do but recover quickly when keytabs are regenerated
+      return_code, _ = call(klist_command)
+      if return_code != 0:
+        kinit_path_local = get_kinit_path()
+        kinit_command = format("{kinit_path_local} -l 5m -c {ccache_file} -kt {webhcat_keytab} {webhcat_principal}; ")
+
+        # kinit so that curl will work with --negotiate
+        Execute(kinit_command)
 
       # make a single curl call to get just the http code
       curl = subprocess.Popen(['curl', '--negotiate', '-u', ':', '-sL', '-w',
         '%{http_code}', '--connect-timeout', CURL_CONNECTION_TIMEOUT,
-        '-o', '/dev/null', query_url], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        '-o', '/dev/null', query_url], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=kerberos_env)
 
       stdout, stderr = curl.communicate()
 
@@ -134,7 +152,7 @@ def execute(parameters=None, host_name=None):
       start_time = time.time()
       curl = subprocess.Popen(['curl', '--negotiate', '-u', ':', '-sL',
         '--connect-timeout', CURL_CONNECTION_TIMEOUT, query_url, ],
-        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=kerberos_env)
 
       stdout, stderr = curl.communicate()
       total_time = time.time() - start_time

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py
index 23a3482..9e2775b 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py
@@ -17,11 +17,14 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 """
+from resource_management.core.environment import Environment
 from resource_management.core.resources import Execute
+from resource_management.core.shell import call
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.functions import get_klist_path
 from ambari_commons.os_check import OSConst, OSCheck
-
+from os import getpid, sep
 from urlparse import urlparse
 
 RESULT_CODE_OK = 'OK'
@@ -70,6 +73,8 @@ def execute(parameters=None, host_name=None):
 
   try:
    # kinit if security is enabled so that oozie-env.sh can make the web request
+    kerberos_env = None
+
     if security_enabled:
       if OOZIE_KEYTAB in parameters and OOZIE_PRINCIPAL in parameters:
         oozie_keytab = parameters[OOZIE_KEYTAB]
@@ -80,14 +85,28 @@ def execute(parameters=None, host_name=None):
       else:
        return (RESULT_CODE_UNKNOWN, ['The Oozie keytab and principal are required parameters when security is enabled.'])
 
-      kinit_path_local = get_kinit_path()
-      kinit_command = format("{kinit_path_local} -kt {oozie_keytab} {oozie_principal}; ")
+      # Create the kerberos credentials cache (ccache) file and set it in the environment to use
+      # when executing curl
+      env = Environment.get_instance()
+      ccache_file = "{0}{1}oozie_alert_cc_{2}".format(env.tmp_dir, sep, getpid())
+      kerberos_env = {'KRB5CCNAME': ccache_file}
+
+      klist_path_local = get_klist_path()
+      klist_command = format("{klist_path_local} -s {ccache_file}")
+
+      # Determine if we need to kinit by testing to see if the relevant cache exists and has
+      # non-expired tickets.  Tickets are marked to expire after 5 minutes to help reduce the number
+      # of kinits we do but recover quickly when keytabs are regenerated
+      return_code, _ = call(klist_command)
+      if return_code != 0:
+        kinit_path_local = get_kinit_path()
+        kinit_command = format("{kinit_path_local} -l 5m -kt {oozie_keytab} {oozie_principal}; ")
 
-      # kinit
-      Execute(kinit_command)
+        # kinit
+        Execute(kinit_command, environment=kerberos_env)
 
     # execute the command
-    Execute(command)
+    Execute(command, environment=kerberos_env)
 
     return (RESULT_CODE_OK, ["Successful connection to {0}".format(oozie_url)])
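
(Note the two variants of this gate: the WebHCat alert passes -c {ccache_file}
to kinit explicitly, while the Oozie alert above relies on the KRB5CCNAME value
in kerberos_env, supplied through Execute's environment argument. Either way
the tickets land in an alert-specific cache rather than the invoking user's
default one, which is the root-ticket leak this commit removes.)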
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py
index 092149d..1715f82 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_service.py
@@ -70,7 +70,8 @@ def oozie_service(action = 'start', rolling_restart=False):
       Execute( cmd1, user = params.oozie_user, not_if = no_op_test,
         ignore_failures = True )
 
-      not_if_command = format("{kinit_if_needed} hadoop --config {hadoop_conf_dir} dfs -ls /user/oozie/share | awk 'BEGIN {{count=0;}} /share/ {{count++}} END {{if (count > 0) {{exit 0}} else {{exit 1}}}}'")
+      not_if_command = as_user(format("{kinit_if_needed} hadoop --config {hadoop_conf_dir} dfs -ls /user/oozie/share | awk 'BEGIN {{count=0;}} /share/ {{count++}} END {{if (count > 0) {{exit 0}} else {{exit 1}}}}'"),
+                               params.oozie_user)
       Execute( cmd2, user = params.oozie_user, not_if = not_if_command,
         path = params.execute_path )
     

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HBASE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HBASE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HBASE/package/scripts/params.py
index f368703..88e02e7 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HBASE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HBASE/package/scripts/params.py
@@ -102,7 +102,7 @@ master_keytab_path = config['configurations']['hbase-site']['hbase.master.keytab
 regionserver_keytab_path = config['configurations']['hbase-site']['hbase.regionserver.keytab.file']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 hbase_user_keytab = config['configurations']['hbase-env']['hbase_user_keytab']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 if security_enabled:
   kinit_cmd = format("{kinit_path_local} -kt {hbase_user_keytab} {hbase_principal_name};")
 else:
@@ -123,7 +123,6 @@ hostname = config["hostname"]
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/params.py
index 9fbce1d..7360835 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/params.py
@@ -57,7 +57,7 @@ hdfs_exclude_file = default("/clusterHostInfo/decom_dn_hosts", [])
 exclude_file_path = config['configurations']['hdfs-site']['dfs.hosts.exclude']
 update_exclude_file_only = default("/commandParams/update_exclude_file_only",False)
 
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 #hosts
 hostname = config["hostname"]
 rm_host = default("/clusterHostInfo/rm_host", [])

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/params.py
index 90235e9..9dd7f89 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/params.py
@@ -119,7 +119,7 @@ smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 fs_root = config['configurations']['core-site']['fs.defaultFS']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
 
 #hive_env

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/files/alert_check_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/files/alert_check_oozie_server.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/files/alert_check_oozie_server.py
index a5a066b..9e2775b 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/files/alert_check_oozie_server.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/files/alert_check_oozie_server.py
@@ -17,11 +17,14 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 """
+from resource_management.core.environment import Environment
 from resource_management.core.resources import Execute
+from resource_management.core.shell import call
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.functions import get_klist_path
 from ambari_commons.os_check import OSConst, OSCheck
-
+from os import getpid, sep
 from urlparse import urlparse
 
 RESULT_CODE_OK = 'OK'
@@ -70,6 +73,8 @@ def execute(parameters=None, host_name=None):
 
   try:
    # kinit if security is enabled so that oozie-env.sh can make the web request
+    kerberos_env = None
+
     if security_enabled:
       if OOZIE_KEYTAB in parameters and OOZIE_PRINCIPAL in parameters:
         oozie_keytab = parameters[OOZIE_KEYTAB]
@@ -80,14 +85,28 @@ def execute(parameters=None, host_name=None):
       else:
        return (RESULT_CODE_UNKNOWN, ['The Oozie keytab and principal are required parameters when security is enabled.'])
 
-      kinit_path_local = get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-      kinit_command = format("{kinit_path_local} -kt {oozie_keytab} {oozie_principal}; ")
+      # Create the kerberos credentials cache (ccache) file and set it in the environment to use
+      # when executing curl
+      env = Environment.get_instance()
+      ccache_file = "{0}{1}oozie_alert_cc_{2}".format(env.tmp_dir, sep, getpid())
+      kerberos_env = {'KRB5CCNAME': ccache_file}
+
+      klist_path_local = get_klist_path()
+      klist_command = format("{klist_path_local} -s {ccache_file}")
+
+      # Determine if we need to kinit by testing to see if the relevant cache exists and has
+      # non-expired tickets.  Tickets are marked to expire after 5 minutes to help reduce the number
+      # of kinits we do but recover quickly when keytabs are regenerated
+      return_code, _ = call(klist_command)
+      if return_code != 0:
+        kinit_path_local = get_kinit_path()
+        kinit_command = format("{kinit_path_local} -l 5m -kt {oozie_keytab} {oozie_principal}; ")
 
-      # kinit
-      Execute(kinit_command)
+        # kinit
+        Execute(kinit_command, environment=kerberos_env)
 
     # execute the command
-    Execute(command)
+    Execute(command, environment=kerberos_env)
 
     return (RESULT_CODE_OK, ["Successful connection to {0}".format(oozie_url)])
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/params.py
index f13fbaf..a7236ce 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/params.py
@@ -101,7 +101,7 @@ catalina_properties_common_loader = "/usr/lib/hive-hcatalog/share/hcatalog/*.jar
 if (len(hive_jar_files) != 0):
     catalina_properties_common_loader = hive_jar_files + "," + catalina_properties_common_loader
 
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 oozie_service_keytab = config['configurations']['oozie-site']['oozie.service.HadoopAccessorService.keytab.file']
 oozie_principal = config['configurations']['oozie-site']['oozie.service.HadoopAccessorService.kerberos.principal']
 smokeuser_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
@@ -118,7 +118,6 @@ oozie_log_dir = config['configurations']['oozie-env']['oozie_log_dir']
 oozie_data_dir = config['configurations']['oozie-env']['oozie_data_dir']
 oozie_server_port = get_port_from_url(config['configurations']['oozie-site']['oozie.base.url'])
 oozie_server_admin_port = config['configurations']['oozie-env']['oozie_admin_port']
-oozie_env_sh_template = config['configurations']['oozie-env']['content']
 fs_root = config['configurations']['core-site']['fs.defaultFS']
 
 put_shared_lib_to_hdfs_cmd = format("{oozie_setup_sh} sharelib create -fs {fs_root} -locallib {oozie_shared_lib}")
@@ -145,14 +144,11 @@ if (('oozie-log4j' in config['configurations']) and ('content' in config['config
 else:
   log4j_props = None
 
-oozie_hdfs_user_dir = format("/user/{oozie_user}")
 oozie_hdfs_user_mode = 0775
 #for create_hdfs_directory
-hostname = config["hostname"]
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/params.py
index b334f3b..b2c5441 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/params.py
@@ -46,7 +46,7 @@ smokeuser = config['configurations']['cluster-env']['smokeuser']
 user_group = config['configurations']['cluster-env']['user_group']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 pig_env_sh_template = config['configurations']['pig-env']['content']
 
 # not supporting 32 bit jdk.

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/WEBHCAT/package/files/alert_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/WEBHCAT/package/files/alert_webhcat_server.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/WEBHCAT/package/files/alert_webhcat_server.py
index 970ddde..c484f0b 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/WEBHCAT/package/files/alert_webhcat_server.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/WEBHCAT/package/files/alert_webhcat_server.py
@@ -24,9 +24,13 @@ import socket
 import time
 import urllib2
 
+from resource_management.core.environment import Environment
 from resource_management.core.resources import Execute
+from resource_management.core.shell import call
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.functions import get_klist_path
+from os import getpid, sep
 
 RESULT_CODE_OK = "OK"
 RESULT_CODE_CRITICAL = "CRITICAL"
@@ -101,16 +105,30 @@ def execute(parameters=None, host_name=None):
       # substitute _HOST in kerberos principal with actual fqdn
       webhcat_principal = webhcat_principal.replace('_HOST', host_name)
 
-      kinit_path_local = get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-      kinit_command = format("{kinit_path_local} -kt {webhcat_keytab} {webhcat_principal}; ")
+      # Create the kerberos credentials cache (ccache) file and set it in the environment to use
+      # when executing curl
+      env = Environment.get_instance()
+      ccache_file = "{0}{1}webhcat_alert_cc_{2}".format(env.tmp_dir, sep, getpid())
+      kerberos_env = {'KRB5CCNAME': ccache_file}
 
-      # kinit so that curl will work with --negotiate
-      Execute(kinit_command)
+      klist_path_local = get_klist_path()
+      klist_command = format("{klist_path_local} -s {ccache_file}")
+
+      # Determine if we need to kinit by testing to see if the relevant cache exists and has
+      # non-expired tickets.  Tickets are marked to expire after 5 minutes to help reduce the number
+      # of kinits we do but recover quickly when keytabs are regenerated
+      return_code, _ = call(klist_command)
+      if return_code != 0:
+        kinit_path_local = get_kinit_path()
+        kinit_command = format("{kinit_path_local} -l 5m -c {ccache_file} -kt {webhcat_keytab} {webhcat_principal}; ")
+
+        # kinit so that curl will work with --negotiate
+        Execute(kinit_command)
 
       # make a single curl call to get just the http code
       curl = subprocess.Popen(['curl', '--negotiate', '-u', ':', '-sL', '-w',
         '%{http_code}', '--connect-timeout', CURL_CONNECTION_TIMEOUT,
-        '-o', '/dev/null', query_url], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        '-o', '/dev/null', query_url], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=kerberos_env)
 
       stdout, stderr = curl.communicate()
 
@@ -134,7 +152,7 @@ def execute(parameters=None, host_name=None):
       start_time = time.time()
       curl = subprocess.Popen(['curl', '--negotiate', '-u', ':', '-sL',
         '--connect-timeout', CURL_CONNECTION_TIMEOUT, query_url, ],
-        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=kerberos_env)
 
       stdout, stderr = curl.communicate()
       total_time = time.time() - start_time

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/YARN/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/YARN/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/YARN/package/scripts/params.py
index 187ddc0..5a31518 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/YARN/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/YARN/package/scripts/params.py
@@ -64,7 +64,7 @@ smokeuser = config['configurations']['cluster-env']['smokeuser']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 yarn_executor_container_group = config['configurations']['yarn-site']['yarn.nodemanager.linux-container-executor.group']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 rm_hosts = config['clusterHostInfo']['rm_host']
 rm_host = rm_hosts[0]
 rm_port = config['configurations']['yarn-site']['yarn.resourcemanager.webapp.address'].split(':')[-1]
@@ -149,9 +149,7 @@ jobhistory_heapsize = default("/configurations/mapred-env/jobhistory_heapsize",
 #for create_hdfs_directory
 hostname = config["hostname"]
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/ZOOKEEPER/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/ZOOKEEPER/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/ZOOKEEPER/package/scripts/params.py
index b668f67..a0d504b 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/ZOOKEEPER/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/ZOOKEEPER/package/scripts/params.py
@@ -77,7 +77,7 @@ security_enabled = config['configurations']['cluster-env']['security_enabled']
 
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 
 #log4j.properties
 if (('zookeeper-log4j' in config['configurations']) and ('content' in config['configurations']['zookeeper-log4j'])):

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/HDP/2.0.6.GlusterFS/services/YARN/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6.GlusterFS/services/YARN/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6.GlusterFS/services/YARN/package/scripts/params.py
index 0fdaf18..55fda42 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6.GlusterFS/services/YARN/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6.GlusterFS/services/YARN/package/scripts/params.py
@@ -36,7 +36,7 @@ smokeuser = config['configurations']['cluster-env']['smokeuser']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 yarn_executor_container_group = config['configurations']['yarn-site']['yarn.nodemanager.linux-container-executor.group']
-kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 rm_hosts = config['clusterHostInfo']['rm_host']
 rm_host = rm_hosts[0]
 rm_port = config['configurations']['yarn-site']['yarn.resourcemanager.webapp.address'].split(':')[-1]
@@ -116,10 +116,7 @@ mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapredu
 
 #for create_hdfs_directory
 hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/params.py
index 056380b..78d22b4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/params.py
@@ -56,7 +56,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py
index 0fdaf18..55fda42 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py
@@ -36,7 +36,7 @@ smokeuser = config['configurations']['cluster-env']['smokeuser']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 yarn_executor_container_group = config['configurations']['yarn-site']['yarn.nodemanager.linux-container-executor.group']
-kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 rm_hosts = config['clusterHostInfo']['rm_host']
 rm_host = rm_hosts[0]
 rm_port = config['configurations']['yarn-site']['yarn.resourcemanager.webapp.address'].split(':')[-1]
@@ -116,10 +116,7 @@ mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapredu
 
 #for create_hdfs_directory
 hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KNOX/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KNOX/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KNOX/package/scripts/params.py
index b11eddb..b5c61e8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KNOX/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KNOX/package/scripts/params.py
@@ -131,7 +131,7 @@ java_home = config['hostLevelParams']['java_home']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 if security_enabled:
   knox_keytab_path = config['configurations']['knox-env']['knox_keytab_path']
   _hostname_lowercase = config['hostname'].lower()

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/SLIDER/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/SLIDER/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/SLIDER/package/scripts/params.py
index 121bbd4..9b6731e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/SLIDER/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/SLIDER/package/scripts/params.py
@@ -39,7 +39,7 @@ hadoop_conf_dir = "/etc/hadoop/conf"
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smokeuser_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 slider_env_sh_template = config['configurations']['slider-env']['content']
 
 java64_home = config['hostLevelParams']['java_home']

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/params.py
index a548f98..e48655d 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/scripts/params.py
@@ -38,7 +38,7 @@ hadoop_ssl_enabled = default("/configurations/core-site/hadoop.ssl.enabled", Fal
 _authentication = config['configurations']['core-site']['hadoop.security.authentication']
 security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 smoke_user_keytab = config['configurations']['hadoop-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 rm_host = config['clusterHostInfo']['rm_host'][0]
 rm_port = config['configurations']['yarn-site']['yarn.resourcemanager.webapp.address'].split(':')[-1]
 rm_https_port = "8090"

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HBASE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HBASE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HBASE/package/scripts/params.py
index 8c4f5e5..f745e39 100644
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HBASE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HBASE/package/scripts/params.py
@@ -102,7 +102,7 @@ master_keytab_path = config['configurations']['hbase-site']['hbase.master.keytab
 regionserver_keytab_path = config['configurations']['hbase-site']['hbase.regionserver.keytab.file']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 hbase_user_keytab = config['configurations']['hbase-env']['hbase_user_keytab']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 if security_enabled:
   kinit_cmd = format("{kinit_path_local} -kt {hbase_user_keytab} {hbase_principal_name};")
 else:
@@ -123,7 +123,6 @@ hostname = config["hostname"]
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HDFS/package/scripts/params.py
index ff61dd7..91aca2f 100644
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HDFS/package/scripts/params.py
@@ -57,7 +57,7 @@ hdfs_exclude_file = default("/clusterHostInfo/decom_dn_hosts", [])
 exclude_file_path = config['configurations']['hdfs-site']['dfs.hosts.exclude']
 update_exclude_file_only = config['commandParams']['update_exclude_file_only']
 
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 #hosts
 hostname = config["hostname"]
 rm_host = default("/clusterHostInfo/rm_host", [])

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/params.py
index a9a67f7..fed3287 100644
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/HIVE/package/scripts/params.py
@@ -118,7 +118,7 @@ smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 fs_root = config['configurations']['core-site']['fs.defaultFS']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
 
 #hive_env

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/NAGIOS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/NAGIOS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/NAGIOS/package/scripts/params.py
index 5a0ffbb..8bec940 100644
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/NAGIOS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/NAGIOS/package/scripts/params.py
@@ -229,7 +229,7 @@ java64_home = config['hostLevelParams']['java_home']
 check_cpu_on = is_jdk_greater_6(java64_home)
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 nagios_keytab_path = default("/configurations/nagios-env/nagios_keytab_path", "/etc/security/keytabs/nagios.service.keytab")
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 
 dfs_ha_enabled = False
 dfs_ha_nameservices = default("/configurations/hdfs-site/dfs.nameservices", None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/OOZIE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/OOZIE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/OOZIE/package/scripts/params.py
index 7659529..b46ab73 100644
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/OOZIE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/OOZIE/package/scripts/params.py
@@ -101,7 +101,7 @@ catalina_properties_common_loader = "/usr/lib/hive-hcatalog/share/hcatalog/*.jar
 if (len(hive_jar_files) != 0):
     catalina_properties_common_loader = hive_jar_files + "," + catalina_properties_common_loader
 
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 oozie_service_keytab = config['configurations']['oozie-site']['oozie.service.HadoopAccessorService.keytab.file']
 oozie_principal = config['configurations']['oozie-site']['oozie.service.HadoopAccessorService.kerberos.principal']
 smokeuser_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
@@ -145,14 +145,11 @@ if (('oozie-log4j' in config['configurations']) and ('content' in config['config
 else:
   log4j_props = None
 
-oozie_hdfs_user_dir = format("/user/{oozie_user}")
 oozie_hdfs_user_mode = 0775
 #for create_hdfs_directory
-hostname = config["hostname"]
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/PIG/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/PIG/package/scripts/params.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/PIG/package/scripts/params.py
index 60a9806..6284aed 100644
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/PIG/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/PIG/package/scripts/params.py
@@ -46,7 +46,7 @@ smokeuser = config['configurations']['cluster-env']['smokeuser']
 user_group = config['configurations']['cluster-env']['user_group']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 pig_env_sh_template = config['configurations']['pig-env']['content']
 
 # not supporting 32 bit jdk.

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/YARN/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/YARN/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/YARN/package/scripts/params.py
index 6e7f170..23bbc39 100644
--- 
a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/YARN/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/YARN/package/scripts/params.py
@@ -64,7 +64,7 @@ smokeuser = 
config['configurations']['cluster-env']['smokeuser']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 yarn_executor_container_group = config['configurations']['yarn-site']['yarn.nodemanager.linux-container-executor.group']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 rm_hosts = config['clusterHostInfo']['rm_host']
 rm_host = rm_hosts[0]
 rm_port = config['configurations']['yarn-site']['yarn.resourcemanager.webapp.address'].split(':')[-1]
@@ -149,9 +149,7 @@ jobhistory_heapsize = default("/configurations/mapred-env/jobhistory_heapsize",
 #for create_hdfs_directory
 hostname = config["hostname"]
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code
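
The comments kept in this hunk point at the pattern that follows in each of
these files: functools.partial pre-binds the arguments shared by every
HdfsDirectory call (HDFS user, keytab, kinit path, and so on) so that call
sites only supply what differs. A rough sketch of the pattern with
hypothetical parameter names, not the exact HdfsDirectory resource signature:

import functools

def hdfs_directory(path, hdfs_user=None, keytab=None,
                   kinit_path_local=None, mode=None):
    # Stand-in for the HdfsDirectory resource: would create 'path' in
    # HDFS after authenticating as hdfs_user with the given keytab.
    pass

# Bind the common arguments once; later calls pass only the specifics.
HdfsDirectory = functools.partial(
    hdfs_directory,
    hdfs_user="hdfs",
    keytab="/etc/security/keytabs/hdfs.headless.keytab",
    kinit_path_local="/usr/bin/kinit",
)

HdfsDirectory("/user/oozie", mode=0775)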

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/ZOOKEEPER/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/ZOOKEEPER/package/scripts/params.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/ZOOKEEPER/package/scripts/params.py
index 9fa99b5..fdf7ddc 100644
--- a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/ZOOKEEPER/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/services/ZOOKEEPER/package/scripts/params.py
@@ -77,7 +77,7 @@ security_enabled = config['configurations']['cluster-env']['security_enabled']
 
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 
 #log4j.properties
 if (('zookeeper-log4j' in config['configurations']) and ('content' in config['configurations']['zookeeper-log4j'])):

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py 
b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
index 321a9c1..31c8d0d 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
@@ -58,8 +58,8 @@ class TestOozieServer(RMFTestCase):
         user = 'oozie',
         )
     self.assertResourceCalled('Execute', ' hadoop --config /etc/hadoop/conf dfs -put /usr/lib/oozie/share /user/oozie ; hadoop --config /etc/hadoop/conf dfs -chmod -R 755 /user/oozie/share',
-        not_if = " hadoop --config /etc/hadoop/conf dfs -ls /user/oozie/share | awk 'BEGIN {count=0;} /share/ {count++} END {if (count > 0) {exit 0} else {exit 1}}'",
-        user = 'oozie',
+        not_if = shell.as_user(" hadoop --config /etc/hadoop/conf dfs -ls /user/oozie/share | awk 'BEGIN {count=0;} /share/ {count++} END {if (count > 0) {exit 0} else {exit 1}}'", "oozie"),
+        user = u'oozie',
         path = ['/usr/bin:/usr/bin'],
         )
     self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-start.sh',
@@ -115,7 +115,7 @@ class TestOozieServer(RMFTestCase):
                               user = 'oozie',
                               )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/oozie.service.keytab oozie/c6402.ambari.apache....@example.com; hadoop --config /etc/hadoop/conf dfs -put /usr/lib/oozie/share /user/oozie ; hadoop --config /etc/hadoop/conf dfs -chmod -R 755 /user/oozie/share',
-                              not_if = "/usr/bin/kinit -kt /etc/security/keytabs/oozie.service.keytab oozie/c6402.ambari.apache....@example.com; hadoop --config /etc/hadoop/conf dfs -ls /user/oozie/share | awk 'BEGIN {count=0;} /share/ {count++} END {if (count > 0) {exit 0} else {exit 1}}'",
+                              not_if = shell.as_user("/usr/bin/kinit -kt /etc/security/keytabs/oozie.service.keytab oozie/c6402.ambari.apache....@example.com; hadoop --config /etc/hadoop/conf dfs -ls /user/oozie/share | awk 'BEGIN {count=0;} /share/ {count++} END {if (count > 0) {exit 0} else {exit 1}}'", "oozie"),
                               user = 'oozie',
                               path = ['/usr/bin:/usr/bin'],
                               )
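
These test updates mirror the core of the fix: the not_if guard commands are
now wrapped in shell.as_user(..., "oozie") instead of running directly as the
agent (root), so the kinit executed inside the guard lands its ticket in the
oozie user's credential cache rather than root's. A hypothetical sketch of
what an as_user-style wrapper does, not the real resource_management
implementation:

import pipes

def as_user_sketch(command, user):
    # Quote the command and hand it to su so it runs in the target
    # user's shell; any kerberos ticket it acquires then belongs to
    # that user's cache, not root's.
    return "su -s /bin/bash {0} -c {1}".format(user, pipes.quote(command))

print as_user_sketch("klist", "oozie")   # su -s /bin/bash oozie -c klist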

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_common_services/HIVE/0.11.0.2.0.5.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_common_services/HIVE/0.11.0.2.0.5.0/package/scripts/params.py
 
b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_common_services/HIVE/0.11.0.2.0.5.0/package/scripts/params.py
index 0cf89be..a49e231 100644
--- 
a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_common_services/HIVE/0.11.0.2.0.5.0/package/scripts/params.py
+++ 
b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_common_services/HIVE/0.11.0.2.0.5.0/package/scripts/params.py
@@ -56,7 +56,7 @@ smoke_user_keytab = 
config['configurations']['global']['smokeuser_keytab']
 
 security_enabled = config['configurations']['global']['security_enabled']
 
-kinit_path_local = get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = get_kinit_path()
 hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
 
 #hive_env

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/params.py
 
b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/params.py
index 0cf89be..a49e231 100644
--- 
a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/params.py
+++ 
b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/params.py
@@ -56,7 +56,7 @@ smoke_user_keytab = 
config['configurations']['global']['smokeuser_keytab']
 
 security_enabled = config['configurations']['global']['security_enabled']
 
-kinit_path_local = get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = get_kinit_path()
 hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
 
 #hive_env

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c222ce6/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HIVE/package/scripts/params.py
 
b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HIVE/package/scripts/params.py
index 648913b..7817a4d 100644
--- 
a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HIVE/package/scripts/params.py
+++ 
b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HIVE/package/scripts/params.py
@@ -57,7 +57,7 @@ smoke_user_keytab = 
config['configurations']['global']['smokeuser_keytab']
 _authentication = 
config['configurations']['core-site']['hadoop.security.authentication']
 security_enabled = ( not is_empty(_authentication) and _authentication == 
'kerberos')
 
-kinit_path_local = get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = get_kinit_path()
 hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
 
 #hive_env
