Repository: ambari
Updated Branches:
  refs/heads/branch-2.1 b10ace406 -> 38493e4d4


AMBARI-12355. DATANODE START failed on secure cluster. (vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/38493e4d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/38493e4d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/38493e4d

Branch: refs/heads/branch-2.1
Commit: 38493e4d411e92a42e623a77fb02d8928bd6acb7
Parents: b10ace4
Author: Vitaly Brodetskyi <vbrodets...@hortonworks.com>
Authored: Fri Jul 10 14:50:32 2015 +0300
Committer: Vitaly Brodetskyi <vbrodets...@hortonworks.com>
Committed: Fri Jul 10 14:50:32 2015 +0300

----------------------------------------------------------------------
 .../HDFS/2.1.0.2.0/package/scripts/utils.py     |  4 +--
 .../python/stacks/2.0.6/HDFS/test_datanode.py   | 24 +++++++--------
 .../stacks/2.0.6/HDFS/test_journalnode.py       | 12 ++++----
 .../python/stacks/2.0.6/HDFS/test_namenode.py   | 32 ++++++++++----------
 .../python/stacks/2.0.6/HDFS/test_nfsgateway.py | 12 ++++----
 .../python/stacks/2.0.6/HDFS/test_snamenode.py  | 12 ++++----
 .../test/python/stacks/2.0.6/HDFS/test_zkfc.py  | 20 ++++++------
 7 files changed, 57 insertions(+), 59 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/38493e4d/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
index 745a8d4..a185b5f 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
@@ -185,9 +185,7 @@ def service(action=None, name=None, user=None, options="", 
create_pid_dir=False,
     }
     hadoop_env_exports.update(custom_export)
 
-  check_process = as_user(format(
-    "ls {pid_file} >/dev/null 2>&1 &&"
-    " ps -p `cat {pid_file}` >/dev/null 2>&1"), user=params.hdfs_user)
+  check_process = as_sudo(["test", "-f", pid_file]) + " && " + 
as_sudo(["pgrep", "-F", pid_file])
 
   # on STOP directories shouldn't be created
   # since during stop still old dirs are used (which were created during 
previous start)

http://git-wip-us.apache.org/repos/asf/ambari/blob/38493e4d/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py 
b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index b99f53a..efb17be 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -63,11 +63,11 @@ class TestDatanode(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start 
datanode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
     self.assertNoMoreResources()
 
@@ -82,7 +82,7 @@ class TestDatanode(RMFTestCase):
     )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
@@ -128,11 +128,11 @@ class TestDatanode(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
     self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] 
-H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start 
datanode',
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
     self.assertNoMoreResources()
 
@@ -166,11 +166,11 @@ class TestDatanode(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
     self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] 
-H -E /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config 
/usr/hdp/current/hadoop-client/conf start datanode',
         environment = {'HADOOP_LIBEXEC_DIR': 
'/usr/hdp/current/hadoop-client/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
     self.assertNoMoreResources()
 
@@ -207,11 +207,11 @@ class TestDatanode(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config 
/usr/hdp/current/hadoop-client/conf start datanode'",
         environment = {'HADOOP_LIBEXEC_DIR': 
'/usr/hdp/current/hadoop-client/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
     self.assertNoMoreResources()
 
@@ -226,7 +226,7 @@ class TestDatanode(RMFTestCase):
     )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
     self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] 
-H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop 
datanode',
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
@@ -255,7 +255,7 @@ class TestDatanode(RMFTestCase):
     )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
     self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] 
-H -E /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config 
/usr/hdp/current/hadoop-client/conf stop datanode',
         environment = {'HADOOP_LIBEXEC_DIR': 
'/usr/hdp/current/hadoop-client/libexec'},
@@ -286,7 +286,7 @@ class TestDatanode(RMFTestCase):
     )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config 
/usr/hdp/current/hadoop-client/conf stop datanode'",
         environment = {'HADOOP_LIBEXEC_DIR': 
'/usr/hdp/current/hadoop-client/libexec'},

http://git-wip-us.apache.org/repos/asf/ambari/blob/38493e4d/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py 
b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
index ecf4b06..e5da966 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
@@ -63,11 +63,11 @@ class TestJournalnode(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start 
journalnode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid",
     )
     self.assertNoMoreResources()
 
@@ -81,7 +81,7 @@ class TestJournalnode(RMFTestCase):
     )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop 
journalnode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
@@ -127,11 +127,11 @@ class TestJournalnode(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start 
journalnode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid",
     )
     self.assertNoMoreResources()
 
@@ -145,7 +145,7 @@ class TestJournalnode(RMFTestCase):
     )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop 
journalnode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},

http://git-wip-us.apache.org/repos/asf/ambari/blob/38493e4d/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py 
b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index abce658..a6bec42 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -81,11 +81,11 @@ class TestNamenode(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start 
namenode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
     )
     self.assertResourceCalled('Execute', "hdfs dfsadmin -fs 
hdfs://c6405.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
         tries=180,
@@ -191,11 +191,11 @@ class TestNamenode(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start 
namenode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
     )
     self.assertResourceCalled('Execute', "hdfs dfsadmin -fs 
hdfs://c6401.ambari.apache.org:8020 -safemode get | grep 'Safe mode is OFF'",
         tries=180,
@@ -260,7 +260,7 @@ class TestNamenode(RMFTestCase):
     )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
@@ -321,11 +321,11 @@ class TestNamenode(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start 
namenode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
     )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt 
/etc/security/keytabs/hdfs.headless.keytab hdfs',
                               user='hdfs',
@@ -384,7 +384,7 @@ class TestNamenode(RMFTestCase):
     )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
@@ -424,11 +424,11 @@ class TestNamenode(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start 
namenode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
     )
     self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://ns1 
-safemode get | grep 'Safe mode is OFF'",
         tries=180,
@@ -512,11 +512,11 @@ class TestNamenode(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start 
namenode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
     )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt 
/etc/security/keytabs/hdfs.headless.keytab hdfs',
         user = 'hdfs',
@@ -615,11 +615,11 @@ class TestNamenode(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start 
namenode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
     )
     self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://ns1 
-safemode get | grep 'Safe mode is OFF'",
         tries=180,
@@ -712,11 +712,11 @@ class TestNamenode(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start 
namenode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid",
     )
     self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://ns1 
-safemode get | grep 'Safe mode is OFF'",
         tries=180,

http://git-wip-us.apache.org/repos/asf/ambari/blob/38493e4d/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py 
b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
index a7e507e..37dcb20 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
@@ -66,14 +66,14 @@ class TestNFSGateway(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/root/hadoop_privileged_nfs3.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/root/hadoop_privileged_nfs3.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/root/hadoop_privileged_nfs3.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/root/hadoop_privileged_nfs3.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/root/hadoop_privileged_nfs3.pid",
     )
     self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] 
-H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start 
nfs3',
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec',
            'HADOOP_PRIVILEGED_NFS_LOG_DIR': u'/var/log/hadoop/root',
            'HADOOP_PRIVILEGED_NFS_PID_DIR': u'/var/run/hadoop/root',
            'HADOOP_PRIVILEGED_NFS_USER': u'hdfs'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/root/hadoop_privileged_nfs3.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/root/hadoop_privileged_nfs3.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/root/hadoop_privileged_nfs3.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/root/hadoop_privileged_nfs3.pid",
     )
     self.assertNoMoreResources()
 
@@ -87,7 +87,7 @@ class TestNFSGateway(RMFTestCase):
     )
     self.assertResourceCalled('File', 
'/var/run/hadoop/root/hadoop_privileged_nfs3.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/root/hadoop_privileged_nfs3.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/root/hadoop_privileged_nfs3.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/root/hadoop_privileged_nfs3.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/root/hadoop_privileged_nfs3.pid",
     )
     self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] 
-H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop 
nfs3',
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec',
@@ -139,14 +139,14 @@ class TestNFSGateway(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/root/hadoop_privileged_nfs3.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/root/hadoop_privileged_nfs3.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/root/hadoop_privileged_nfs3.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/root/hadoop_privileged_nfs3.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/root/hadoop_privileged_nfs3.pid",
     )
     self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] 
-H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start 
nfs3',
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec',
            'HADOOP_PRIVILEGED_NFS_LOG_DIR': u'/var/log/hadoop/root',
            'HADOOP_PRIVILEGED_NFS_PID_DIR': u'/var/run/hadoop/root',
            'HADOOP_PRIVILEGED_NFS_USER': u'hdfs'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/root/hadoop_privileged_nfs3.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/root/hadoop_privileged_nfs3.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/root/hadoop_privileged_nfs3.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/root/hadoop_privileged_nfs3.pid",
     )
     self.assertNoMoreResources()
 
@@ -160,7 +160,7 @@ class TestNFSGateway(RMFTestCase):
     )
     self.assertResourceCalled('File', 
'/var/run/hadoop/root/hadoop_privileged_nfs3.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/root/hadoop_privileged_nfs3.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/root/hadoop_privileged_nfs3.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/root/hadoop_privileged_nfs3.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/root/hadoop_privileged_nfs3.pid",
     )
     self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] 
-H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop 
nfs3',
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec',

http://git-wip-us.apache.org/repos/asf/ambari/blob/38493e4d/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py 
b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
index b5dc82d..dfbd887 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
@@ -70,11 +70,11 @@ class TestSNamenode(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p 
`cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start 
secondarynamenode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p 
`cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid",
     )
     self.assertNoMoreResources()
 
@@ -88,7 +88,7 @@ class TestSNamenode(RMFTestCase):
     )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p 
`cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop 
secondarynamenode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
@@ -144,11 +144,11 @@ class TestSNamenode(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p 
`cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start 
secondarynamenode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p 
`cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid",
     )
     self.assertNoMoreResources()
 
@@ -162,7 +162,7 @@ class TestSNamenode(RMFTestCase):
     )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps -p 
`cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F 
/var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop 
secondarynamenode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},

http://git-wip-us.apache.org/repos/asf/ambari/blob/38493e4d/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py 
b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
index d3dcaf7..744d3ba 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
@@ -97,11 +97,11 @@ class TestZkfc(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
     )
     self.assertNoMoreResources()
 
@@ -116,7 +116,7 @@ class TestZkfc(RMFTestCase):
     )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop zkfc'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
@@ -198,11 +198,11 @@ class TestZkfc(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
     )
     self.assertNoMoreResources()
 
@@ -216,7 +216,7 @@ class TestZkfc(RMFTestCase):
     )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop zkfc'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
@@ -299,11 +299,11 @@ class TestZkfc(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
     )
     self.assertNoMoreResources()
 
@@ -379,11 +379,11 @@ class TestZkfc(RMFTestCase):
                               )
     self.assertResourceCalled('File', 
'/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
         action = ['delete'],
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
     )
     self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s 
/bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  
/usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
-        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid 
>/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` 
>/dev/null 2>&1'",
+        not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f 
/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh 
[RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
     )
     self.assertNoMoreResources()
 

Reply via email to