Repository: ambari
Updated Branches:
  refs/heads/trunk 6625ae5dd -> e3f47c061


AMBARI-5204. Host clean up should rely on host check to tell it what processes to kill (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e3f47c06
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e3f47c06
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e3f47c06

Branch: refs/heads/trunk
Commit: e3f47c061da47ef805b08df78410885c20d7ae5f
Parents: 6625ae5
Author: Lisnichenko Dmitro <dlysniche...@hortonworks.com>
Authored: Tue Mar 25 18:32:49 2014 +0200
Committer: Lisnichenko Dmitro <dlysniche...@hortonworks.com>
Committed: Tue Mar 25 18:33:22 2014 +0200

----------------------------------------------------------------------
 .../src/main/python/ambari_agent/HostCleanup.py | 24 ++-----
 .../test/python/ambari_agent/TestHostCleanup.py | 67 +++++++++-----------
 2 files changed, 36 insertions(+), 55 deletions(-)
----------------------------------------------------------------------
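
Before this change the agent scanned /proc for java processes under JAVA_HOME; with the patch, HostCleanup takes the process list straight from the host check result file, the same way it already reads the package, user, and repository sections. Below is a minimal standalone sketch of that reading step, for illustration only: the [processes]/proc_list names are inferred from the hostcheck fixture in the tests further down, and the real constants and surrounding logic live in HostCleanup.py.

import ConfigParser
import logging
import sys

logger = logging.getLogger(__name__)

# Section/key names as implied by the hostcheck fixture in the tests below;
# the actual constants are defined in HostCleanup.py.
PROCESS_SECTION = "processes"
PROCESS_KEY = "proc_list"

def read_process_list(hostcheck_file):
  """Return the PIDs reported by the host check as a list of strings."""
  config = ConfigParser.RawConfigParser()
  config.read(hostcheck_file)
  propertyMap = {}
  try:
    if config.has_option(PROCESS_SECTION, PROCESS_KEY):
      propertyMap[PROCESS_SECTION] = config.get(PROCESS_SECTION, PROCESS_KEY).split(',')
  except:
    logger.warn("Cannot read process list: " + str(sys.exc_info()[0]))
  return propertyMap.get(PROCESS_SECTION, [])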


http://git-wip-us.apache.org/repos/asf/ambari/blob/e3f47c06/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/HostCleanup.py b/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
index 181b67b..da16dec 100644
--- a/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
+++ b/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
@@ -167,7 +167,11 @@ class HostCleanup:
         propertyMap[PACKAGE_SECTION] = config.get(PACKAGE_SECTION, PACKAGE_KEY).split(',')
     except:
       logger.warn("Cannot read package list: " + str(sys.exc_info()[0]))
-
+    try:
+      if config.has_option(PROCESS_SECTION, PROCESS_KEY):
+        propertyMap[PROCESS_SECTION] = config.get(PROCESS_SECTION, PROCESS_KEY).split(',')
+    except:
+      logger.warn("Cannot read process list: " + str(sys.exc_info()[0]))
     try:
       if config.has_option(USER_SECTION, USER_KEY):
         propertyMap[USER_SECTION] = config.get(USER_SECTION, USER_KEY).split(',')
@@ -192,19 +196,6 @@ class HostCleanup:
     except:
       logger.warn("Cannot read dir list: " + str(sys.exc_info()[0]))
 
-    process_items = []
-    try:
-      pids = [pid for pid in os.listdir('/proc') if pid.isdigit()]
-      for pid in pids:
-        cmd = open(os.path.join('/proc', pid, 'cmdline'), 'rb').read()
-        cmd = cmd.replace('\0', ' ')
-        if not 'AmbariServer' in cmd and not 'HostCleanup' in cmd:
-          if 'java' in cmd and JAVA_HOME in cmd:
-            process_items.append(int(pid))
-    except:
-      pass
-    propertyMap[PROCESS_SECTION] = process_items
-
     try:
       alt_map = {}
       if config.has_option(ALT_SECTION, ALT_KEYS[0]):
@@ -501,8 +492,6 @@ def main():
   parser.add_option("-s", "--silent",
                     action="store_true", dest="silent", default=False,
                     help="Silently accepts default prompt values")
-  parser.add_option('-j', '--java-home', default="/usr/jdk64/jdk1.6.0_31", dest="java_home",
-                    help="Use specified java_home.")
 
 
   (options, args) = parser.parse_args()
@@ -515,9 +504,6 @@ def main():
   handler.setFormatter(formatter)
   logger.addHandler(handler)
 
-  # set java_home
-  global JAVA_HOME
-  JAVA_HOME = options.java_home
 
   # set verbose
   if options.verbose:

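One consequence worth noting before the test changes: because the PIDs now come from proc_list in the host check file, read_host_check_file returns them as strings (e.g. ['323', '434']) rather than the ints previously gathered from /proc, and the -j/--java-home option goes away since JAVA_HOME is no longer needed to recognize Ambari java processes. A quick hypothetical usage, mirroring the updated tests (the file path here is illustrative):

propMap = HostCleanup.HostCleanup().read_host_check_file('hostcheck.result')
print propMap[PROCESS_SECTION]   # e.g. ['323', '434'] -- string PIDs from proc_list
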
http://git-wip-us.apache.org/repos/asf/ambari/blob/e3f47c06/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py b/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py
index 568711b..b624212 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py
@@ -44,27 +44,7 @@ ALT_KEYS = ["symlink_list", "target_list"]
 ALT_ERASE_CMD = "alternatives --remove {0} {1}"
 USER_HOMEDIR_SECTION = "usr_homedir"
 
-class TestHostCleanup(TestCase):
-
-  def setUp(self):
-    HostCleanup.logger = MagicMock()
-    self.hostcleanup = HostCleanup.HostCleanup()
-    # disable stdout
-    out = StringIO.StringIO()
-    sys.stdout = out
-
-
-  def tearDown(self):
-    # enable stdout
-    sys.stdout = sys.__stdout__
-
-  @patch("os.listdir", create=True, autospec=True)
-  def test_read_host_check_file_with_content(self, os_listdir_mock):
-    out = StringIO.StringIO()
-    sys.stdout = out
-    tmpfile = tempfile.mktemp()
-    f = open(tmpfile,'w')
-    fileContent = """[processes]
+hostcheck_result_fileContent = """[processes]
 proc_list = 323,434
 
 [users]
@@ -85,7 +65,28 @@ pkg_list = sqoop.noarch,hadoop-libhdfs.x86_64,rrdtool.x86_64,ganglia-gmond.x86_6
 
 [metadata]
 created = 2013-07-02 20:39:22.162757"""
-    f.write(fileContent)
+
+class TestHostCleanup(TestCase):
+
+  def setUp(self):
+    HostCleanup.logger = MagicMock()
+    self.hostcleanup = HostCleanup.HostCleanup()
+    # disable stdout
+    out = StringIO.StringIO()
+    sys.stdout = out
+
+
+  def tearDown(self):
+    # enable stdout
+    sys.stdout = sys.__stdout__
+
+  @patch("os.listdir", create=True, autospec=True)
+  def test_read_host_check_file_with_content(self, os_listdir_mock):
+    out = StringIO.StringIO()
+    sys.stdout = out
+    tmpfile = tempfile.mktemp()
+    f = open(tmpfile,'w')
+    f.write(hostcheck_result_fileContent)
     f.close()
 
     os_listdir_mock.return_value = ['111']
@@ -97,7 +98,7 @@ created = 2013-07-02 20:39:22.162757"""
       patch_join_mock.return_value = f2.name
       propMap = self.hostcleanup.read_host_check_file(tmpfile)
 
-    self.assertTrue(111 in propMap["processes"])
+    self.assertTrue('434' in propMap["processes"])
     self.assertTrue("mysql" in propMap["users"])
     self.assertTrue("HDP-epel" in propMap["repositories"])
     self.assertTrue("/etc/hadoop" in propMap["directories"])
@@ -317,29 +318,23 @@ created = 2013-07-02 20:39:22.162757"""
     calls = [call('userdel -rf a'), call('userdel -rf b'), call('groupdel hadoop')]
     run_os_command_mock.assert_has_calls(calls)
 
-  @patch("ConfigParser.RawConfigParser")
-  @patch("__builtin__.open")
   @patch("os.listdir", create=True, autospec=True)
-  def test_read_host_check_file(self, os_listdir_mock, openMock, readMock):
+  def test_read_host_check_file(self, os_listdir_mock):
     out = StringIO.StringIO()
     sys.stdout = out
-    f = MagicMock()
-
-    openRead = MagicMock()
-    openRead.read.return_value = 'java_home|hadoop'
-    openMock.side_effect = [f, openRead]
-    os_listdir_mock.return_value = ['111']
+    tmpfile = tempfile.mktemp()
+    f = open(tmpfile,'w')
+    f.write(hostcheck_result_fileContent)
+    f.close()
 
-    propertyMap = self.hostcleanup.read_host_check_file('test')
+    propertyMap = self.hostcleanup.read_host_check_file(tmpfile)
 
-    self.assertTrue(openMock.called)
-    self.assertTrue(readMock.called)
     self.assertTrue(propertyMap.has_key(PACKAGE_SECTION))
     self.assertTrue(propertyMap.has_key(REPO_SECTION))
     self.assertTrue(propertyMap.has_key(USER_SECTION))
     self.assertTrue(propertyMap.has_key(DIR_SECTION))
     self.assertTrue(propertyMap.has_key(PROCESS_SECTION))
-    self.assertEquals(propertyMap[PROCESS_SECTION][0], 111)
+    self.assertEquals(propertyMap[PROCESS_SECTION][0], "323")
 
     sys.stdout = sys.__stdout__
 
